com.amazonaws.services.lambda.runtime.events.S3Event Java Examples
The following examples show how to use com.amazonaws.services.lambda.runtime.events.S3Event.
The original project and source file are noted above each example.
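Before the individual examples, here is a minimal, self-contained sketch of the handler shape they all share: a RequestHandler receives the deserialized S3Event, iterates its notification records, and reads the bucket name and object key from each record. The class name and log message are illustrative only and are not taken from any of the projects below.

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.S3Event;

// Minimal sketch (hypothetical class name): log the bucket and key for every record in the event.
public class S3EventLoggingHandler implements RequestHandler<S3Event, Void> {

    @Override
    public Void handleRequest(S3Event event, Context context) {
        // An S3Event may carry several notification records; each one identifies a bucket and an object key.
        event.getRecords().forEach(record ->
                context.getLogger().log("Received " + record.getEventName() + " for "
                        + record.getS3().getBucket().getName() + "/" + record.getS3().getObject().getKey()));
        return null;
    }
}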
Example #1
Source File: LambdaContainer.java From aws-big-data-blog with Apache License 2.0 | 6 votes |
public void auditValidatedFile(S3Event event, Context ctx) throws Exception {
    Connection conn = new com.mysql.jdbc.Driver().connect(props.getProperty("url"), props);
    List<S3EventNotificationRecord> notificationRecords = event.getRecords();
    PreparedStatement ps = conn.prepareStatement(props.getProperty("sql.auditValidatedFile"));
    // Queue one audit row per notification record, keyed by "bucket/key", then flush the batch.
    for (S3EventNotificationRecord record : notificationRecords) {
        String fileURL = record.getS3().getBucket().getName() + "/" + record.getS3().getObject().getKey();
        ps.setString(1, fileURL);
        ps.setString(2, "VALIDATED");
        ps.setString(3, "VALIDATED");
        ps.addBatch();
    }
    ps.executeBatch();
    ps.close();
    conn.close();
}
Example #2
Source File: ThumbnailHandler.java From blog-tutorials with MIT License | 5 votes |
@Override
public Void handleRequest(S3Event s3Event, Context context) {
    String bucket = s3Event.getRecords().get(0).getS3().getBucket().getName();
    String key = s3Event.getRecords().get(0).getS3().getObject().getKey();
    System.out.println("Going to create a thumbnail for: " + bucket + "/" + key);

    AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
    System.out.println("Connection to S3 established");

    try {
        // Download the uploaded object to a local temp file
        File tempFile = File.createTempFile(key, ".tmp");
        s3Client.getObject(new GetObjectRequest(bucket, key), tempFile);
        System.out.println("Successfully read S3 object to local temp file");

        // Render a scaled-down copy of the image
        BufferedImage img = new BufferedImage(THUMBNAIL_SIZE, THUMBNAIL_SIZE, BufferedImage.TYPE_INT_RGB);
        img.createGraphics().drawImage(
                ImageIO.read(tempFile).getScaledInstance(100, 100, Image.SCALE_SMOOTH), 0, 0, null);
        File resizedTempFile = File.createTempFile(key, ".resized.tmp");
        ImageIO.write(img, "png", resizedTempFile);
        System.out.println("Successfully created resized image");

        // Upload the thumbnail to the same bucket under the thumbnail prefix
        String targetKey = THUMBNAIL_PREFIX + key.replace("uploads/", "");
        s3Client.putObject(bucket, targetKey, resizedTempFile);
        System.out.println("Successfully uploaded resized image with key " + targetKey);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
Example #3
Source File: FunctionConfiguration.java From blog-tutorials with MIT License | 5 votes |
@Bean
public Consumer<S3Event> processS3Event() {
    return s3Event -> {
        String bucket = s3Event.getRecords().get(0).getS3().getBucket().getName();
        String key = s3Event.getRecords().get(0).getS3().getObject().getKey();
        logger.info("Something was uploaded to S3: " + bucket + "/" + key);
        // ... further processing of the S3Event
    };
}
Example #4
Source File: Handler.java From Building-Serverless-Architectures with MIT License | 5 votes |
@Override
public Void handleRequest(S3Event input, Context context) {
    input.getRecords().forEach(s3EventNotificationRecord ->
            resizeImage(s3EventNotificationRecord.getS3().getBucket().getName(),
                    s3EventNotificationRecord.getS3().getObject().getKey()));
    return null;
}
Example #5
Source File: S3TriggerImageProcessingHandler.java From smart-security-camera with GNU General Public License v3.0 | 5 votes |
@Override
public Parameters handleRequest(S3Event event, Context context) {
    context.getLogger()
            .log("Input Function [" + context.getFunctionName() + "], S3Event [" + event.toJson().toString() + "]");

    // Capture the bucket, the key, and a fresh UUID that names the Step Functions execution.
    Parameters parameters = new Parameters(
            event.getRecords().get(0).getS3().getBucket().getName(),
            event.getRecords().get(0).getS3().getObject().getKey(),
            UUID.randomUUID());

    AWSStepFunctions client = AWSStepFunctionsClientBuilder.defaultClient();
    ObjectMapper jsonMapper = new ObjectMapper();

    // Start the state machine configured via STEP_MACHINE_ARN, passing the parameters as JSON input.
    StartExecutionRequest request = new StartExecutionRequest();
    request.setStateMachineArn(System.getenv("STEP_MACHINE_ARN"));
    request.setName(parameters.getStepFunctionID().toString());
    try {
        request.setInput(jsonMapper.writeValueAsString(parameters));
    } catch (JsonProcessingException e) {
        throw new AmazonServiceException("Error in [" + context.getFunctionName() + "]", e);
    }

    context.getLogger()
            .log("Step Function [" + request.getStateMachineArn() + "] will be called with [" + request.getInput() + "]");

    StartExecutionResult result = client.startExecution(request);

    context.getLogger()
            .log("Output Function [" + context.getFunctionName() + "], Result [" + result.toString() + "]");

    return parameters;
}
Example #6
Source File: CreateAthenaPartitionsBasedOnS3Event.java From aws-big-data-blog with Apache License 2.0 | 5 votes |
@Override
public Void handleRequest(S3Event s3Event, Context context) {
    Collection<Partition> requiredPartitions = new HashSet<>();
    TableService tableService = new TableService();

    for (S3EventNotification.S3EventNotificationRecord record : s3Event.getRecords()) {
        String bucket = record.getS3().getBucket().getName();
        String key = record.getS3().getObject().getKey();
        System.out.printf("S3 event [Event: %s, Bucket: %s, Key: %s]%n", record.getEventName(), bucket, key);

        S3Object s3Object = new S3Object(bucket, key);
        if (s3Object.hasDateTimeKey()) {
            requiredPartitions.add(partitionConfig.createPartitionFor(s3Object));
        }
    }

    if (!requiredPartitions.isEmpty()) {
        Collection<Partition> missingPartitions = determineMissingPartitions(
                partitionConfig.tableName(), requiredPartitions, tableService);
        tableService.addPartitions(partitionConfig.tableName(), missingPartitions);
    }

    return null;
}
Example #7
Source File: RemoveAthenaPartitionsBasedOnS3Event.java From aws-big-data-blog with Apache License 2.0 | 5 votes |
@Override
public Void handleRequest(S3Event s3Event, Context context) {
    Collection<Partition> partitionsToRemove = new HashSet<>();
    TableService tableService = new TableService();

    for (S3EventNotification.S3EventNotificationRecord record : s3Event.getRecords()) {
        String bucket = record.getS3().getBucket().getName();
        String key = record.getS3().getObject().getKey();
        System.out.printf("S3 event [Event: %s, Bucket: %s, Key: %s]%n", record.getEventName(), bucket, key);

        S3Object s3Object = new S3Object(bucket, key);
        if (s3Object.hasDateTimeKey()) {
            partitionsToRemove.add(partitionConfig.createPartitionFor(s3Object));
        }
    }

    if (!partitionsToRemove.isEmpty()) {
        tableService.removePartitions(
                partitionConfig.tableName(),
                partitionsToRemove.stream().map(Partition::spec).collect(Collectors.toList()));
    }

    return null;
}
Example #8
Source File: TikaLambdaHandler.java From tika-lambda with Apache License 2.0 | 4 votes |
public String handleRequest(S3Event s3event, Context context) {
    _logger = context.getLogger();
    _logger.log("Received S3 Event: " + s3event.toJson());

    try {
        S3EventNotificationRecord record = s3event.getRecords().get(0);

        String bucket = record.getS3().getBucket().getName();
        String extractBucket = "extracts." + bucket;

        // Object key may have spaces or unicode non-ASCII characters.
        String key = URLDecoder.decode(record.getS3().getObject().getKey().replace('+', ' '), "UTF-8");

        // Short-circuit ignore .extract files because they have already been extracted; this prevents an endless loop.
        if (key.toLowerCase().endsWith(".extract")) {
            _logger.log("Ignoring extract file " + key);
            return "Ignored";
        }

        AmazonS3 s3Client = new AmazonS3Client();
        S3Object s3Object = s3Client.getObject(new GetObjectRequest(bucket, key));

        try (InputStream objectData = s3Object.getObjectContent()) {
            String extractJson = doTikaStuff(bucket, key, objectData);

            byte[] extractBytes = extractJson.getBytes(Charset.forName("UTF-8"));
            int extractLength = extractBytes.length;

            ObjectMetadata metaData = new ObjectMetadata();
            metaData.setContentLength(extractLength);

            _logger.log("Saving extract file to S3");
            InputStream inputStream = new ByteArrayInputStream(extractBytes);
            s3Client.putObject(extractBucket, key + ".extract", inputStream, metaData);
        }
    } catch (IOException | TransformerConfigurationException | SAXException e) {
        _logger.log("Exception: " + e.getLocalizedMessage());
        throw new RuntimeException(e);
    }
    return "Success";
}
Example #9
Source File: S3EventProcessorUnzip.java From aws-lambda-unzip with Apache License 2.0 | 4 votes |
@Override
public String handleRequest(S3Event s3Event, Context context) {
    byte[] buffer = new byte[1024];
    try {
        for (S3EventNotificationRecord record : s3Event.getRecords()) {
            String srcBucket = record.getS3().getBucket().getName();

            // Object key may have spaces or unicode non-ASCII characters.
            String srcKey = record.getS3().getObject().getKey().replace('+', ' ');
            srcKey = URLDecoder.decode(srcKey, "UTF-8");

            // Detect file type
            Matcher matcher = Pattern.compile(".*\\.([^\\.]*)").matcher(srcKey);
            if (!matcher.matches()) {
                System.out.println("Unable to detect file type for key " + srcKey);
                return "";
            }
            String extension = matcher.group(1).toLowerCase();
            if (!"zip".equals(extension)) {
                System.out.println("Skipping non-zip file " + srcKey + " with extension " + extension);
                return "";
            }
            System.out.println("Extracting zip file " + srcBucket + "/" + srcKey);

            // Download the zip from S3 into a stream
            AmazonS3 s3Client = new AmazonS3Client();
            S3Object s3Object = s3Client.getObject(new GetObjectRequest(srcBucket, srcKey));
            ZipInputStream zis = new ZipInputStream(s3Object.getObjectContent());

            ZipEntry entry = zis.getNextEntry();
            while (entry != null) {
                String fileName = entry.getName();
                String mimeType = FileMimeType.fromExtension(FilenameUtils.getExtension(fileName)).mimeType();
                System.out.println("Extracting " + fileName + ", compressed: " + entry.getCompressedSize()
                        + " bytes, extracted: " + entry.getSize() + " bytes, mimetype: " + mimeType);

                ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                int len;
                while ((len = zis.read(buffer)) > 0) {
                    outputStream.write(buffer, 0, len);
                }

                InputStream is = new ByteArrayInputStream(outputStream.toByteArray());
                ObjectMetadata meta = new ObjectMetadata();
                meta.setContentLength(outputStream.size());
                meta.setContentType(mimeType);
                s3Client.putObject(srcBucket, FilenameUtils.getFullPath(srcKey) + fileName, is, meta);

                is.close();
                outputStream.close();
                entry = zis.getNextEntry();
            }
            zis.closeEntry();
            zis.close();

            // Delete zip file when done
            System.out.println("Deleting zip file " + srcBucket + "/" + srcKey + "...");
            s3Client.deleteObject(new DeleteObjectRequest(srcBucket, srcKey));
            System.out.println("Done deleting");
        }
        return "Ok";
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #10
Source File: SimpleEventHandler.java From blog-tutorials with MIT License | 3 votes |
@Override
public Void handleRequest(S3Event input, Context context) {
    System.out.println("Bucket name:" + input.getRecords().get(0).getS3().getBucket().getName());
    System.out.println("Key name:" + input.getRecords().get(0).getS3().getObject().getKey());
    // processing
    return null;
}