com.amazonaws.services.lambda.runtime.LambdaLogger Java Examples
The following examples show how to use
com.amazonaws.services.lambda.runtime.LambdaLogger.
The source project, file, and license are noted above each example.
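A LambdaLogger is obtained from the Context object that the Lambda runtime passes to every handler, and its log() method writes to the function's CloudWatch Logs stream. As a quick orientation before the examples, here is a minimal sketch of that pattern; the class name GreetingHandler and the greeting logic are hypothetical and not taken from any of the projects below.

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.LambdaLogger;
import com.amazonaws.services.lambda.runtime.RequestHandler;

public class GreetingHandler implements RequestHandler<String, String> {

    @Override
    public String handleRequest(String event, Context context) {
        // Obtain the runtime-provided logger; output goes to the function's CloudWatch log stream.
        LambdaLogger logger = context.getLogger();
        logger.log("Received event: " + event);
        return "Hello, " + event;
    }
}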
Example #1
Source File: EchoHandler.java From kinesis-aggregation with Apache License 2.0 | 6 votes |
@Override
public Void handleRequest(KinesisEvent event, Context context) {
    LambdaLogger logger = context.getLogger();

    // extract the records from the event
    List<KinesisEventRecord> records = event.getRecords();

    logger.log(String.format("Received %s Raw Records", records.size()));

    // now deaggregate the message contents
    List<UserRecord> deaggregated = new RecordDeaggregator<KinesisEventRecord>().deaggregate(records);
    logger.log(String.format("Received %s Deaggregated User Records", deaggregated.size()));

    deaggregated.stream().forEachOrdered(rec -> {
        logger.log(rec.getPartitionKey());
    });

    return null;
}
Example #2
Source File: KinesisLambdaReceiver.java From kinesis-aggregation with Apache License 2.0 | 6 votes |
/**
 * @see com.amazonaws.services.lambda.runtime.RequestHandler#handleRequest(java.lang.Object, com.amazonaws.services.lambda.runtime.Context)
 */
public Void handleRequestWithLists(KinesisEvent event, Context context) {
    LambdaLogger logger = context.getLogger();
    logger.log("Received " + event.getRecords().size() + " raw Event Records.");

    try {
        // process the user records with an anonymous record processor instance
        RecordDeaggregator.processRecords(event.getRecords(), new KinesisUserRecordProcessor() {
            public Void process(List<UserRecord> userRecords) {
                for (UserRecord userRecord : userRecords) {
                    // Your User Record Processing Code Here!
                    logger.log(new String(userRecord.getData().array()));
                }
                return null;
            }
        });
    } catch (Exception e) {
        logger.log(e.getMessage());
    }

    return null;
}
Example #3
Source File: KinesisLambdaReceiver.java From kinesis-aggregation with Apache License 2.0 | 6 votes |
/**
 * @see com.amazonaws.services.lambda.runtime.RequestHandler#handleRequest(java.lang.Object, com.amazonaws.services.lambda.runtime.Context)
 */
public Void handleRequestBulkList(KinesisEvent event, Context context) {
    LambdaLogger logger = context.getLogger();
    logger.log("Received " + event.getRecords().size() + " raw Event Records.");

    try {
        List<UserRecord> userRecords = RecordDeaggregator.deaggregate(event.getRecords());
        for (UserRecord userRecord : userRecords) {
            // Your User Record Processing Code Here!
            logger.log(new String(userRecord.getData().array()));
        }
    } catch (Exception e) {
        logger.log(e.getMessage());
    }

    return null;
}
Example #4
Source File: Handler3.java From aws-doc-sdk-examples with Apache License 2.0 | 6 votes |
@Override
public String handleRequest(String event, Context context) {
    LambdaLogger logger = context.getLogger();
    String email = event;

    // log execution details
    logger.log("Email value " + email);

    SendMessage msg = new SendMessage();
    try {
        msg.sendMessage(email);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return "";
}
Example #5
Source File: LambdaAggregatingForwarder.java From kinesis-aggregation with Apache License 2.0 | 6 votes |
/**
 * Check if the input aggregated record is complete and if so, forward it to the
 * configured destination Kinesis stream.
 *
 * @param logger The LambdaLogger from the input Context
 * @param aggRecord The aggregated record to transmit or null if the record isn't full yet.
 */
private void checkAndForwardRecords(LambdaLogger logger, AggRecord aggRecord) {
    if (aggRecord == null) {
        return;
    }

    logger.log("Forwarding " + aggRecord.getNumUserRecords() + " as an aggregated record.");

    PutRecordRequest request = aggRecord.toPutRecordRequest(DESTINATION_STREAM_NAME);
    try {
        PutRecordResult result = this.kinesisForwarder.putRecord(request);
        logger.log("Successfully published record Seq #" + result.getSequenceNumber()
                + " to shard " + result.getShardId());
    } catch (Exception e) {
        logger.log("ERROR: Failed to forward Kinesis records to destination stream: " + e.getMessage());
        return;
    }
}
Example #6
Source File: LambdaKinesisEventHandler.java From Serverless-Programming-Cookbook with MIT License | 6 votes |
/**
 * Handle request.
 *
 * @param kinesisEvent - Kinesis Event passed as input to lambda handler
 * @param context - context object
 * @return true if success, else false.
 */
public Boolean handleRequest(final KinesisEvent kinesisEvent, final Context context) {
    LambdaLogger logger = context.getLogger();
    logger.log("Received Kinesis event: " + kinesisEvent);
    logger.log("Number of records: " + kinesisEvent.getRecords().size());

    try {
        kinesisEvent.getRecords().forEach(r -> {
            final KinesisEvent.Record kr = r.getKinesis();
            logger.log("Record: " + kr.toString());
            logger.log("Data: " + StandardCharsets.UTF_8.decode(kr.getData()).toString());
        });
    } catch (final Exception e) {
        logger.log("There was an exception: " + e.getMessage());
        return false;
    }
    return true;
}
Example #7
Source File: ProxyStreamHandlerLambda.java From Serverless-Programming-Cookbook with MIT License | 6 votes |
/**
 * handleRequest implementation.
 *
 * @param inputStream - Input stream from API Gateway.
 * @param outputStream - Output stream to API Gateway.
 * @param context - Context.
 * @throws IOException - If something goes wrong.
 */
public final void handleRequest(final InputStream inputStream, final OutputStream outputStream,
        final Context context) throws IOException {
    LambdaLogger logger = context.getLogger();
    logger.log("Inside Proxy Stream Handler.");

    final String greeting = generateGreetingFromInputStream(inputStream);
    final JSONObject responseJson = generateResponseJson(greeting);
    logger.log(responseJson.toJSONString());

    OutputStreamWriter writer = new OutputStreamWriter(outputStream, "UTF-8");
    writer.write(responseJson.toJSONString());
    writer.close();
}
Example #8
Source File: SnsServiceImpl.java From Serverless-Programming-Cookbook with MIT License | 6 votes |
@Override
public final Boolean processEvent(final SNSEvent event, final String outputQueueURL, final LambdaLogger logger) {
    try {
        logger.log("Number of records in event: " + event.getRecords().size());

        Collection<SendMessageBatchRequestEntry> entries = new ArrayList<>();
        int idVal = 1;
        for (SNSRecord r : event.getRecords()) {
            logger.log("Adding message: " + r.getSNS().getMessage());
            entries.add(new SendMessageBatchRequestEntry("id_" + idVal, r.getSNS().getMessage()));
            idVal++;
        }

        final SendMessageBatchRequest sendBatchRequest = new SendMessageBatchRequest()
                .withQueueUrl(outputQueueURL)
                .withEntries(entries);
        this.sqsClient.sendMessageBatch(sendBatchRequest);
    } catch (Exception e) {
        final String errorMessage = "Error occurred: " + e.getMessage();
        logger.log(errorMessage);
        return false;
    }
    return true;
}
Example #9
Source File: EchoHandler.java From kinesis-aggregation with Apache License 2.0 | 6 votes |
@Override
public Void handleRequest(KinesisEvent event, Context context) {
    LambdaLogger logger = context.getLogger();

    // extract the records from the event
    List<KinesisEventRecord> records = event.getRecords();

    logger.log(String.format("Received %s Raw Records", records.size()));

    try {
        // now deaggregate the message contents
        List<KinesisClientRecord> deaggregated = new RecordDeaggregator<KinesisEventRecord>().deaggregate(records);
        logger.log(String.format("Received %s Deaggregated User Records", deaggregated.size()));

        deaggregated.stream().forEachOrdered(rec -> {
            logger.log(rec.partitionKey());
        });
    } catch (Exception e) {
        logger.log(e.getMessage());
    }

    return null;
}
Example #10
Source File: Handler.java From djl-demo with Apache License 2.0 | 5 votes |
@Override
public void handleRequest(InputStream is, OutputStream os, Context context) throws IOException {
    LambdaLogger logger = context.getLogger();
    String input = Utils.toString(is);
    try {
        Request request = GSON.fromJson(input, Request.class);
        String url = request.getInputImageUrl();
        String artifactId = request.getArtifactId();
        Map<String, String> filters = request.getFilters();

        Criteria<Image, Classifications> criteria =
                Criteria.builder()
                        .setTypes(Image.class, Classifications.class)
                        .optArtifactId(artifactId)
                        .optFilters(filters)
                        .build();

        try (ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria);
                Predictor<Image, Classifications> predictor = model.newPredictor()) {
            Image image = ImageFactory.getInstance().fromUrl(url);
            List<Classifications.Classification> result = predictor.predict(image).topK(5);
            os.write(GSON.toJson(result).getBytes(StandardCharsets.UTF_8));
        }
    } catch (RuntimeException | ModelException | TranslateException e) {
        logger.log("Failed to handle input: " + input);
        logger.log(e.toString());
        String msg = "{\"status\": \"invoke failed: " + e.toString() + "\"}";
        os.write(msg.getBytes(StandardCharsets.UTF_8));
    }
}
Example #11
Source File: KinesisLambdaReceiver.java From kinesis-aggregation with Apache License 2.0 | 5 votes |
/**
 * @see com.amazonaws.services.lambda.runtime.RequestHandler#handleRequest(java.lang.Object, com.amazonaws.services.lambda.runtime.Context)
 */
public Void handleRequest(KinesisEvent event, Context context) {
    LambdaLogger logger = context.getLogger();
    logger.log("Received " + event.getRecords().size() + " raw Event Records.");

    // Stream the User Records from the Lambda Event
    RecordDeaggregator.stream(event.getRecords().stream(), userRecord -> {
        // Your User Record Processing Code Here!
        logger.log(new String(userRecord.getData().array()));
    });

    return null;
}
Example #12
Source File: CloudWatchLogPublisher.java From cloudformation-cli-java-plugin with Apache License 2.0 | 5 votes |
public CloudWatchLogPublisher(final CloudWatchLogsProvider cloudWatchLogsProvider,
                              final String logGroupName,
                              final String logStreamName,
                              final LambdaLogger platformLambdaLogger,
                              final MetricsPublisherProxy metricsPublisherProxy,
                              final LogFilter... logFilters) {
    super(logFilters);
    this.cloudWatchLogsProvider = cloudWatchLogsProvider;
    this.logGroupName = logGroupName;
    this.logStreamName = logStreamName;
    this.platformLambdaLogger = platformLambdaLogger;
    this.metricsPublisherProxy = metricsPublisherProxy;
}
Example #13
Source File: CloudWatchLogHelper.java From cloudformation-cli-java-plugin with Apache License 2.0 | 5 votes |
public CloudWatchLogHelper(final CloudWatchLogsProvider cloudWatchLogsProvider,
                           final String logGroupName,
                           final LambdaLogger platformLambdaLogger,
                           final MetricsPublisherProxy metricsPublisherProxy) {
    this.cloudWatchLogsProvider = cloudWatchLogsProvider;
    this.logGroupName = logGroupName;
    this.platformLambdaLogger = platformLambdaLogger;
    this.metricsPublisherProxy = metricsPublisherProxy;
}
Example #14
Source File: KinesisServiceImpl.java From Serverless-Programming-Cookbook with MIT License | 5 votes |
private void flushBatch(final String streamName, final LambdaLogger logger) {
    final PutRecordsResult result = this.kinesisClient.putRecords(new PutRecordsRequest()
            .withStreamName(streamName)
            .withRecords(this.kinesisBatch));

    result.getRecords().forEach(r -> {
        if (!(StringUtils.hasValue(r.getErrorCode()))) {
            String successMessage = "Successfully processed record with sequence number: " + r.getSequenceNumber()
                    + ", shard id: " + r.getShardId();
            logger.log(successMessage);
        } else {
            this.documentAddedCount--;
            String errorMessage = "Did not process record with sequence number: " + r.getSequenceNumber()
                    + ", error code: " + r.getErrorCode()
                    + ", error message: " + r.getErrorMessage();
            logger.log(errorMessage);
            this.isError = true;
        }
    });

    // You may also implement a retry logic only for failed records (e.g. create a list for failed records,
    // add error records to that list and finally retry all failed records until a max retry count is reached.)
    /*
    if (result.getFailedRecordCount() != null && result.getFailedRecordCount() > 0) {
        result.getRecords().forEach(r -> {
            if ((r != null) && (StringUtils.hasValue(r.getErrorCode()))) {
                // add this record to the retry list.
            }
        });
    }
    */
}
Example #15
Source File: rekognition-video-java-detect-labels-lambda.java From aws-doc-sdk-examples with Apache License 2.0 | 5 votes |
@Override
public String handleRequest(SNSEvent event, Context context) {
    String message = event.getRecords().get(0).getSNS().getMessage();
    LambdaLogger logger = context.getLogger();

    // Parse SNS event for analysis results. Log results.
    try {
        ObjectMapper operationResultMapper = new ObjectMapper();
        JsonNode jsonResultTree = operationResultMapper.readTree(message);
        logger.log("Rekognition Video Operation:=========================");
        logger.log("Job id: " + jsonResultTree.get("JobId"));
        logger.log("Status : " + jsonResultTree.get("Status"));
        logger.log("Job tag : " + jsonResultTree.get("JobTag"));
        logger.log("Operation : " + jsonResultTree.get("API"));

        if (jsonResultTree.get("API").asText().equals("StartLabelDetection")) {
            if (jsonResultTree.get("Status").asText().equals("SUCCEEDED")) {
                GetResultsLabels(jsonResultTree.get("JobId").asText(), context);
            } else {
                String errorMessage = "Video analysis failed for job " + jsonResultTree.get("JobId")
                        + " State " + jsonResultTree.get("Status");
                throw new Exception(errorMessage);
            }
        } else {
            logger.log("Operation not StartLabelDetection");
        }
    } catch (Exception e) {
        logger.log("Error: " + e.getMessage());
        throw new RuntimeException(e);
    }
    return message;
}
Example #16
Source File: SqsServiceImpl.java From Serverless-Programming-Cookbook with MIT License | 5 votes |
@Override
public final Boolean processEvent(final SQSEvent event, final String outputQueueURL, final LambdaLogger logger) {
    try {
        logger.log("Number of messages in event: " + event.getRecords().size());
        logger.log("Output Queue URL: " + outputQueueURL);

        Collection<SendMessageBatchRequestEntry> entries = new ArrayList<>();
        int idVal = 1;
        for (SQSMessage m : event.getRecords()) {
            logger.log("Adding message: " + m.getBody());
            entries.add(new SendMessageBatchRequestEntry("id_" + idVal, m.getBody()));
            idVal++;
        }

        final SendMessageBatchRequest sendBatchRequest = new SendMessageBatchRequest()
                .withQueueUrl(outputQueueURL)
                .withEntries(entries);
        this.sqsClient.sendMessageBatch(sendBatchRequest);
    } catch (Exception e) {
        final String errorMessage = "Error occurred: " + e.getMessage();
        logger.log(errorMessage);
        return false;
    }
    return true;
}
Example #17
Source File: Handler.java From aws-doc-sdk-examples with Apache License 2.0 | 5 votes |
@Override
public String handleRequest(Map<String, String> event, Context context) {
    LambdaLogger logger = context.getLogger();
    Gson gson = new GsonBuilder().create();

    // log execution details
    logger.log("ENVIRONMENT VARIABLES: " + gson.toJson(System.getenv()));
    logger.log("CONTEXT: " + gson.toJson(context));

    // process event
    logger.log("EVENT Data: " + gson.toJson(event));

    String myCaseID = event.get("inputCaseID");
    logger.log("CaseId44: " + myCaseID);
    return myCaseID;
}
Example #18
Source File: Handler2.java From aws-doc-sdk-examples with Apache License 2.0 | 5 votes |
@Override
public String handleRequest(String event, Context context) {
    PersistCase perCase = new PersistCase();

    LambdaLogger logger = context.getLogger();
    String val = event;
    logger.log("CASE is about to be assigned " + val);

    // Create very simple logic to assign case to an employee
    int tmp = (Math.random() <= 0.5) ? 1 : 2;
    logger.log("TMP IS " + tmp);

    String emailEmp = "";
    if (tmp == 1) {
        // assign to tblue
        emailEmp = "[email protected]";
        perCase.putRecord(val, "Tom Blue", emailEmp);
    } else {
        // assign to swhite
        emailEmp = "[email protected]";
        perCase.putRecord(val, "Sarah White", emailEmp);
    }
    logger.log("emailEmp IS " + emailEmp);

    // return email - used in the next step
    return emailEmp;
}
Example #19
Source File: Handler.java From djl-demo with Apache License 2.0 | 5 votes |
@Override
public void handleRequest(InputStream is, OutputStream os, Context context) throws IOException {
    LambdaLogger logger = context.getLogger();
    String input = Utils.toString(is);
    try {
        Request request = GSON.fromJson(input, Request.class);
        String base64Img = request.getImageData().split(",")[1];
        byte[] imgBytes = Base64.getDecoder().decode(base64Img);

        Image img;
        try (ByteArrayInputStream bis = new ByteArrayInputStream(imgBytes)) {
            ImageFactory factory = ImageFactory.getInstance();
            img = factory.fromInputStream(bis);
        }

        Translator<Image, Classifications> translator =
                ImageClassificationTranslator.builder()
                        .addTransform(new ToTensor())
                        .optFlag(Image.Flag.GRAYSCALE)
                        .optApplySoftmax(true)
                        .build();

        Criteria<Image, Classifications> criteria =
                Criteria.builder()
                        .setTypes(Image.class, Classifications.class)
                        .optModelUrls(
                                "https://djl-ai.s3.amazonaws.com/resources/demo/pytorch/doodle_mobilenet.zip")
                        .optTranslator(translator)
                        .build();

        ZooModel<Image, Classifications> model = ModelZoo.loadModel(criteria);
        try (Predictor<Image, Classifications> predictor = model.newPredictor()) {
            List<Classifications.Classification> result = predictor.predict(img).topK(5);
            os.write(GSON.toJson(result).getBytes(StandardCharsets.UTF_8));
        }
    } catch (RuntimeException | ModelException | TranslateException e) {
        logger.log("Failed to handle input: " + input);
        logger.log(e.toString());
        String msg = "{\"status\": \"invoke failed: " + e.toString() + "\"}";
        os.write(msg.getBytes(StandardCharsets.UTF_8));
    }
}
Example #20
Source File: ActionsAWSHandler.java From dialogflow-webhook-boilerplate-java with Apache License 2.0 | 5 votes |
@Override
public void handleRequest(InputStream inputStream, OutputStream outputStream, Context context)
        throws IOException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
    JSONObject awsResponse = new JSONObject();
    LambdaLogger logger = context.getLogger();
    try {
        JSONObject awsRequest = (JSONObject) parser.parse(reader);
        JSONObject headers = (JSONObject) awsRequest.get("headers");
        String body = (String) awsRequest.get("body");
        logger.log("AWS request body = " + body);

        actionsApp.handleRequest(body, headers)
                .thenAccept((webhookResponseJson) -> {
                    logger.log("Generated json = " + webhookResponseJson);
                    JSONObject responseHeaders = new JSONObject();
                    responseHeaders.put("Content-Type", "application/json");
                    awsResponse.put("statusCode", "200");
                    awsResponse.put("headers", responseHeaders);
                    awsResponse.put("body", webhookResponseJson);
                    writeResponse(outputStream, awsResponse);
                }).exceptionally((throwable -> {
                    awsResponse.put("statusCode", "500");
                    awsResponse.put("exception", throwable);
                    writeResponse(outputStream, awsResponse);
                    return null;
                }));
    } catch (ParseException e) {
        e.printStackTrace();
    }
}
Example #21
Source File: rekognition-video-java-detect-labels-lambda.java From aws-doc-sdk-examples with Apache License 2.0 | 4 votes |
void GetResultsLabels(String startJobId, Context context) throws Exception {
    LambdaLogger logger = context.getLogger();

    AmazonRekognition rek = AmazonRekognitionClientBuilder.standard().withRegion(Regions.US_EAST_1).build();

    int maxResults = 1000;
    String paginationToken = null;
    GetLabelDetectionResult labelDetectionResult = null;
    String labels = "";
    Integer labelsCount = 0;
    String label = "";
    String currentLabel = "";

    // Get label detection results and log them.
    do {
        GetLabelDetectionRequest labelDetectionRequest = new GetLabelDetectionRequest()
                .withJobId(startJobId)
                .withSortBy(LabelDetectionSortBy.NAME)
                .withMaxResults(maxResults)
                .withNextToken(paginationToken);

        labelDetectionResult = rek.getLabelDetection(labelDetectionRequest);

        paginationToken = labelDetectionResult.getNextToken();
        VideoMetadata videoMetaData = labelDetectionResult.getVideoMetadata();

        // Add labels to log
        List<LabelDetection> detectedLabels = labelDetectionResult.getLabels();
        for (LabelDetection detectedLabel : detectedLabels) {
            label = detectedLabel.getLabel().getName();
            if (label.equals(currentLabel)) {
                continue;
            }
            labels = labels + label + " / ";
            currentLabel = label;
            labelsCount++;
        }
    } while (labelDetectionResult != null && labelDetectionResult.getNextToken() != null);

    logger.log("Total number of labels : " + labelsCount);
    logger.log("labels : " + labels);
}
Example #22
Source File: TestContext.java From aws-doc-sdk-examples with Apache License 2.0 | 4 votes |
public LambdaLogger getLogger() {
    return new TestLogger();
}
Example #23
Source File: TestContext.java From aws-lambda-java-example with Apache License 2.0 | 4 votes |
@Override
public LambdaLogger getLogger() {
    return logger;
}
Example #24
Source File: TestContext.java From aws-lambda-java-example with Apache License 2.0 | 4 votes |
public void setLogger(LambdaLogger value) {
    logger = value;
}
Example #25
Source File: MockLambdaContext.java From aws-serverless-java-container with Apache License 2.0 | 4 votes |
@Override
public LambdaLogger getLogger() {
    return logger;
}
Example #26
Source File: TestContext.java From bender with Apache License 2.0 | 4 votes |
@Override
public LambdaLogger getLogger() {
    return null;
}
Example #27
Source File: HandlerTest.java From lambadaframework with MIT License | 4 votes |
private Context getContext() {
    return new Context() {
        @Override public String getAwsRequestId() { return "23234234"; }
        @Override public String getLogGroupName() { return null; }
        @Override public String getLogStreamName() { return null; }
        @Override public String getFunctionName() { return null; }
        @Override public String getFunctionVersion() { return null; }
        @Override public String getInvokedFunctionArn() { return null; }
        @Override public CognitoIdentity getIdentity() { return null; }
        @Override public ClientContext getClientContext() { return null; }
        @Override public int getRemainingTimeInMillis() { return 5000; }
        @Override public int getMemoryLimitInMB() { return 128; }
        @Override public LambdaLogger getLogger() { return null; }
    };
}
Example #28
Source File: SyncMemcachedCache.java From serverless-examples-cached-rds-ws with Apache License 2.0 | 4 votes |
public LambdaLogger getLogger() {
    return logger;
}
Example #29
Source File: KinesisServiceImpl.java From Serverless-Programming-Cookbook with MIT License | 4 votes |
@Override
public final Response addRecords(final Request request, final LambdaLogger logger) {
    this.documentAddedCount = request.getCount();

    DescribeStreamResult result = this.kinesisClient.describeStream(request.getStreamName());
    logger.log("Stream Status: " + result.getStreamDescription().getStreamStatus() + ". ");

    logger.log("Adding records to Stream...");
    String payload;
    for (int i = 1; i <= request.getCount(); i++) {
        payload = request.getPayload() + i;
        this.kinesisBatch.add(new PutRecordsRequestEntry()
                .withPartitionKey(request.getPartitionKey())
                .withData(ByteBuffer.wrap(payload.getBytes())));

        if (this.kinesisBatch.size() >= request.getBatchSize()) {
            try {
                logger.log("Flushing records to Stream...");
                flushBatch(request.getStreamName(), logger);
            } catch (Exception e) {
                logger.log("Exception occurred: " + e);
                this.isError = true;
            } finally {
                this.kinesisBatch.clear();
            }
        }
    }

    if (this.isError) {
        return new Response(ERROR_MESSAGE, documentAddedCount);
    } else {
        return new Response(SUCCESS_MESSAGE, documentAddedCount);
    }
}
Example #30
Source File: MockContext.java From djl-demo with Apache License 2.0 | 4 votes |
@Override
public LambdaLogger getLogger() {
    return new MockLogger();
}