com.google.cloud.pubsub.v1.AckReplyConsumer Java Examples
The following examples show how to use com.google.cloud.pubsub.v1.AckReplyConsumer. They are drawn from open-source projects; the source file, project, and license are noted above each snippet.
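Before the project-specific examples, the sketch below shows the shared pattern in isolation: a MessageReceiver callback receives each PubsubMessage together with an AckReplyConsumer and calls ack() once the message is handled, or nack() to request redelivery. This is a minimal illustration assuming the standard google-cloud-pubsub client library; it is not taken from any of the projects listed, and the project ID, subscription ID, and print-based handling are placeholders.

  import com.google.cloud.pubsub.v1.AckReplyConsumer;
  import com.google.cloud.pubsub.v1.MessageReceiver;
  import com.google.cloud.pubsub.v1.Subscriber;
  import com.google.pubsub.v1.ProjectSubscriptionName;
  import com.google.pubsub.v1.PubsubMessage;

  public class MinimalAckExample {
    public static void main(String[] args) throws Exception {
      // Placeholder identifiers; replace with your own project and subscription.
      ProjectSubscriptionName subscription =
          ProjectSubscriptionName.of("my-project", "my-subscription");

      // The receiver gets both the message and an AckReplyConsumer for that message.
      MessageReceiver receiver =
          (PubsubMessage message, AckReplyConsumer consumer) -> {
            try {
              System.out.println("Received: " + message.getData().toStringUtf8());
              consumer.ack();  // Acknowledge: the message will not be redelivered.
            } catch (RuntimeException e) {
              consumer.nack(); // Negative acknowledgment: Pub/Sub will redeliver the message.
            }
          };

      Subscriber subscriber = Subscriber.newBuilder(subscription, receiver).build();
      subscriber.startAsync().awaitRunning();
      Thread.sleep(30_000); // Listen for 30 seconds, then shut down.
      subscriber.stopAsync().awaitTerminated();
    }
  }

The examples that follow make the same ack()/nack() decision inside project-specific handlers.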
Example #1
Source File: PubsubIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0
private List<String> receiveLines(int expectedMessageCount) throws Exception {
  List<String> received = new CopyOnWriteArrayList<>();
  ProjectSubscriptionName subscriptionName =
      ProjectSubscriptionName.of(projectId, subscriptionId);
  MessageReceiver receiver = ((PubsubMessage message, AckReplyConsumer consumer) -> {
    try {
      String encoded = Json.asString(new org.apache.beam.sdk.io.gcp.pubsub.PubsubMessage(
          message.getData().toByteArray(), message.getAttributesMap()));
      received.add(encoded);
    } catch (IOException e) {
      throw new UncheckedIOException(e);
    }
    consumer.ack();
  });
  Subscriber subscriber = Subscriber.newBuilder(subscriptionName, receiver).build();
  subscriber.startAsync();
  while (received.size() < expectedMessageCount) {
    Thread.sleep(100);
  }
  subscriber.stopAsync();
  return received;
}
Example #2
Source File: GooglePubsubSubscriber.java From echo with Apache License 2.0
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  String messagePayload = message.getData().toStringUtf8();
  String messageId = message.getMessageId();
  Map<String, String> messageAttributes =
      message.getAttributesMap() == null ? new HashMap<>() : message.getAttributesMap();
  log.debug(
      "Received Google pub/sub message with payload: {}\n and attributes: {}",
      messagePayload,
      messageAttributes);

  MessageDescription description =
      MessageDescription.builder()
          .subscriptionName(subscriptionName)
          .messagePayload(messagePayload)
          .messageAttributes(messageAttributes)
          .pubsubSystem(pubsubSystem)
          .ackDeadlineSeconds(
              5 * ackDeadlineSeconds) // Set a high upper bound on message processing time.
          .retentionDeadlineSeconds(
              7 * 24 * 60 * 60) // Expire key after max retention time, which is 7 days.
          .build();

  GoogleMessageAcknowledger acknowledger = new GoogleMessageAcknowledger(consumer);
  pubsubMessageHandler.handleMessage(
      description, acknowledger, identity.getIdentity(), messageId);
}
Example #3
Source File: NotificationReceiver.java From java-docs-samples with Apache License 2.0
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  NotificationMessage.Builder notificationMessageBuilder = NotificationMessage.newBuilder();
  try {
    String jsonString = message.getData().toStringUtf8();
    JsonFormat.parser().merge(jsonString, notificationMessageBuilder);
    NotificationMessage notificationMessage = notificationMessageBuilder.build();

    System.out.println(
        String.format("Config id: %s", notificationMessage.getNotificationConfigName()));
    System.out.println(String.format("Finding: %s", notificationMessage.getFinding()));
  } catch (InvalidProtocolBufferException e) {
    System.out.println("Could not parse message: " + e);
  } finally {
    consumer.ack();
  }
}
Example #4
Source File: PubSubChannelAdaptersIntegrationTests.java From spring-cloud-gcp with Apache License 2.0
@Test
@SuppressWarnings("deprecation")
public void sendAndReceiveMessageManualAckThroughAcknowledgementHeader() {
  this.contextRunner
      .withUserConfiguration(PollableConfiguration.class, CommonConfiguration.class)
      .run((context) -> {
        context.getBean(PubSubInboundChannelAdapter.class).setAckMode(AckMode.MANUAL);
        context.getBean("inputChannel", MessageChannel.class).send(
            MessageBuilder.withPayload(
                "I am a message (sendAndReceiveMessageManualAckThroughAcknowledgementHeader)."
                    .getBytes()).build());

        PollableChannel channel = context.getBean("outputChannel", PollableChannel.class);

        Message<?> message = channel.receive(RECEIVE_TIMEOUT_MS);
        assertThat(message).isNotNull();
        AckReplyConsumer acker =
            (AckReplyConsumer) message.getHeaders().get(GcpPubSubHeaders.ACKNOWLEDGEMENT);
        assertThat(acker).isNotNull();
        acker.ack();

        message = channel.receive(RECEIVE_TIMEOUT_MS);
        assertThat(message).isNull();

        assertThat(this.outputCaptureRule.getOut())
            .contains("ACKNOWLEDGEMENT header is deprecated");
      });
}
Example #5
Source File: MessageProcessorImpl.java From datacollector with Apache License 2.0
public MessageProcessorImpl(
    PushSource.Context context,
    int batchSize,
    long maxWaitTime,
    DataParserFactory parserFactory,
    SynchronousQueue<MessageReplyConsumerBundle> queue, // NOSONAR
    Clock clock
) {
  this.context = context;
  this.batchSize = batchSize;
  this.maxWaitTime = maxWaitTime;
  this.parserFactory = parserFactory;
  this.queue = queue;
  this.clock = clock;

  if (context.isPreview()) {
    reply = AckReplyConsumer::nack;
  } else {
    reply = AckReplyConsumer::ack;
  }
}
Example #6
Source File: RiskAnalysisLDiversity.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #7
Source File: RiskAnalysisNumericalStats.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #8
Source File: InspectBigQueryTable.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #9
Source File: RiskAnalysisKAnonymity.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #10
Source File: RiskAnalysisKMap.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #11
Source File: InspectGcsFileWithSampling.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #12
Source File: InspectBigQueryTableWithSampling.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #13
Source File: MessageReceiverImpl.java From datacollector with Apache License 2.0
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  try {
    messages.put(new MessageReplyConsumerBundle(message, consumer));
  } catch (InterruptedException e) {
    LOG.warn(
        "Thread interrupted while trying to enqueue message with id '{}'. Sending nack. Message will be re-received",
        message.getMessageId()
    );
    consumer.nack();
    Thread.currentThread().interrupt();
  }
}
Example #14
Source File: RiskAnalysisCategoricalStats.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #15
Source File: InspectGcsFile.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #16
Source File: InspectDatastoreEntity.java From java-docs-samples with Apache License 2.0
private static void handleMessage(
    DlpJob job,
    SettableApiFuture<Boolean> done,
    PubsubMessage pubsubMessage,
    AckReplyConsumer ackReplyConsumer) {
  String messageAttribute = pubsubMessage.getAttributesMap().get("DlpJobName");
  if (job.getName().equals(messageAttribute)) {
    done.set(true);
    ackReplyConsumer.ack();
  } else {
    ackReplyConsumer.nack();
  }
}
Example #17
Source File: Subscriptions.java From java-docs-samples with Apache License 2.0
@Override
public synchronized void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  // Every time a Pub/Sub message comes in, print it and count it
  System.out.println("Message " + messageCount + ": " + message.getData().toStringUtf8());
  messageCount += 1;
  // Acknowledge the message
  consumer.ack();
}
Example #18
Source File: CPSSubscriberTask.java From pubsub with Apache License 2.0
@Override
public void receiveMessage(final PubsubMessage message, final AckReplyConsumer consumer) {
  this.metricsHandler.add(
      LoadtestProto.MessageIdentifier.newBuilder()
          .setPublisherClientId(Integer.parseInt(message.getAttributesMap().get("clientId")))
          .setSequenceNumber(Integer.parseInt(message.getAttributesMap().get("sequenceNumber")))
          .build(),
      Duration.ofMillis(
          System.currentTimeMillis()
              - Long.parseLong(message.getAttributesMap().get("sendTime"))));
  consumer.ack();
}
Example #19
Source File: TestMessageReceiverImpl.java From datacollector with Apache License 2.0
@Test
public void interruptReceive() throws Exception {
  SynchronousQueue<MessageReplyConsumerBundle> queue = new SynchronousQueue<>();
  MessageReceiver messageReceiver = new MessageReceiverImpl(queue);
  PubsubMessage message = PubsubMessage.newBuilder().setMessageId("1234").build();
  AckReplyConsumer consumer = mock(AckReplyConsumer.class);

  Thread t = new Thread(() -> messageReceiver.receiveMessage(message, consumer));
  t.start();
  t.interrupt();
  ThreadUtil.sleep(50);

  verify(consumer, times(1)).nack();
}
Example #20
Source File: ExportMessageReceiver.java From healthcare-dicom-dicomweb-adapter with Apache License 2.0
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  try {
    MonitoringService.addEvent(Event.REQUEST);
    dicomSender.send(message);
    consumer.ack();
  } catch (Exception e) {
    MonitoringService.addEvent(Event.ERROR);
    e.printStackTrace();
    consumer.nack();
  }
}
Example #21
Source File: PubsubBenchWrapperImpl.java From google-cloud-java with Apache License 2.0
@Override
public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) {
  consumer.ack();
}
Example #22
Source File: MessageReplyConsumerBundle.java From datacollector with Apache License 2.0
public MessageReplyConsumerBundle(PubsubMessage message, AckReplyConsumer consumer) {
  this.message = message;
  this.consumer = consumer;
}
Example #23
Source File: MessageReplyConsumerBundle.java From datacollector with Apache License 2.0
public AckReplyConsumer getConsumer() {
  return consumer;
}
Example #24
Source File: TestMessageProcessorImpl.java From datacollector with Apache License 2.0
@Test
public void testProcessMessages() throws Exception {
  PushSource.Context context = mock(PushSource.Context.class);
  BatchContext batchContext = mock(BatchContext.class);
  BatchMaker batchMaker = mock(BatchMaker.class);

  when(context.isPreview()).thenReturn(isPreview);
  when(context.createRecord(anyString())).thenReturn(RecordCreator.create());
  when(context.startBatch()).thenReturn(batchContext);
  when(batchContext.getBatchMaker()).thenReturn(batchMaker);

  int batchSize = 1;
  long maxWaitTime = 100;

  DataParserFactory parserFactory = new DataParserFactoryBuilder(
      context,
      DataParserFormat.TEXT
  ).setCharset(Charsets.UTF_8).setMaxDataLen(1000).build();

  SynchronousQueue<MessageReplyConsumerBundle> queue = new SynchronousQueue<>();
  MessageProcessor processor = new MessageProcessorImpl(
      context,
      batchSize,
      maxWaitTime,
      parserFactory,
      queue,
      Clock.fixed(Instant.now(), ZoneId.of("UTC"))
  );

  ExecutorService executor = Executors.newSingleThreadExecutor();
  Future future = executor.submit(processor);

  PubsubMessage message = PubsubMessage.newBuilder()
      .setMessageId("1")
      .setData(ByteString.copyFrom("Hello, World!", Charsets.UTF_8))
      .putAttributes("attribute1", "attributevalue")
      .setPublishTime(Timestamp.getDefaultInstance())
      .build();

  AckReplyConsumer consumer = mock(AckReplyConsumer.class);

  queue.offer(new MessageReplyConsumerBundle(message, consumer), 1000, TimeUnit.MILLISECONDS);
  ThreadUtil.sleep(50);

  processor.stop();
  future.get();
  executor.shutdownNow();

  if (isPreview) {
    verify(consumer, times(1)).nack();
  } else {
    verify(consumer, times(1)).ack();
  }
}
Example #25
Source File: GoogleMessageAcknowledger.java From echo with Apache License 2.0
public GoogleMessageAcknowledger(AckReplyConsumer consumer) {
  this.consumer = consumer;
}
Example #26
Source File: RiskAnalysisNumericalStats.java From java-docs-samples with Apache License 2.0
public static void numericalStatsAnalysis(
    String projectId, String datasetId, String tableId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlpServiceClient = DlpServiceClient.create()) {

    // Specify the BigQuery table to analyze
    BigQueryTable bigQueryTable =
        BigQueryTable.newBuilder()
            .setTableId(tableId)
            .setDatasetId(datasetId)
            .setProjectId(projectId)
            .build();

    // This represents the name of the column to analyze, which must contain numerical data
    String columnName = "Age";

    // Configure the privacy metric for the job
    FieldId fieldId = FieldId.newBuilder().setName(columnName).build();
    NumericalStatsConfig numericalStatsConfig =
        NumericalStatsConfig.newBuilder().setField(fieldId).build();
    PrivacyMetric privacyMetric =
        PrivacyMetric.newBuilder().setNumericalStatsConfig(numericalStatsConfig).build();

    // Create action to publish job status notifications over Google Cloud Pub/Sub
    ProjectTopicName topicName = ProjectTopicName.of(projectId, topicId);
    PublishToPubSub publishToPubSub =
        PublishToPubSub.newBuilder().setTopic(topicName.toString()).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the risk analysis job to perform
    RiskAnalysisJobConfig riskAnalysisJobConfig =
        RiskAnalysisJobConfig.newBuilder()
            .setSourceTable(bigQueryTable)
            .setPrivacyMetric(privacyMetric)
            .addActions(action)
            .build();

    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setRiskJob(riskAnalysisJobConfig)
            .build();

    // Send the request to the API using the client
    DlpJob dlpJob = dlpServiceClient.createDlpJob(createDlpJobRequest);

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);
    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Build a request to get the completed job
    GetDlpJobRequest getDlpJobRequest =
        GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();

    // Retrieve completed job status
    DlpJob completedJob = dlpServiceClient.getDlpJob(getDlpJobRequest);
    System.out.println("Job status: " + completedJob.getState());

    // Get the result and parse through and process the information
    NumericalStatsResult result = completedJob.getRiskDetails().getNumericalStatsResult();
    System.out.printf(
        "Value range : [%.3f, %.3f]\n",
        result.getMinValue().getFloatValue(), result.getMaxValue().getFloatValue());

    int percent = 1;
    Double lastValue = null;
    for (Value quantileValue : result.getQuantileValuesList()) {
      Double currentValue = quantileValue.getFloatValue();
      if (lastValue == null || !lastValue.equals(currentValue)) {
        System.out.printf("Value at %s %% quantile : %.3f", percent, currentValue);
      }
      lastValue = currentValue;
    }
  }
}
Example #27
Source File: InspectGcsFileWithSampling.java From java-docs-samples with Apache License 2.0
public static void inspectGcsFileWithSampling(
    String projectId, String gcsUri, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {

    // Specify the GCS file to be inspected and sampling configuration
    CloudStorageOptions cloudStorageOptions =
        CloudStorageOptions.newBuilder()
            .setFileSet(FileSet.newBuilder().setUrl(gcsUri))
            .setBytesLimitPerFile(200)
            .addFileTypes(FileType.TEXT_FILE)
            .setFilesLimitPercent(90)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .build();
    StorageConfig storageConfig =
        StorageConfig.newBuilder().setCloudStorageOptions(cloudStorageOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setExcludeInfoTypes(true)
            .setIncludeQuote(true)
            .setMinLikelihood(Likelihood.POSSIBLE)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);
    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
Example #28
Source File: InspectBigQueryTableWithSampling.java From java-docs-samples with Apache License 2.0
public static void inspectBigQueryTableWithSampling(
    String projectId, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {

    // Specify the BigQuery table to be inspected.
    BigQueryTable tableReference =
        BigQueryTable.newBuilder()
            .setProjectId("bigquery-public-data")
            .setDatasetId("usa_names")
            .setTableId("usa_1910_current")
            .build();

    BigQueryOptions bigQueryOptions =
        BigQueryOptions.newBuilder()
            .setTableReference(tableReference)
            .setRowsLimit(1000)
            .setSampleMethod(SampleMethod.RANDOM_START)
            .addIdentifyingFields(FieldId.newBuilder().setName("name"))
            .build();
    StorageConfig storageConfig =
        StorageConfig.newBuilder().setBigQueryOptions(bigQueryOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for complete list of info types
    InfoType infoType = InfoType.newBuilder().setName("PERSON_NAME").build();

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addInfoTypes(infoType)
            .setIncludeQuote(true)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);
    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
Example #29
Source File: InspectGcsFile.java From java-docs-samples with Apache License 2.0
public static void inspectGcsFile(
    String projectId, String gcsUri, String topicId, String subscriptionId)
    throws ExecutionException, InterruptedException, IOException {
  // Initialize client that will be used to send requests. This client only needs to be created
  // once, and can be reused for multiple requests. After completing all of your requests, call
  // the "close" method on the client to safely clean up any remaining background resources.
  try (DlpServiceClient dlp = DlpServiceClient.create()) {

    // Specify the GCS file to be inspected.
    CloudStorageOptions cloudStorageOptions =
        CloudStorageOptions.newBuilder()
            .setFileSet(FileSet.newBuilder().setUrl(gcsUri))
            .build();
    StorageConfig storageConfig =
        StorageConfig.newBuilder().setCloudStorageOptions(cloudStorageOptions).build();

    // Specify the type of info the inspection will look for.
    // See https://cloud.google.com/dlp/docs/infotypes-reference for complete list of info types
    List<InfoType> infoTypes =
        Stream.of("PHONE_NUMBER", "EMAIL_ADDRESS", "CREDIT_CARD_NUMBER")
            .map(it -> InfoType.newBuilder().setName(it).build())
            .collect(Collectors.toList());

    // Specify how the content should be inspected.
    InspectConfig inspectConfig =
        InspectConfig.newBuilder()
            .addAllInfoTypes(infoTypes)
            .setIncludeQuote(true)
            .build();

    // Specify the action that is triggered when the job completes.
    String pubSubTopic = String.format("projects/%s/topics/%s", projectId, topicId);
    Action.PublishToPubSub publishToPubSub =
        Action.PublishToPubSub.newBuilder().setTopic(pubSubTopic).build();
    Action action = Action.newBuilder().setPubSub(publishToPubSub).build();

    // Configure the long running job we want the service to perform.
    InspectJobConfig inspectJobConfig =
        InspectJobConfig.newBuilder()
            .setStorageConfig(storageConfig)
            .setInspectConfig(inspectConfig)
            .addActions(action)
            .build();

    // Create the request for the job configured above.
    CreateDlpJobRequest createDlpJobRequest =
        CreateDlpJobRequest.newBuilder()
            .setParent(LocationName.of(projectId, "global").toString())
            .setInspectJob(inspectJobConfig)
            .build();

    // Use the client to send the request.
    final DlpJob dlpJob = dlp.createDlpJob(createDlpJobRequest);
    System.out.println("Job created: " + dlpJob.getName());

    // Set up a Pub/Sub subscriber to listen on the job completion status
    final SettableApiFuture<Boolean> done = SettableApiFuture.create();
    ProjectSubscriptionName subscriptionName =
        ProjectSubscriptionName.of(projectId, subscriptionId);
    MessageReceiver messageHandler =
        (PubsubMessage pubsubMessage, AckReplyConsumer ackReplyConsumer) -> {
          handleMessage(dlpJob, done, pubsubMessage, ackReplyConsumer);
        };
    Subscriber subscriber = Subscriber.newBuilder(subscriptionName, messageHandler).build();
    subscriber.startAsync();

    // Wait for job completion semi-synchronously
    // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
    try {
      done.get(15, TimeUnit.MINUTES);
    } catch (TimeoutException e) {
      System.out.println("Job was not completed after 15 minutes.");
      return;
    } finally {
      subscriber.stopAsync();
      subscriber.awaitTerminated();
    }

    // Get the latest state of the job from the service
    GetDlpJobRequest request = GetDlpJobRequest.newBuilder().setName(dlpJob.getName()).build();
    DlpJob completedJob = dlp.getDlpJob(request);

    // Parse the response and process results.
    System.out.println("Job status: " + completedJob.getState());
    InspectDataSourceDetails.Result result = completedJob.getInspectDetails().getResult();
    System.out.println("Findings: ");
    for (InfoTypeStats infoTypeStat : result.getInfoTypeStatsList()) {
      System.out.print("\tInfo type: " + infoTypeStat.getInfoType().getName());
      System.out.println("\tCount: " + infoTypeStat.getCount());
    }
  }
}
Example #30
Source File: PubSubSubscriberTemplate.java From spring-cloud-gcp with Apache License 2.0
ConvertedPushedAcknowledgeablePubsubMessage(ProjectSubscriptionName projectSubscriptionName,
    PubsubMessage message, T payload, AckReplyConsumer ackReplyConsumer) {
  super(projectSubscriptionName, message, ackReplyConsumer);
  this.payload = payload;
}