com.google.pubsub.v1.PublishRequest Java Examples
The following examples, drawn from open-source projects, show how to use com.google.pubsub.v1.PublishRequest. The originating source file and license are noted above each example.
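Before the project code, here is a minimal sketch of the pattern the examples share: build one or more PubsubMessage values, wrap them in a PublishRequest addressed to a fully qualified topic, and read the server-assigned message IDs from the PublishResponse returned by the generated Publisher stub. The class name, topic string, attribute key, and the pre-built ManagedChannel below are illustrative assumptions; channel configuration and authentication are omitted.

import com.google.protobuf.ByteString;
import com.google.pubsub.v1.PublishRequest;
import com.google.pubsub.v1.PublishResponse;
import com.google.pubsub.v1.PublisherGrpc;
import com.google.pubsub.v1.PubsubMessage;
import io.grpc.ManagedChannel;

public final class PublishRequestSketch {

  // Hypothetical helper: publishes a single UTF-8 payload to the given topic
  // over an already-configured gRPC channel.
  static PublishResponse publishSingleMessage(
      ManagedChannel channel, String topic, String payload) {
    PubsubMessage message =
        PubsubMessage.newBuilder()
            .setData(ByteString.copyFromUtf8(payload))
            .putAttributes("origin", "example") // optional key/value metadata
            .build();

    PublishRequest request =
        PublishRequest.newBuilder()
            .setTopic(topic) // e.g. "projects/my-project/topics/my-topic"
            .addMessages(message)
            .build();

    // The response carries one server-assigned ID per published message.
    return PublisherGrpc.newBlockingStub(channel).publish(request);
  }
}

Most of the examples below follow this shape, differing mainly in how the messages are assembled (trace headers, timestamp and ID attributes) and in how the PublishResponse is validated.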
Example #1
Source File: Publisher.java From curiostack with MIT License
public ListenableFuture<String> publish(PubsubMessage message) {
  Span span = tracer.currentSpan();
  if (span != null) {
    PubsubMessage.Builder messageBuilder = message.toBuilder();
    traceInjector.inject(span.context(), messageBuilder);
    message = messageBuilder.build();
  }
  PublishRequest request =
      PublishRequest.newBuilder().setTopic(options.getTopic()).addMessages(message).build();
  return Futures.transform(
      stub.publish(request),
      (response) -> {
        if (response.getMessageIdsCount() != 1) {
          throw new IllegalStateException(
              String.format(
                  "The publish result count %s does not match "
                      + "the expected 1 result. Please contact Cloud Pub/Sub support "
                      + "if this frequently occurs",
                  response.getMessageIdsCount()));
        }
        return response.getMessageIds(0);
      },
      MoreExecutors.directExecutor());
}
Example #2
Source File: PubsubGrpcClient.java From beam with Apache License 2.0
@Override
public int publish(TopicPath topic, List<OutgoingMessage> outgoingMessages) throws IOException {
  PublishRequest.Builder request = PublishRequest.newBuilder().setTopic(topic.getPath());
  for (OutgoingMessage outgoingMessage : outgoingMessages) {
    PubsubMessage.Builder message = outgoingMessage.message().toBuilder();

    if (timestampAttribute != null) {
      message.putAttributes(
          timestampAttribute, String.valueOf(outgoingMessage.timestampMsSinceEpoch()));
    }

    if (idAttribute != null && !Strings.isNullOrEmpty(outgoingMessage.recordId())) {
      message.putAttributes(idAttribute, outgoingMessage.recordId());
    }

    request.addMessages(message);
  }

  PublishResponse response = publisherStub().publish(request.build());
  return response.getMessageIdsCount();
}
Example #3
Source File: PublisherService.java From kafka-pubsub-emulator with Apache License 2.0
@Override
public void publish(PublishRequest request, StreamObserver<PublishResponse> responseObserver) {
  logger.atFine().log(
      "Publishing %d messages to %s", request.getMessagesCount(), request.getTopic());
  Optional<Topic> topic = configurationManager.getTopicByName(request.getTopic());
  if (!topic.isPresent()) {
    String message = request.getTopic() + " is not a valid Topic";
    logger.atWarning().log(message);
    responseObserver.onError(Status.NOT_FOUND.withDescription(message).asException());
  } else {
    publishToKafka(request, topic.get(), responseObserver);
  }
}
Example #4
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
/**
 * Publish tests need to manipulate the MockProducer in a separate thread from the blocking
 * publish request so we'll use the PUBLISH_EXECUTOR to submit Runnables that implement the
 * desired producer behaviors.
 */
@Test
public void publish() {
  int messages = 5;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/topic-1")
          .addAllMessages(generatePubsubMessages(messages))
          .build();
  MockProducer<String, ByteBuffer> mockProducer = startPublishExecutor(messages);

  PublishResponse response = blockingStub.publish(request);

  List<String> topics = new ArrayList<>();
  List<String> data = new ArrayList<>();
  for (ProducerRecord<String, ByteBuffer> producerRecord : mockProducer.history()) {
    topics.add(producerRecord.topic());
    data.add(UTF_8.decode(producerRecord.value()).toString());
  }

  assertThat(response.getMessageIdsList(), Matchers.contains("0-0", "0-1", "0-2", "0-3", "0-4"));
  assertThat(
      topics,
      Matchers.contains(
          "kafka-topic-1", "kafka-topic-1", "kafka-topic-1", "kafka-topic-1", "kafka-topic-1"));
  assertThat(
      data,
      Matchers.contains("message-0", "message-1", "message-2", "message-3", "message-4"));
  verify(statisticsManager, times(5))
      .computePublish(
          eq("projects/project-1/topics/topic-1"),
          argThat(message -> message.toStringUtf8().matches(MESSAGE_CONTENT_REGEX)),
          anyLong());
  verify(statisticsManager, never()).computePublishError(anyString());
}
Example #5
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_topicDoesNotExist() {
  expectedException.expect(StatusRuntimeException.class);
  expectedException.expectMessage(Status.NOT_FOUND.getCode().toString());

  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/unknown")
          .addAllMessages(generatePubsubMessages(1))
          .build();
  blockingStub.publish(request);
}
Example #6
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_withAttributes() {
  int messages = 3;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/topic-2")
          .addAllMessages(generatePubsubMessagesWithHeader(messages))
          .build();
  MockProducer<String, ByteBuffer> producer = startPublishExecutor(messages);

  PublishResponse response = blockingStub.publish(request);
  assertThat(response.getMessageIdsList(), Matchers.contains("0-0", "0-1", "0-2"));

  List<Headers> headers =
      producer.history().stream().map(ProducerRecord::headers).collect(Collectors.toList());
  assertThat(
      headers,
      Matchers.contains(
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8)))),
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8)))),
          new RecordHeaders(
              Collections.singletonList(
                  new RecordHeader("some-key", "some-value".getBytes(UTF_8))))));
  verify(statisticsManager, times(3))
      .computePublish(
          eq("projects/project-1/topics/topic-2"),
          argThat(message -> message.toStringUtf8().matches(MESSAGE_CONTENT_REGEX)),
          anyLong());
  verify(statisticsManager, never()).computePublishError(anyString());
}
Example #7
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_producerFails() {
  int messages = 5;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/topic-1")
          .addAllMessages(generatePubsubMessages(messages))
          .build();
  PUBLISH_EXECUTOR.submit(
      () -> {
        MockProducer<String, ByteBuffer> producer =
            kafkaClientFactory.getCreatedProducers().get(0);
        while (producer.history().size() < messages) {
          Thread.yield();
        }
        for (int i = 0; i < messages; i++) {
          producer.errorNext(new RuntimeException("Send Operation Failed"));
        }
      });

  expectedException.expect(StatusRuntimeException.class);
  expectedException.expectMessage(Status.INTERNAL.getCode().toString());
  blockingStub.publish(request);
  verify(statisticsManager).computePublishError(eq("projects/project-1/topics/topic-1"));
  verify(statisticsManager, never())
      .computePublish(anyString(), any(ByteString.class), anyLong());
}
Example #8
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_producerTimesOut() {
  int messages = 5;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/topic-1")
          .addAllMessages(generatePubsubMessages(messages))
          .build();
  PUBLISH_EXECUTOR.submit(
      () -> {
        MockProducer<String, ByteBuffer> producer =
            kafkaClientFactory.getCreatedProducers().get(0);
        while (producer.history().size() < messages) {
          Thread.yield();
        }
        for (int i = 0; i < messages - 1; i++) {
          producer.completeNext();
        }
      });

  PublishResponse response = blockingStub.publish(request);
  assertThat(response.getMessageIdsList(), Matchers.contains("0-0", "0-1", "0-2", "0-3"));
  verify(statisticsManager, times(4))
      .computePublish(
          eq("projects/project-1/topics/topic-1"),
          argThat(message -> message.toStringUtf8().matches(MESSAGE_CONTENT_REGEX)),
          anyLong());
  verify(statisticsManager, never()).computePublishError(anyString());
}
Example #9
Source File: PublisherService.java From kafka-pubsub-emulator with Apache License 2.0
private void publishToKafka(
    PublishRequest request, Topic topic, StreamObserver<PublishResponse> responseObserver) {
  Instant start = Instant.now();
  String kafkaTopic =
      topic.getLabelsOrDefault(KAFKA_TOPIC, ProjectTopicName.parse(topic.getName()).getTopic());
  // Round-robin across the configured Kafka producers.
  int producerIndex = nextProducerIndex.getAndUpdate((value) -> ++value % kafkaProducers.size());
  Producer<String, ByteBuffer> producer = kafkaProducers.get(producerIndex);

  CountDownLatch callbacks = new CountDownLatch(request.getMessagesCount());
  AtomicInteger failures = new AtomicInteger();
  PublishResponse.Builder builder = PublishResponse.newBuilder();
  // Each send() callback either records the Kafka partition-offset as the
  // Pub/Sub message ID or counts a failure, then releases the latch.
  request
      .getMessagesList()
      .forEach(
          m -> {
            ProducerRecord<String, ByteBuffer> producerRecord = buildProducerRecord(kafkaTopic, m);
            long publishedAt = System.currentTimeMillis();
            producer.send(
                producerRecord,
                (recordMetadata, exception) -> {
                  if (recordMetadata != null) {
                    builder.addMessageIds(
                        recordMetadata.partition() + "-" + recordMetadata.offset());
                    statisticsManager.computePublish(topic.getName(), m.getData(), publishedAt);
                  } else {
                    logger.atSevere().withCause(exception).log("Unable to Publish message");
                    statisticsManager.computePublishError(topic.getName());
                    failures.incrementAndGet();
                  }
                  callbacks.countDown();
                });
          });

  try {
    // Wait for all producer callbacks (up to MAX_PUBLISH_WAIT seconds) before responding.
    if (!callbacks.await(MAX_PUBLISH_WAIT, TimeUnit.SECONDS)) {
      logger.atWarning().log(
          "%d callbacks remain after %ds", callbacks.getCount(), MAX_PUBLISH_WAIT);
    }
    logger.atFine().log(
        "Published %d of %d messages to %s using KafkaProducer %d in %dms",
        builder.getMessageIdsCount(),
        request.getMessagesCount(),
        kafkaTopic,
        producerIndex,
        Duration.between(start, Instant.now()).toMillis());
    if (failures.get() == 0) {
      responseObserver.onNext(builder.build());
      responseObserver.onCompleted();
    } else {
      String message =
          failures.get() + " of " + request.getMessagesCount() + " Messages failed to Publish";
      logger.atWarning().log(message);
      responseObserver.onError(Status.INTERNAL.withDescription(message).asException());
    }
  } catch (InterruptedException e) {
    responseObserver.onError(Status.INTERNAL.withCause(e).asException());
  }
}
Example #10
Source File: PublisherServiceTest.java From kafka-pubsub-emulator with Apache License 2.0
@Test
public void publish_implicitKafkaTopic() {
  blockingStub.createTopic(
      Topic.newBuilder().setName("projects/project-1/topics/implicit-kafka-topic").build());

  int messages = 5;
  PublishRequest request =
      PublishRequest.newBuilder()
          .setTopic("projects/project-1/topics/implicit-kafka-topic")
          .addAllMessages(generatePubsubMessages(messages))
          .build();
  MockProducer<String, ByteBuffer> mockProducer = startPublishExecutor(messages);

  PublishResponse response = blockingStub.publish(request);

  List<String> topics = new ArrayList<>();
  List<String> data = new ArrayList<>();
  for (ProducerRecord<String, ByteBuffer> producerRecord : mockProducer.history()) {
    topics.add(producerRecord.topic());
    data.add(UTF_8.decode(producerRecord.value()).toString());
  }

  assertThat(response.getMessageIdsList(), Matchers.contains("0-0", "0-1", "0-2", "0-3", "0-4"));
  assertThat(
      topics,
      Matchers.contains(
          "project-1" + KAFKA_TOPIC_SEPARATOR + "implicit-kafka-topic",
          "project-1" + KAFKA_TOPIC_SEPARATOR + "implicit-kafka-topic",
          "project-1" + KAFKA_TOPIC_SEPARATOR + "implicit-kafka-topic",
          "project-1" + KAFKA_TOPIC_SEPARATOR + "implicit-kafka-topic",
          "project-1" + KAFKA_TOPIC_SEPARATOR + "implicit-kafka-topic"));
  assertThat(
      data,
      Matchers.contains("message-0", "message-1", "message-2", "message-3", "message-4"));
  verify(statisticsManager, times(5))
      .computePublish(
          eq("projects/project-1/topics/implicit-kafka-topic"),
          argThat(message -> message.toStringUtf8().matches(MESSAGE_CONTENT_REGEX)),
          anyLong());
  verify(statisticsManager, never()).computePublishError(anyString());
}
Example #11
Source File: PubsubGrpcClientTest.java From beam with Apache License 2.0
@Test
public void publishOneMessage() throws IOException {
  String expectedTopic = TOPIC.getPath();
  PubsubMessage expectedPubsubMessage =
      PubsubMessage.newBuilder()
          .setData(ByteString.copyFrom(DATA.getBytes(StandardCharsets.UTF_8)))
          .putAllAttributes(ATTRIBUTES)
          .putAllAttributes(
              ImmutableMap.of(
                  TIMESTAMP_ATTRIBUTE, String.valueOf(MESSAGE_TIME), ID_ATTRIBUTE, RECORD_ID))
          .build();
  final PublishRequest expectedRequest =
      PublishRequest.newBuilder()
          .setTopic(expectedTopic)
          .addAllMessages(ImmutableList.of(expectedPubsubMessage))
          .build();
  final PublishResponse response =
      PublishResponse.newBuilder().addAllMessageIds(ImmutableList.of(MESSAGE_ID)).build();
  final List<PublishRequest> requestsReceived = new ArrayList<>();
  PublisherImplBase publisherImplBase =
      new PublisherImplBase() {
        @Override
        public void publish(
            PublishRequest request, StreamObserver<PublishResponse> responseObserver) {
          requestsReceived.add(request);
          responseObserver.onNext(response);
          responseObserver.onCompleted();
        }
      };
  Server server =
      InProcessServerBuilder.forName(channelName).addService(publisherImplBase).build().start();
  try {
    OutgoingMessage actualMessage =
        OutgoingMessage.of(
            com.google.pubsub.v1.PubsubMessage.newBuilder()
                .setData(ByteString.copyFromUtf8(DATA))
                .putAllAttributes(ATTRIBUTES)
                .build(),
            MESSAGE_TIME,
            RECORD_ID);
    int n = client.publish(TOPIC, ImmutableList.of(actualMessage));
    assertEquals(1, n);
    assertEquals(expectedRequest, Iterables.getOnlyElement(requestsReceived));
  } finally {
    server.shutdownNow();
  }
}