Java Code Examples for io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde#serializer()

The following examples show how to use io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde#serializer(). All of them come from the kafka-tutorials project; the source file named above each example provides the full context.
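Every example relies on a helper such as makeSerializer(envProps) or getSpecificAvroSerde(envProps) that is defined elsewhere in its test class. A minimal sketch of what such a helper typically looks like, assuming envProps carries a schema.registry.url entry (the helper name and generic bound here are illustrative, not the tutorials' exact code):

  // Hypothetical helper mirroring the makeSerializer(envProps) calls in the examples below
  private static <T extends SpecificRecord> SpecificAvroSerde<T> makeSerializer(Properties envProps) {
    final SpecificAvroSerde<T> serde = new SpecificAvroSerde<>();
    final Map<String, String> config = new HashMap<>();
    // The serde needs the Schema Registry URL before serializer()/deserializer() are usable
    config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
    serde.configure(config, false); // false = configure as a value serde rather than a key serde
    return serde;
  }
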
Example 1
Source File: FindDistinctEventsTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void shouldFilterDistinctEvents() throws IOException, RestClientException {

  final FindDistinctEvents distinctifier = new FindDistinctEvents();
  // Assumed here: envProps and streamProps are loaded the same way as in the later examples
  Properties envProps = distinctifier.loadEnvProperties(TEST_CONFIG_FILE);
  Properties streamProps = distinctifier.buildStreamsProperties(envProps);

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final SpecificAvroSerde<Click> clickSerde = makeSerializer(envProps);

  Topology topology = distinctifier.buildTopology(envProps, clickSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

  Serializer<String> keySerializer = Serdes.String().serializer();

  ConsumerRecordFactory<String, Click> inputFactory = new ConsumerRecordFactory<>(
          keySerializer, clickSerde.serializer());

  final List<Click> clicks = asList(
      new Click("10.0.0.1",
          "https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html",
          "2019-09-16T14:53:43+00:00"),
      new Click("10.0.0.2",
          "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
          "2019-09-16T14:53:43+00:01"),
      new Click("10.0.0.3",
          "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
          "2019-09-16T14:53:43+00:03"),
      new Click("10.0.0.1",
          "https://docs.confluent.io/current/tutorials/examples/kubernetes/gke-base/docs/index.html",
          "2019-09-16T14:53:43+00:00"),
      new Click("10.0.0.2",
          "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
          "2019-09-16T14:53:43+00:01"),
      new Click("10.0.0.3",
          "https://www.confluent.io/hub/confluentinc/kafka-connect-datagen",
          "2019-09-16T14:53:43+00:03"));

  final List<Click> expectedOutput = asList(clicks.get(0), clicks.get(1), clicks.get(2)); // only the first occurrence of each distinct click survives the filter

  for (Click clk : clicks) {
    testDriver.pipeInput(inputFactory.create(inputTopic, clk.getIp(), clk));
  }

  Deserializer<String> keyDeserializer = Serdes.String().deserializer();
  List<Click> actualOutput = new ArrayList<>();
  while (true) {
    ProducerRecord<String, Click> record =
        testDriver.readOutput(outputTopic, keyDeserializer, clickSerde.deserializer());

    if (record != null) {
      actualOutput.add(record.value());
    } else {
      break;
    }
  }

  Assert.assertEquals(expectedOutput, actualOutput);
}
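ConsumerRecordFactory, pipeInput, and readOutput belong to the older TopologyTestDriver testing API; Kafka 2.4 replaced them with TestInputTopic and TestOutputTopic, which Example 5 below uses. A sketch of the same pipe-and-drain flow on the newer API, reusing the topic names and serdes from the example above:

  TestInputTopic<String, Click> input =
      testDriver.createInputTopic(inputTopic, keySerializer, clickSerde.serializer());
  TestOutputTopic<String, Click> output =
      testDriver.createOutputTopic(outputTopic, keyDeserializer, clickSerde.deserializer());

  for (Click clk : clicks) {
    input.pipeInput(clk.getIp(), clk);
  }
  // readValuesToList() drains the topic, so the manual null-check loop disappears
  List<Click> actualOutput = output.readValuesToList();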
 
Example 2
Source File: AggregatingSumTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void shouldSumTicketSales() throws IOException, RestClientException {
  AggregatingSum aggSum = new AggregatingSum();
  Properties envProps = aggSum.loadEnvProperties(TEST_CONFIG_FILE);
  Properties streamProps = aggSum.buildStreamsProperties(envProps);

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final SpecificAvroSerde<TicketSale> ticketSaleSpecificAvroSerde = makeSerializer(envProps);

  Topology topology = aggSum.buildTopology(envProps, ticketSaleSpecificAvroSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

  Serializer<String> keySerializer = Serdes.String().serializer();
  Deserializer<String> keyDeserializer = Serdes.String().deserializer();

  ConsumerRecordFactory<String, TicketSale> inputFactory =
      new ConsumerRecordFactory<>(keySerializer, ticketSaleSpecificAvroSerde.serializer());

  final List<TicketSale> input = asList(
                new TicketSale("Die Hard", "2019-07-18T10:00:00Z", 12),
                new TicketSale("Die Hard", "2019-07-18T10:01:00Z", 12),
                new TicketSale("The Godfather", "2019-07-18T10:01:31Z", 12),
                new TicketSale("Die Hard", "2019-07-18T10:01:36Z", 24),
                new TicketSale("The Godfather", "2019-07-18T10:02:00Z", 18),
                new TicketSale("The Big Lebowski", "2019-07-18T11:03:21Z", 12),
                new TicketSale("The Big Lebowski", "2019-07-18T11:03:50Z", 12),
                new TicketSale("The Godfather", "2019-07-18T11:40:00Z", 36),
                new TicketSale("The Godfather", "2019-07-18T11:40:09Z", 18)
              );

  List<Integer> expectedOutput = new ArrayList<>(Arrays.asList(12, 24, 12, 48, 30, 12, 24, 66, 84)); // the running sum of ticket sales per film title, in output order

  for (TicketSale ticketSale : input) {
    testDriver.pipeInput(inputFactory.create(inputTopic, "", ticketSale));
  }

  List<Integer> actualOutput = new ArrayList<>();
  while (true) {
    ProducerRecord<String, Integer> record =
        testDriver.readOutput(outputTopic, keyDeserializer, Serdes.Integer().deserializer());

    if (record != null) {
      actualOutput.add(record.value());
    } else {
      break;
    }
  }

  System.out.println(actualOutput);
  Assert.assertEquals(expectedOutput, actualOutput);

}
 
Example 3
Source File: AggregatingCountTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void shouldCountTicketSales() throws IOException, RestClientException {
  AggregatingCount aggCount = new AggregatingCount();
  Properties envProps = aggCount.loadEnvProperties(TEST_CONFIG_FILE);
  Properties streamProps = aggCount.buildStreamsProperties(envProps);

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final SpecificAvroSerde<TicketSale> ticketSaleSpecificAvroSerde = makeSerializer(envProps);

  Topology topology = aggCount.buildTopology(envProps, ticketSaleSpecificAvroSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

  Serializer<String> keySerializer = Serdes.String().serializer();
  Deserializer<String> keyDeserializer = Serdes.String().deserializer();

  ConsumerRecordFactory<String, TicketSale> inputFactory =
      new ConsumerRecordFactory<>(keySerializer, ticketSaleSpecificAvroSerde.serializer());

  final List<TicketSale> input = asList(
                new TicketSale("Die Hard", "2019-07-18T10:00:00Z", 12),
                new TicketSale("Die Hard", "2019-07-18T10:01:00Z", 12),
                new TicketSale("The Godfather", "2019-07-18T10:01:31Z", 12),
                new TicketSale("Die Hard", "2019-07-18T10:01:36Z", 24),
                new TicketSale("The Godfather", "2019-07-18T10:02:00Z", 18),
                new TicketSale("The Big Lebowski", "2019-07-18T11:03:21Z", 12),
                new TicketSale("The Big Lebowski", "2019-07-18T11:03:50Z", 12),
                new TicketSale("The Godfather", "2019-07-18T11:40:00Z", 36),
                new TicketSale("The Godfather", "2019-07-18T11:40:09Z", 18)
              );

  List<Long> expectedOutput = new ArrayList<>(Arrays.asList(1L, 2L, 1L, 3L, 2L, 1L, 2L, 3L, 4L)); // the running count of sales per film title, in output order

  for (TicketSale ticketSale : input) {
    testDriver.pipeInput(inputFactory.create(inputTopic, "", ticketSale));
  }

  List<Long> actualOutput = new ArrayList<>();
  while (true) {
    ProducerRecord<String, Long> record =
        testDriver.readOutput(outputTopic, keyDeserializer, Serdes.Long().deserializer());

    if (record != null) {
      actualOutput.add(record.value());
    } else {
      break;
    }
  }

  System.out.println(actualOutput);
  Assert.assertEquals(expectedOutput, actualOutput);

}
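The while (true) / readOutput drain loop is repeated verbatim across Examples 1 through 4 and 6. If you are writing tests in this style, it can be factored into a small helper (a hypothetical refactoring, not part of the tutorials):

  static <K, V> List<V> drainValues(TopologyTestDriver driver, String topic,
                                    Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer) {
    final List<V> values = new ArrayList<>();
    ProducerRecord<K, V> record;
    // readOutput returns null once the output topic has been exhausted
    while ((record = driver.readOutput(topic, keyDeserializer, valueDeserializer)) != null) {
      values.add(record.value());
    }
    return values;
  }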
 
Example 4
Source File: StreamsIngestTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void shouldCreateKeyedStream() throws IOException, RestClientException {
  StreamsIngest si = new StreamsIngest();
  Properties envProps = si.loadEnvProperties(TEST_CONFIG_FILE);
  Properties streamProps = si.buildStreamsProperties(envProps);

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final SpecificAvroSerde<City> citySpecificAvroSerde = makeSerializer(envProps);

  Topology topology = si.buildTopology(envProps, citySpecificAvroSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

  Serializer<String> keySerializer = Serdes.String().serializer();
  Deserializer<Long> keyDeserializer = Serdes.Long().deserializer(); // the topology re-keys records by city id, so output keys are Longs

  ConsumerRecordFactory<String, City> inputFactory =
      new ConsumerRecordFactory<>(keySerializer, citySpecificAvroSerde.serializer());

  // Fixture
  City c1 = new City(1L, "Raleigh", "NC");
  City c2 = new City(2L, "Mountain View", "CA");
  City c3 = new City(3L, "Knoxville", "TN");
  City c4 = new City(4L, "Houston", "TX");
  City c5 = new City(5L, "Olympia", "WA");
  City c6 = new City(6L, "Bismarck", "ND");
  // end Fixture

  final List<City> input = asList(c1, c2, c3, c4, c5, c6);

  // Expected output: the new keys of the re-keyed stream, i.e. each city's id
  final List<Long> expectedOutput = asList(1L, 2L, 3L, 4L, 5L, 6L);

  for (City city : input) {
    testDriver.pipeInput(inputFactory.create(inputTopic, null, city));
  }

  List<Long> actualOutput = new ArrayList<>();
  while (true) {
    ProducerRecord<Long, City> record =
        testDriver.readOutput(outputTopic, keyDeserializer, citySpecificAvroSerde.deserializer());

    if (record != null) {
      actualOutput.add(record.key());
    } else {
      break;
    }
  }

  Assert.assertEquals(expectedOutput, actualOutput);
}
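Note that Examples 1 through 4 never close their TopologyTestDriver, which can leak state stores between tests. The try-with-resources form that Example 5 below uses is the safer pattern:

  // Sketch: closing the driver tears down its state stores and local state directory
  try (TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps)) {
    // pipe input and read output as above
  }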
 
Example 5
Source File: CogroupingStreamsTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void cogroupingTest() throws IOException {
    final CogroupingStreams instance = new CogroupingStreams();
    final Properties envProps = instance.loadEnvProperties(TEST_CONFIG_FILE);

    final Properties streamProps = instance.buildStreamsProperties(envProps);

    final String appOneInputTopicName = envProps.getProperty("app-one.topic.name");
    final String appTwoInputTopicName = envProps.getProperty("app-two.topic.name");
    final String appThreeInputTopicName = envProps.getProperty("app-three.topic.name");
    final String totalResultOutputTopicName = envProps.getProperty("output.topic.name");
  
    final Topology topology = instance.buildTopology(envProps);
    try (final TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps)) {

        final Serde<String> stringAvroSerde = CogroupingStreams.getPrimitiveAvroSerde(envProps, true);
        final SpecificAvroSerde<LoginEvent> loginEventSerde = CogroupingStreams.getSpecificAvroSerde(envProps);
        final SpecificAvroSerde<LoginRollup> rollupSerde = CogroupingStreams.getSpecificAvroSerde(envProps);

        final Serializer<String> keySerializer = stringAvroSerde.serializer();
        final Deserializer<String> keyDeserializer = stringAvroSerde.deserializer();
        final Serializer<LoginEvent> loginEventSerializer = loginEventSerde.serializer();


        final TestInputTopic<String, LoginEvent>  appOneInputTopic = testDriver.createInputTopic(appOneInputTopicName, keySerializer, loginEventSerializer);
        final TestInputTopic<String, LoginEvent>  appTwoInputTopic = testDriver.createInputTopic(appTwoInputTopicName, keySerializer, loginEventSerializer);
        final TestInputTopic<String, LoginEvent>  appThreeInputTopic = testDriver.createInputTopic(appThreeInputTopicName, keySerializer, loginEventSerializer);

        final TestOutputTopic<String, LoginRollup> outputTopic = testDriver.createOutputTopic(totalResultOutputTopicName, keyDeserializer, rollupSerde.deserializer());


        final List<LoginEvent> appOneEvents = new ArrayList<>();
        appOneEvents.add(LoginEvent.newBuilder().setAppId("one").setUserId("foo").setTime(5L).build());
        appOneEvents.add(LoginEvent.newBuilder().setAppId("one").setUserId("bar").setTime(6L).build());
        appOneEvents.add(LoginEvent.newBuilder().setAppId("one").setUserId("bar").setTime(7L).build());

        final List<LoginEvent> appTwoEvents = new ArrayList<>();
        appTwoEvents.add(LoginEvent.newBuilder().setAppId("two").setUserId("foo").setTime(5L).build());
        appTwoEvents.add(LoginEvent.newBuilder().setAppId("two").setUserId("foo").setTime(6L).build());
        appTwoEvents.add(LoginEvent.newBuilder().setAppId("two").setUserId("bar").setTime(7L).build());

        final List<LoginEvent> appThreeEvents = new ArrayList<>();
        appThreeEvents.add(LoginEvent.newBuilder().setAppId("three").setUserId("foo").setTime(5L).build());
        appThreeEvents.add(LoginEvent.newBuilder().setAppId("three").setUserId("foo").setTime(6L).build());
        appThreeEvents.add(LoginEvent.newBuilder().setAppId("three").setUserId("bar").setTime(7L).build());
        appThreeEvents.add(LoginEvent.newBuilder().setAppId("three").setUserId("bar").setTime(9L).build());

        final Map<String, Map<String, Long>> expectedEventRollups = new TreeMap<>();
        final Map<String, Long> expectedAppOneRollup = new HashMap<>();
        final LoginRollup expectedLoginRollup = new LoginRollup(expectedEventRollups);
        expectedAppOneRollup.put("foo", 1L);
        expectedAppOneRollup.put("bar", 2L);
        expectedEventRollups.put("one", expectedAppOneRollup);

        final Map<String, Long> expectedAppTwoRollup = new HashMap<>();
        expectedAppTwoRollup.put("foo", 2L);
        expectedAppTwoRollup.put("bar", 1L);
        expectedEventRollups.put("two", expectedAppTwoRollup);

        final Map<String, Long> expectedAppThreeRollup = new HashMap<>();
        expectedAppThreeRollup.put("foo", 2L);
        expectedAppThreeRollup.put("bar", 2L);
        expectedEventRollups.put("three", expectedAppThreeRollup);

        sendEvents(appOneEvents, appOneInputTopic);
        sendEvents(appTwoEvents, appTwoInputTopic);
        sendEvents(appThreeEvents, appThreeInputTopic);

        final List<LoginRollup> actualLoginEventResults = outputTopic.readValuesToList();
        final Map<String, Map<String, Long>> actualRollupMap = new HashMap<>();
        for (LoginRollup actualLoginEventResult : actualLoginEventResults) {
            actualRollupMap.putAll(actualLoginEventResult.getLoginByAppAndUser());
        }
        final LoginRollup actualLoginRollup = new LoginRollup(actualRollupMap);

        assertEquals(expectedLoginRollup, actualLoginRollup);
    }
}
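getPrimitiveAvroSerde(envProps, true) above builds a Schema Registry-backed Serde for a primitive type (here the String key; true configures it as a key serde). A sketch of one common implementation, wrapping Confluent's KafkaAvroSerializer and KafkaAvroDeserializer (an assumption about the helper's shape, not necessarily the tutorial's exact code):

  @SuppressWarnings("unchecked")
  static <T> Serde<T> getPrimitiveAvroSerde(final Properties envProps, final boolean isKey) {
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    final Map<String, String> config = new HashMap<>();
    config.put("schema.registry.url", envProps.getProperty("schema.registry.url"));
    serializer.configure(config, isKey);
    deserializer.configure(config, isKey);
    // KafkaAvroSerializer/Deserializer operate on Object, so narrow to the caller's type
    return (Serde<T>) Serdes.serdeFrom(serializer, deserializer);
  }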
 
Example 6
Source File: FilterEventsTest.java    From kafka-tutorials with Apache License 2.0
@Test
public void shouldFilterGRRMartinsBooks() throws IOException, RestClientException {
  FilterEvents fe = new FilterEvents();
  Properties envProps = fe.loadEnvProperties(TEST_CONFIG_FILE);
  Properties streamProps = fe.buildStreamsProperties(envProps);

  String inputTopic = envProps.getProperty("input.topic.name");
  String outputTopic = envProps.getProperty("output.topic.name");

  final SpecificAvroSerde<Publication> publicationSpecificAvroSerde = makeSerializer(envProps);

  Topology topology = fe.buildTopology(envProps, publicationSpecificAvroSerde);
  TopologyTestDriver testDriver = new TopologyTestDriver(topology, streamProps);

  Serializer<String> keySerializer = Serdes.String().serializer();
  Deserializer<String> keyDeserializer = Serdes.String().deserializer();

  ConsumerRecordFactory<String, Publication> inputFactory =
      new ConsumerRecordFactory<>(keySerializer, publicationSpecificAvroSerde.serializer());

  // Fixture
  Publication iceAndFire = new Publication("George R. R. Martin", "A Song of Ice and Fire");
  Publication silverChair = new Publication("C.S. Lewis", "The Silver Chair");
  Publication perelandra = new Publication("C.S. Lewis", "Perelandra");
  Publication fireAndBlood = new Publication("George R. R. Martin", "Fire & Blood");
  Publication theHobbit = new Publication("J. R. R. Tolkien", "The Hobbit");
  Publication lotr = new Publication("J. R. R. Tolkien", "The Lord of the Rings");
  Publication dreamOfSpring = new Publication("George R. R. Martin", "A Dream of Spring");
  Publication fellowship = new Publication("J. R. R. Tolkien", "The Fellowship of the Ring");
  Publication iceDragon = new Publication("George R. R. Martin", "The Ice Dragon");
  // end Fixture

  final List<Publication> input = asList(
      iceAndFire, silverChair, perelandra, fireAndBlood, theHobbit, lotr, dreamOfSpring, fellowship, iceDragon);

  final List<Publication> expectedOutput = asList(iceAndFire, fireAndBlood, dreamOfSpring, iceDragon); // only George R. R. Martin's publications pass the filter

  for (Publication publication : input) {
    testDriver.pipeInput(inputFactory.create(inputTopic, publication.getName(), publication));
  }

  List<Publication> actualOutput = new ArrayList<>();
  while (true) {
    ProducerRecord<String, Publication> record =
        testDriver.readOutput(outputTopic, keyDeserializer, publicationSpecificAvroSerde.deserializer());

    if (record != null) {
      actualOutput.add(record.value());
    } else {
      break;
    }
  }

  Assert.assertEquals(expectedOutput, actualOutput);
}