com.github.jcustenborder.kafka.connect.utils.jackson.ObjectMapperFactory Java Examples
The following examples show how to use com.github.jcustenborder.kafka.connect.utils.jackson.ObjectMapperFactory.
Each example was taken from an open-source project; the source file and its license are noted above each listing.
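All of the examples below go through ObjectMapperFactory.INSTANCE, a shared, pre-configured Jackson ObjectMapper that also understands Kafka Connect types such as Schema (see Examples #3 and #17). As a minimal sketch of the common pattern, assuming connect-utils and the Kafka Connect API are on the classpath (the class name ObjectMapperFactoryDemo is a placeholder for illustration):

import com.fasterxml.jackson.databind.SerializationFeature;
import com.github.jcustenborder.kafka.connect.utils.jackson.ObjectMapperFactory;
import org.apache.kafka.connect.data.Schema;

public class ObjectMapperFactoryDemo {
  public static void main(String[] args) throws Exception {
    // Enable pretty-printing, as several of the test fixtures below do.
    ObjectMapperFactory.INSTANCE.configure(SerializationFeature.INDENT_OUTPUT, true);

    // Round-trip a Kafka Connect Schema through JSON.
    String json = ObjectMapperFactory.INSTANCE.writeValueAsString(Schema.STRING_SCHEMA);
    Schema parsed = ObjectMapperFactory.INSTANCE.readValue(json, Schema.class);
    System.out.println(parsed + " <= " + json);
  }
}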
Example #1
Source File: BaseDocumentationTest.java (from connect-utils, Apache License 2.0)
@BeforeEach
public void before() throws MalformedURLException {
  ObjectMapperFactory.INSTANCE.configure(SerializationFeature.INDENT_OUTPUT, true);
  Arrays.asList(
      this.targetDirectory,
      this.outputDirectory,
      this.sourcesDirectory,
      this.sourcesExamplesDirectory,
      this.sinksDirectory,
      this.sinksExamplesDirectory,
      this.transformationsDirectory,
      this.transformationsExampleDirectory,
      this.convertersDirectory
  ).stream()
      .filter(f -> !f.isDirectory())
      .forEach(File::mkdirs);
  log.info("before() - {}", this.getClass());

  Package pkg = this.getPackage();
  this.plugin = pluginCache.computeIfAbsent(pkg, aPackage -> {
    PluginLoader loader = new PluginLoader(aPackage);
    return loader.load();
  });
}
Example #2
Source File: SpoolDirJsonSourceTask.java (from kafka-connect-spooldir, Apache License 2.0)
@Override
protected void configure(InputStream inputStream, Long lastOffset) throws IOException {
  if (null != jsonParser) {
    log.trace("configure() - Closing existing json parser.");
    jsonParser.close();
  }
  this.jsonParser = this.jsonFactory.createParser(inputStream);
  this.iterator = ObjectMapperFactory.INSTANCE.readValues(this.jsonParser, JsonNode.class);
  this.offset = -1;

  if (null != lastOffset) {
    int skippedRecords = 1;
    while (this.iterator.hasNext() && skippedRecords <= lastOffset) {
      next();
      skippedRecords++;
    }
    log.trace("configure() - Skipped {} record(s).", skippedRecords);
    log.info("configure() - Starting on offset {}", this.offset);
  }
}
Example #3
Source File: AbstractSpoolDirSourceConnectorConfig.java (from kafka-connect-spooldir, Apache License 2.0)
Schema readSchema(final String key) {
  String schema = this.getString(key);
  Schema result;
  if (Strings.isNullOrEmpty(schema)) {
    result = null;
  } else {
    try {
      result = ObjectMapperFactory.INSTANCE.readValue(schema, Schema.class);
    } catch (IOException e) {
      throw new DataException("Could not read schema from '" + key + "'", e);
    }
  }
  return result;
}
Example #4
Source File: JsonSchemaGenerator.java (from kafka-connect-spooldir, Apache License 2.0)
@Override
protected Map<String, Schema.Type> determineFieldTypes(InputStream inputStream) throws IOException {
  Map<String, Schema.Type> typeMap = new LinkedHashMap<>();
  JsonFactory factory = new JsonFactory();
  try (JsonParser parser = factory.createParser(inputStream)) {
    Iterator<JsonNode> iterator = ObjectMapperFactory.INSTANCE.readValues(parser, JsonNode.class);
    while (iterator.hasNext()) {
      JsonNode node = iterator.next();
      if (node.isObject()) {
        Iterator<String> fieldNames = node.fieldNames();
        while (fieldNames.hasNext()) {
          typeMap.put(fieldNames.next(), Schema.Type.STRING);
        }
        break;
      }
    }
  }
  return typeMap;
}
Example #5
Source File: SpoolDirSchemaLessJsonSourceTask.java (from kafka-connect-spooldir, Apache License 2.0)
@Override
protected List<SourceRecord> process() throws IOException {
  int recordCount = 0;
  List<SourceRecord> records = new ArrayList<>(this.config.batchSize);
  while (recordCount < this.config.batchSize && this.nodeIterator.hasNext()) {
    JsonNode node = this.nodeIterator.next();
    String value = ObjectMapperFactory.INSTANCE.writeValueAsString(node);
    SourceRecord record = record(
        null,
        new SchemaAndValue(Schema.STRING_SCHEMA, value),
        null
    );
    records.add(record);
    recordCount++;
    recordOffset++;
  }
  return records;
}
Example #6
Source File: SpoolDirSchemaLessJsonSourceTask.java (from kafka-connect-spooldir, Apache License 2.0)
@Override
protected void configure(InputStream inputStream, Long lastOffset) throws IOException {
  if (null != this.parser) {
    this.parser.close();
  }
  this.recordOffset = 0;
  this.parser = ObjectMapperFactory.INSTANCE.getJsonFactory().createParser(inputStream);
  this.nodeIterator = ObjectMapperFactory.INSTANCE.readValues(this.parser, JsonNode.class);
}
Example #7
Source File: TestDataUtils.java (from kafka-connect-spooldir, Apache License 2.0)
public static <T extends NamedTest> List<T> loadJsonResourceFiles(String packageName, Class<T> cls) throws IOException {
  Preconditions.checkNotNull(packageName, "packageName cannot be null");
  log.info("packageName = {}", packageName);
  Reflections reflections = new Reflections(packageName, new ResourcesScanner());
  Set<String> resources = reflections.getResources(new FilterBuilder.Include("^.*\\.json$"));
  List<T> datas = new ArrayList<>(resources.size());
  Path packagePath = Paths.get("/" + packageName.replace(".", "/"));
  for (String resource : resources) {
    log.trace("Loading resource {}", resource);
    Path resourcePath = Paths.get("/" + resource);
    Path relativePath = packagePath.relativize(resourcePath);
    File resourceFile = new File("/" + resource);
    T data;
    try (InputStream inputStream = cls.getResourceAsStream(resourceFile.getAbsolutePath())) {
      data = ObjectMapperFactory.INSTANCE.readValue(inputStream, cls);
    } catch (IOException ex) {
      if (log.isErrorEnabled()) {
        log.error("Exception thrown while loading {}", resourcePath, ex);
      }
      throw ex;
    }
    data.path(relativePath);
    datas.add(data);
  }
  return datas;
}
Example #8
Source File: RedisSinkTask.java (from kafka-connect-redis, Apache License 2.0)
static SinkOffsetState state(KeyValue<byte[], byte[]> input) {
  if (!input.hasValue()) {
    return null;
  }
  try {
    return ObjectMapperFactory.INSTANCE.readValue(input.getValue(), SinkOffsetState.class);
  } catch (IOException e) {
    throw new DataException(e);
  }
}
Example #9
Source File: TestDataUtils.java (from connect-utils, Apache License 2.0)
public static <T extends NamedTest> List<T> loadJsonResourceFiles(String packageName, Class<T> cls) throws IOException {
  Preconditions.checkNotNull(packageName, "packageName cannot be null");
  Reflections reflections = new Reflections(packageName, new ResourcesScanner());
  Set<String> resources = reflections.getResources(new FilterBuilder.Include(".*"));
  List<T> datas = new ArrayList<>(resources.size());
  Path packagePath = Paths.get("/" + packageName.replace(".", "/"));
  for (String resource : resources) {
    log.trace("Loading resource {}", resource);
    Path resourcePath = Paths.get("/" + resource);
    Path relativePath = packagePath.relativize(resourcePath);
    File resourceFile = new File("/" + resource);
    T data;
    try (InputStream inputStream = cls.getResourceAsStream(resourceFile.getAbsolutePath())) {
      data = ObjectMapperFactory.INSTANCE.readValue(inputStream, cls);
    } catch (IOException ex) {
      if (log.isErrorEnabled()) {
        log.error("Exception thrown while loading {}", resourcePath, ex);
      }
      throw ex;
    }
    String nameWithoutExtension = Files.getNameWithoutExtension(resource);
    if (null != relativePath.getParent()) {
      String parentName = relativePath.getParent().getFileName().toString();
      data.testName(parentName + "/" + nameWithoutExtension);
    } else {
      data.testName(nameWithoutExtension);
    }
    datas.add(data);
  }
  return datas;
}
Example #10
Source File: BaseDocumentationTest.java (from connect-utils, Apache License 2.0)
private String connectorConfig(Plugin.Connector connector, Plugin.ConnectorExample example) throws JsonProcessingException {
  ObjectNode config = ObjectMapperFactory.INSTANCE.createObjectNode();
  config.put("connector.class", connector.getCls().getName());
  if (connector instanceof Plugin.SinkConnector) {
    config.put("topic", "<required setting>");
  }
  for (Map.Entry<String, String> e : example.getConfig().entrySet()) {
    config.put(e.getKey(), e.getValue());
  }
  if (null != example.transformations() && !example.transformations().isEmpty()) {
    config.put("transforms", Joiner.on(',').join(example.transformations().keySet()));
    for (Map.Entry<String, Map<String, String>> transform : example.transformations().entrySet()) {
      assertTrue(
          transform.getValue().containsKey("type"),
          String.format("Transform '%s' does not have a type property.", transform.getKey())
      );
      for (Map.Entry<String, String> entry : transform.getValue().entrySet()) {
        String key = String.format("transforms.%s.%s", transform.getKey(), entry.getKey());
        config.put(key, entry.getValue());
      }
    }
  }
  return writeValueAsIndentedString(config);
}
Example #11
Source File: JsonNodeTest.java (from connect-utils, Apache License 2.0)
@Test
public void struct() {
  Map<String, Object> map = new LinkedHashMap<>();
  map.put("first", (Object) 1234);
  map.put("second", true);
  map.put("third", "testing");
  map.put("fourth", Arrays.asList("one", "two", "three"));
  map.put("fifth", ImmutableMap.of("a", "a", "b", "b", "c", "c"));
  map.put("sixth", ImmutableMap.of("a", "a", "b", "b", "c", "c"));

  Schema childSchema = SchemaBuilder.struct()
      .field("a", Schema.OPTIONAL_STRING_SCHEMA)
      .field("b", Schema.OPTIONAL_STRING_SCHEMA)
      .field("c", Schema.OPTIONAL_STRING_SCHEMA)
      .build();
  Schema schema = SchemaBuilder.struct()
      .field("first", Schema.OPTIONAL_INT64_SCHEMA)
      .field("second", Schema.OPTIONAL_BOOLEAN_SCHEMA)
      .field("third", Schema.OPTIONAL_STRING_SCHEMA)
      .field("fourth", SchemaBuilder.array(Schema.STRING_SCHEMA))
      .field("fifth", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA))
      .field("sixth", childSchema)
      .build();
  final Struct expected = new Struct(schema)
      .put("first", 1234L)
      .put("second", true)
      .put("third", "testing")
      .put("fourth", Arrays.asList("one", "two", "three"))
      .put("fifth", ImmutableMap.of("a", "a", "b", "b", "c", "c"))
      .put("sixth", new Struct(childSchema)
          .put("a", "a")
          .put("b", "b")
          .put("c", "c")
      );

  JsonNode input = ObjectMapperFactory.INSTANCE.convertValue(map, ObjectNode.class);
  Object result = parser.parseJsonNode(schema, input);
  assertNotNull(result);
  assertTrue(result instanceof Struct, "result should be a struct");
  assertStruct(expected, (Struct) result);
}
Example #12
Source File: SinkOffsetStateTest.java (from connect-utils, Apache License 2.0)
SinkOffsetState roundTrip(SinkOffsetState input) throws IOException {
  final byte[] buffer = ObjectMapperFactory.INSTANCE.writeValueAsBytes(input);
  return ObjectMapperFactory.INSTANCE.readValue(buffer, SinkOffsetState.class);
}
Example #13
Source File: PluginLoaderTest.java (from connect-utils, Apache License 2.0)
@BeforeAll
public static void beforeAll() {
  ObjectMapperFactory.INSTANCE.configure(SerializationFeature.INDENT_OUTPUT, true);
}
Example #14
Source File: BaseDocumentationTest.java (from connect-utils, Apache License 2.0)
private <T> T loadExample(Map.Entry<File, ?> e, Class<T> cls) throws IOException {
  log.info("loadExample() - file = '{}'", e.getKey().getAbsolutePath());
  try (InputStream inputStream = this.getClass().getResourceAsStream(e.getKey().getAbsolutePath())) {
    return ObjectMapperFactory.INSTANCE.readValue(inputStream, cls);
  }
}
Example #15
Source File: BaseDocumentationTest.java (from connect-utils, Apache License 2.0)
private String writeValueAsIndentedString(Object o) throws JsonProcessingException {
  String result = ObjectMapperFactory.INSTANCE.writeValueAsString(o);
  // (?m) makes '^' match at the start of every line, so each line of the
  // serialized JSON gets a leading space.
  return result.replaceAll("(?m)^", " ");
}
Example #16
Source File: AbstractSpoolDirSourceTaskTest.java (from kafka-connect-spooldir, Apache License 2.0)
@BeforeEach
public void configureIndent() {
  ObjectMapperFactory.INSTANCE.configure(SerializationFeature.INDENT_OUTPUT, true);
}
Example #17
Source File: AbstractSpoolDirSourceTaskTest.java (from kafka-connect-spooldir, Apache License 2.0)
protected void poll(final String packageName, TestCase testCase) throws InterruptedException, IOException {
  String keySchemaConfig = ObjectMapperFactory.INSTANCE.writeValueAsString(testCase.keySchema);
  String valueSchemaConfig = ObjectMapperFactory.INSTANCE.writeValueAsString(testCase.valueSchema);

  Map<String, String> settings = this.settings();
  settings.put(AbstractSourceConnectorConfig.INPUT_FILE_PATTERN_CONF, String.format("^.*\\.%s", packageName));
  settings.put(AbstractSpoolDirSourceConnectorConfig.KEY_SCHEMA_CONF, keySchemaConfig);
  settings.put(AbstractSpoolDirSourceConnectorConfig.VALUE_SCHEMA_CONF, valueSchemaConfig);
  if (null != testCase.settings && !testCase.settings.isEmpty()) {
    settings.putAll(testCase.settings);
  }

  this.task = createTask();
  SourceTaskContext sourceTaskContext = mock(SourceTaskContext.class);
  OffsetStorageReader offsetStorageReader = mock(OffsetStorageReader.class);
  when(offsetStorageReader.offset(anyMap())).thenReturn(testCase.offset);
  when(sourceTaskContext.offsetStorageReader()).thenReturn(offsetStorageReader);
  this.task.initialize(sourceTaskContext);
  this.task.start(settings);

  String dataFile = new File(packageName, Files.getNameWithoutExtension(testCase.path.toString())) + ".data";
  log.trace("poll(String, TestCase) - dataFile={}", dataFile);

  String inputFileName = String.format("%s.%s",
      Files.getNameWithoutExtension(testCase.path.toString()),
      packageName
  );
  final File inputFile = new File(this.inputPath, inputFileName);
  log.trace("poll(String, TestCase) - inputFile = {}", inputFile);
  final File processingFile = InputFileDequeue.processingFile(AbstractSourceConnectorConfig.PROCESSING_FILE_EXTENSION_DEFAULT, inputFile);
  try (InputStream inputStream = this.getClass().getResourceAsStream(dataFile)) {
    try (OutputStream outputStream = new FileOutputStream(inputFile)) {
      ByteStreams.copy(inputStream, outputStream);
    }
  }

  assertFalse(processingFile.exists(), String.format("processingFile %s should not exist before first poll().", processingFile));
  assertTrue(inputFile.exists(), String.format("inputFile %s should exist.", inputFile));
  List<SourceRecord> records = this.task.poll();
  assertTrue(inputFile.exists(), String.format("inputFile %s should exist after first poll().", inputFile));
  assertTrue(processingFile.exists(), String.format("processingFile %s should exist after first poll().", processingFile));
  assertNotNull(records, "records should not be null.");
  assertFalse(records.isEmpty(), "records should not be empty");
  assertEquals(testCase.expected.size(), records.size(), "records.size() does not match.");

  /*
  The following headers will change. Let's ensure they are there, but we don't care about
  their values, since they are driven by things that will change, such as lastModified
  dates and paths.
  */
  List<String> headersToRemove = Arrays.asList(
      Metadata.HEADER_LAST_MODIFIED,
      Metadata.HEADER_PATH,
      Metadata.HEADER_LENGTH
  );

  for (int i = 0; i < testCase.expected.size(); i++) {
    SourceRecord expectedRecord = testCase.expected.get(i);
    SourceRecord actualRecord = records.get(i);
    for (String headerToRemove : headersToRemove) {
      assertNotNull(
          actualRecord.headers().lastWithName(headerToRemove),
          String.format("index:%s should have the header '%s'", i, headerToRemove)
      );
      actualRecord.headers().remove(headerToRemove);
      expectedRecord.headers().remove(headerToRemove);
    }
    assertSourceRecord(expectedRecord, actualRecord, String.format("index:%s", i));
  }

  records = this.task.poll();
  assertNull(records, "records should be null after the second poll.");
  records = this.task.poll();
  assertNull(records, "records should be null after the third poll.");
  assertFalse(inputFile.exists(), String.format("inputFile %s should not exist.", inputFile));
  assertFalse(processingFile.exists(), String.format("processingFile %s should not exist.", processingFile));
  final File finishedFile = new File(this.finishedPath, inputFileName);
  assertTrue(finishedFile.exists(), String.format("finishedFile %s should exist.", finishedFile));
}
Example #18
Source File: SpoolDirCsvSourceTaskTest.java (from kafka-connect-spooldir, Apache License 2.0)
@Test
public void rebalance() throws IOException, InterruptedException {
  Schema schema = SchemaBuilder.struct()
      .field("id", Schema.INT32_SCHEMA)
      .build();
  final int count = 100;
  List<Struct> values = new ArrayList<>(count);
  for (int i = 0; i < count; i++) {
    values.add(
        new Struct(schema)
            .put("id", i)
    );
  }
  File inputFile = new File(this.inputPath, "input.csv");
  writeCSV(inputFile, schema, values);

  Map<String, String> settings = settings();
  settings.put(SpoolDirCsvSourceConnectorConfig.KEY_SCHEMA_CONF, ObjectMapperFactory.INSTANCE.writeValueAsString(schema));
  settings.put(SpoolDirCsvSourceConnectorConfig.VALUE_SCHEMA_CONF, ObjectMapperFactory.INSTANCE.writeValueAsString(schema));
  settings.put(SpoolDirCsvSourceConnectorConfig.BATCH_SIZE_CONF, "50");
  settings.put(SpoolDirCsvSourceConnectorConfig.INPUT_FILE_PATTERN_CONF, ".*");

  SpoolDirCsvSourceTask task = new SpoolDirCsvSourceTask();
  SourceTaskContext sourceTaskContext = mock(SourceTaskContext.class);
  OffsetStorageReader offsetStorageReader = mock(OffsetStorageReader.class);
  when(offsetStorageReader.offset(anyMap())).thenReturn(null);
  when(sourceTaskContext.offsetStorageReader()).thenReturn(offsetStorageReader);
  task.initialize(sourceTaskContext);
  task.start(settings);

  List<SourceRecord> records = new ArrayList<>();
  records.addAll(task.poll());
  assertEquals(50, records.size());
  SourceRecord lastRecord = records.get(49);

  // Simulate a rebalance: restart the task from the last committed offset.
  when(offsetStorageReader.offset(anyMap())).thenReturn((Map<String, Object>) lastRecord.sourceOffset());
  task.stop();
  task.start(settings);

  records.addAll(task.poll());
  assertEquals(count, records.size(), "Expected number of records does not match.");
  assertNull(task.poll(), "Polling should be finished with the file by now.");
  assertNull(task.poll(), "Polling should be finished with the file by now.");
}