org.apache.flink.api.common.serialization.SimpleStringEncoder Java Examples
The following examples show how to use
org.apache.flink.api.common.serialization.SimpleStringEncoder.
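SimpleStringEncoder is the stock row-format Encoder used with StreamingFileSink: it writes each element's toString() output followed by a newline, encoded with the configured charset (UTF-8 by default). Before the examples proper, here is a minimal self-contained sketch of the typical usage; the class name, output path, and checkpoint interval are illustrative placeholders, not taken from any of the projects below.

import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;

public class SimpleStringEncoderDemo {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // StreamingFileSink finalizes part files only when a checkpoint completes.
        env.enableCheckpointing(10_000L);

        DataStream<String> lines = env.fromElements("a", "b", "c"); // placeholder input

        // Each element is written as element.toString() plus a newline, in UTF-8.
        StreamingFileSink<String> sink = StreamingFileSink
                .forRowFormat(new Path("/tmp/simple-string-encoder-demo"), new SimpleStringEncoder<String>("UTF-8"))
                .build();

        lines.addSink(sink);
        env.execute("SimpleStringEncoder demo");
    }
}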
Example #1
Source File: BucketAssignerITCases.java, from Flink-CEPplus (Apache License 2.0)
@Test
public void testAssembleBucketPath() throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path basePath = new Path(outDir.toURI());
    final long time = 1000L;

    final RollingPolicy<String, String> rollingPolicy = DefaultRollingPolicy
            .create()
            .withMaxPartSize(7L)
            .build();

    final Buckets<String, String> buckets = new Buckets<>(
            basePath,
            new BasePathBucketAssigner<>(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicy,
            0);

    // BasePathBucketAssigner assigns every element to the base path itself.
    Bucket<String, String> bucket = buckets.onElement("abc", new TestUtils.MockSinkContext(time, time, time));
    Assert.assertEquals(new Path(basePath.toUri()), bucket.getBucketPath());
}
Example #2
Source File: BucketsTest.java, from Flink-CEPplus (Apache License 2.0)
private void testCorrectTimestampPassingInContext(Long timestamp, long watermark, long processingTime) throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path path = new Path(outDir.toURI());

    final Buckets<String, String> buckets = new Buckets<>(
            path,
            new VerifyingBucketAssigner(timestamp, watermark, processingTime),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            DefaultRollingPolicy.create().build(),
            2);

    buckets.onElement(
            "test",
            new TestUtils.MockSinkContext(timestamp, watermark, processingTime));
}
Example #3
Source File: BucketAssignerITCases.java, from flink (Apache License 2.0)
@Test
public void testAssembleBucketPath() throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path basePath = new Path(outDir.toURI());
    final long time = 1000L;

    final RollingPolicy<String, String> rollingPolicy = DefaultRollingPolicy
            .create()
            .withMaxPartSize(7L)
            .build();

    final Buckets<String, String> buckets = new Buckets<>(
            basePath,
            new BasePathBucketAssigner<>(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicy,
            0,
            new PartFileConfig());

    Bucket<String, String> bucket = buckets.onElement("abc", new TestUtils.MockSinkContext(time, time, time));
    Assert.assertEquals(new Path(basePath.toUri()), bucket.getBucketPath());
}
Example #4
Source File: BucketsTest.java, from flink (Apache License 2.0)
private void testCorrectTimestampPassingInContext(Long timestamp, long watermark, long processingTime) throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path path = new Path(outDir.toURI());

    final Buckets<String, String> buckets = new Buckets<>(
            path,
            new VerifyingBucketAssigner(timestamp, watermark, processingTime),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            DefaultRollingPolicy.create().build(),
            2,
            new PartFileConfig());

    buckets.onElement(
            "test",
            new TestUtils.MockSinkContext(timestamp, watermark, processingTime));
}
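Examples #3 and #4 are the same tests as #1 and #2 built against a newer Flink release: the internal Buckets constructor gained a trailing PartFileConfig argument. In still later releases, visible in Examples #6 through #8, that parameter becomes OutputFileConfig and the RowWisePartWriter.Factory is replaced by a RowWiseBucketWriter backed by a RecoverableWriter.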
Example #5
Source File: KafkaToHDFSSimpleJob.java, from flink-tutorials (Apache License 2.0)
public static void main(String[] args) throws Exception {
    ParameterTool params = Utils.parseArgs(args);
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Consume raw strings from the configured Kafka topic.
    FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(
            params.getRequired("kafkaTopic"),
            new SimpleStringSchema(),
            Utils.readKafkaProperties(params));
    DataStream<String> source = env.addSource(consumer).name("Kafka Source").uid("Kafka Source");

    // Write each record as a UTF-8 encoded line under the configured HDFS output directory.
    StreamingFileSink<String> sink = StreamingFileSink
            .forRowFormat(new Path(params.getRequired("hdfsOutput")), new SimpleStringEncoder<String>("UTF-8"))
            .build();

    source.addSink(sink).name("FS Sink").uid("FS Sink");
    source.print();

    env.execute("Flink Streaming Secured Job Sample");
}
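A caveat that applies to this and the other StreamingFileSink examples: the sink moves in-progress part files to their finished state only when a checkpoint completes, so a job like the one above should run with checkpointing enabled to produce readable output. A one-line sketch, with an arbitrarily chosen interval:

    env.enableCheckpointing(10_000L); // e.g. checkpoint every 10 seconds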
Example #6
Source File: BucketAssignerITCases.java, from flink (Apache License 2.0)
@Test
public void testAssembleBucketPath() throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path basePath = new Path(outDir.toURI());
    final long time = 1000L;

    final RollingPolicy<String, String> rollingPolicy = DefaultRollingPolicy
            .builder()
            .withMaxPartSize(7L)
            .build();

    final Buckets<String, String> buckets = new Buckets<>(
            basePath,
            new BasePathBucketAssigner<>(),
            new DefaultBucketFactoryImpl<>(),
            new RowWiseBucketWriter<>(FileSystem.get(basePath.toUri()).createRecoverableWriter(), new SimpleStringEncoder<>()),
            rollingPolicy,
            0,
            OutputFileConfig.builder().build());

    Bucket<String, String> bucket = buckets.onElement("abc", new TestUtils.MockSinkContext(time, time, time));
    Assert.assertEquals(new Path(basePath.toUri()), bucket.getBucketPath());
}
Example #7
Source File: BucketsTest.java, from flink (Apache License 2.0)
private void testCorrectTimestampPassingInContext(Long timestamp, long watermark, long processingTime) throws Exception {
    final File outDir = TEMP_FOLDER.newFolder();
    final Path path = new Path(outDir.toURI());

    final Buckets<String, String> buckets = new Buckets<>(
            path,
            new VerifyingBucketAssigner(timestamp, watermark, processingTime),
            new DefaultBucketFactoryImpl<>(),
            new RowWiseBucketWriter<>(FileSystem.get(path.toUri()).createRecoverableWriter(), new SimpleStringEncoder<>()),
            DefaultRollingPolicy.builder().build(),
            2,
            OutputFileConfig.builder().build());

    buckets.onElement(
            "test",
            new TestUtils.MockSinkContext(timestamp, watermark, processingTime));
}
Example #8
Source File: BucketsTest.java, from flink (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final RollingPolicy<String, String> rollingPolicy,
        final BucketLifeCycleListener<String, String> bucketLifeCycleListener,
        final int subtaskIdx,
        final OutputFileConfig outputFileConfig) throws IOException {

    Buckets<String, String> buckets = new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWiseBucketWriter<>(FileSystem.get(basePath.toUri()).createRecoverableWriter(), new SimpleStringEncoder<>()),
            rollingPolicy,
            subtaskIdx,
            outputFileConfig);

    if (bucketLifeCycleListener != null) {
        buckets.setBucketLifeCycleListener(bucketLifeCycleListener);
    }
    return buckets;
}
Example #9
Source File: RollingPolicyTest.java, from Flink-CEPplus (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final MethodCallCountingPolicyWrapper<String, String> rollingPolicyToTest) throws IOException {

    return new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicyToTest,
            0);
}
Example #10
Source File: BucketsTest.java, from Flink-CEPplus (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final RollingPolicy<String, String> rollingPolicy,
        final int subtaskIdx) throws IOException {

    return new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicy,
            subtaskIdx);
}
Example #11
Source File: RollingPolicyTest.java, from flink (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final MethodCallCountingPolicyWrapper<String, String> rollingPolicyToTest) throws IOException {

    return new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicyToTest,
            0,
            new PartFileConfig());
}
Example #12
Source File: BucketsTest.java, from flink (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final RollingPolicy<String, String> rollingPolicy,
        final int subtaskIdx,
        final PartFileConfig partFileConfig) throws IOException {

    return new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWisePartWriter.Factory<>(new SimpleStringEncoder<>()),
            rollingPolicy,
            subtaskIdx,
            partFileConfig);
}
Example #13
Source File: KafkaToHDFSAvroJob.java, from flink-tutorials (Apache License 2.0)
public static void main(String[] args) throws Exception {
    ParameterTool params = Utils.parseArgs(args);
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Deserialize Avro Message records using the schema registry-backed schema.
    KafkaDeserializationSchema<Message> schema = ClouderaRegistryKafkaDeserializationSchema
            .builder(Message.class)
            .setConfig(Utils.readSchemaRegistryProperties(params))
            .build();

    FlinkKafkaConsumer<Message> consumer =
            new FlinkKafkaConsumer<Message>(params.getRequired(K_KAFKA_TOPIC), schema, Utils.readKafkaProperties(params));

    // Flatten each Message into a comma-separated output line.
    DataStream<String> source = env.addSource(consumer)
            .name("Kafka Source")
            .uid("Kafka Source")
            .map(record -> record.getId() + "," + record.getName() + "," + record.getDescription())
            .name("ToOutputString");

    // Write the lines to HDFS as UTF-8 text.
    StreamingFileSink<String> sink = StreamingFileSink
            .forRowFormat(new Path(params.getRequired(K_HDFS_OUTPUT)), new SimpleStringEncoder<String>("UTF-8"))
            .build();

    source.addSink(sink)
            .name("FS Sink")
            .uid("FS Sink");

    source.print();
    env.execute("Flink Streaming Secured Job Sample");
}
Example #14
Source File: RollingPolicyTest.java, from flink (Apache License 2.0)
private static Buckets<String, String> createBuckets(
        final Path basePath,
        final MethodCallCountingPolicyWrapper<String, String> rollingPolicyToTest) throws IOException {

    return new Buckets<>(
            basePath,
            new TestUtils.StringIdentityBucketAssigner(),
            new DefaultBucketFactoryImpl<>(),
            new RowWiseBucketWriter<>(FileSystem.get(basePath.toUri()).createRecoverableWriter(), new SimpleStringEncoder<>()),
            rollingPolicyToTest,
            0,
            OutputFileConfig.builder().build());
}
Example #15
Source File: MatrixVectorMul.java, from flink (Apache License 2.0)
public static void main(String[] args) throws Exception {
    // Checking input parameters
    final ParameterTool params = ParameterTool.fromArgs(args);
    System.out.println("Usage: MatrixVectorMul [--output <path>] [--dimension <dimension> --data-size <data_size>] [--resource-name <resource_name>]");

    // Set up the execution environment
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(params);

    final int dimension = params.getInt("dimension", DEFAULT_DIM);
    final int dataSize = params.getInt("data-size", DEFAULT_DATA_SIZE);
    final String resourceName = params.get("resource-name", DEFAULT_RESOURCE_NAME);

    DataStream<List<Float>> result = env.addSource(new RandomVectorSource(dimension, dataSize))
            .map(new Multiplier(dimension, resourceName));

    // Emit result
    if (params.has("output")) {
        result.addSink(
                StreamingFileSink.forRowFormat(
                        new Path(params.get("output")),
                        new SimpleStringEncoder<List<Float>>()).build());
    } else {
        System.out.println("Printing result to stdout. Use --output to specify output path.");
        result.print();
    }

    // Execute program
    env.execute("Matrix-Vector Multiplication");
}
Example #16
Source File: BucketStateSerializerTest.java, from flink (Apache License 2.0)
private static RowWiseBucketWriter<String, String> createBucketWriter() throws IOException {
    return new RowWiseBucketWriter<>(
            FileSystem.getLocalFileSystem().createRecoverableWriter(),
            new SimpleStringEncoder<>());
}
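All of the examples above pass SimpleStringEncoder wherever an org.apache.flink.api.common.serialization.Encoder is expected. The interface has a single encode method, so a custom row encoder is easy to write. The sketch below is an illustrative template (the class name is made up, not from any of the projects above); it reproduces roughly what SimpleStringEncoder does, writing each element as a UTF-8 line:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.flink.api.common.serialization.Encoder;

// Illustrative template for a custom row-format encoder.
public class LinePerElementEncoder<T> implements Encoder<T> {

    private static final long serialVersionUID = 1L;

    @Override
    public void encode(T element, OutputStream stream) throws IOException {
        // Write the element's string form, then terminate the row with a newline.
        stream.write(element.toString().getBytes(StandardCharsets.UTF_8));
        stream.write('\n');
    }
}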