Java Code Examples for org.apache.flink.api.java.utils.ParameterTool#getLong()
The following examples show how to use org.apache.flink.api.java.utils.ParameterTool#getLong().
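Before the examples, here is a minimal sketch of the two getLong() overloads for orientation. The parameter names (checkpoint.interval, max.events) and the default of 60000 ms are invented for illustration; only the ParameterTool calls themselves come from the Flink API.

import org.apache.flink.api.java.utils.ParameterTool;

public class GetLongDemo {

    public static void main(String[] args) {
        // e.g. run with: --checkpoint.interval 30000 --max.events 1000
        ParameterTool params = ParameterTool.fromArgs(args);

        // Overload with a fallback: returns 60000L when the key is missing.
        long checkpointInterval = params.getLong("checkpoint.interval", 60_000L);

        // Overload without a fallback: fails with an exception when the key is missing
        // or when the value cannot be parsed as a long.
        long maxEvents = params.getLong("max.events");

        System.out.println("checkpointInterval=" + checkpointInterval
            + ", maxEvents=" + maxEvents);
    }
}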
Example 1
Source File: LongParameter.java From Flink-CEPplus with Apache License 2.0
@Override
public void configure(ParameterTool parameterTool) {
    if (hasDefaultValue && !parameterTool.has(name)) {
        // skip checks for min and max when using default value
        value = defaultValue;
    } else {
        value = parameterTool.getLong(name);

        if (hasMinimumValue) {
            Util.checkParameter(value >= minimumValue,
                name + " must be greater than or equal to " + minimumValue);
        }

        if (hasMaximumValue) {
            Util.checkParameter(value <= maximumValue,
                name + " must be less than or equal to " + maximumValue);
        }
    }
}
Example 2
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
static WindowedStream<Event, Integer, TimeWindow> applyTumblingWindows(
        KeyedStream<Event, Integer> keyedStream,
        ParameterTool pt) {

    long eventTimeProgressPerEvent = pt.getLong(
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.key(),
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.defaultValue());

    return keyedStream.timeWindow(
        Time.milliseconds(
            pt.getLong(
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.key(),
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.defaultValue()
            ) * eventTimeProgressPerEvent
        )
    );
}
Example 3
Source File: LongParameter.java From flink with Apache License 2.0
@Override
public void configure(ParameterTool parameterTool) {
    if (hasDefaultValue && !parameterTool.has(name)) {
        // skip checks for min and max when using default value
        value = defaultValue;
    } else {
        value = parameterTool.getLong(name);

        if (hasMinimumValue) {
            Util.checkParameter(value >= minimumValue,
                name + " must be greater than or equal to " + minimumValue);
        }

        if (hasMaximumValue) {
            Util.checkParameter(value <= maximumValue,
                name + " must be less than or equal to " + maximumValue);
        }
    }
}
Example 4
Source File: LongParameter.java From flink with Apache License 2.0
@Override
public void configure(ParameterTool parameterTool) {
    if (hasDefaultValue && !parameterTool.has(name)) {
        // skip checks for min and max when using default value
        value = defaultValue;
    } else {
        value = parameterTool.getLong(name);

        if (hasMinimumValue) {
            Util.checkParameter(value >= minimumValue,
                name + " must be greater than or equal to " + minimumValue);
        }

        if (hasMaximumValue) {
            Util.checkParameter(value <= maximumValue,
                name + " must be less than or equal to " + maximumValue);
        }
    }
}
Example 5
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
static WindowedStream<Event, Integer, TimeWindow> applyTumblingWindows(
        KeyedStream<Event, Integer> keyedStream,
        ParameterTool pt) {

    long eventTimeProgressPerEvent = pt.getLong(
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.key(),
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.defaultValue());

    return keyedStream.timeWindow(
        Time.milliseconds(
            pt.getLong(
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.key(),
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.defaultValue()
            ) * eventTimeProgressPerEvent
        )
    );
}
Example 6
Source File: WindowJoin.java From flink-learning with Apache License 2.0
public static void main(String[] args) throws Exception {
    final ParameterTool params = ParameterTool.fromArgs(args);
    final long windowSize = params.getLong("windowSize", 2000);
    final long rate = params.getLong("rate", 3L);
    System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate);

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);
    env.getConfig().setGlobalJobParameters(params);

    DataStream<Tuple2<String, Integer>> grades = WindowJoinSampleData.GradeSource.getSource(env, rate);
    DataStream<Tuple2<String, Integer>> salaries = WindowJoinSampleData.SalarySource.getSource(env, rate);

    // grades.print();
    // salaries.print();

    runWindowJoin(grades, salaries, windowSize)
        .print()
        .setParallelism(1);

    env.execute("Windowed Join Example");
}
Example 7
Source File: DataStreamAllroundTestJobFactory.java From Flink-CEPplus with Apache License 2.0
static WindowedStream<Event, Integer, TimeWindow> applyTumblingWindows(
        KeyedStream<Event, Integer> keyedStream,
        ParameterTool pt) {

    long eventTimeProgressPerEvent = pt.getLong(
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.key(),
        SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.defaultValue());

    return keyedStream.timeWindow(
        Time.milliseconds(
            pt.getLong(
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.key(),
                TUMBLING_WINDOW_OPERATOR_NUM_EVENTS.defaultValue()
            ) * eventTimeProgressPerEvent
        )
    );
}
Example 8
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
static SourceFunction<Event> createEventSource(ParameterTool pt) {
    return new SequenceGeneratorSource(
        pt.getInt(
            SEQUENCE_GENERATOR_SRC_KEYSPACE.key(),
            SEQUENCE_GENERATOR_SRC_KEYSPACE.defaultValue()),
        pt.getInt(
            SEQUENCE_GENERATOR_SRC_PAYLOAD_SIZE.key(),
            SEQUENCE_GENERATOR_SRC_PAYLOAD_SIZE.defaultValue()),
        pt.getLong(
            SEQUENCE_GENERATOR_SRC_EVENT_TIME_MAX_OUT_OF_ORDERNESS.key(),
            SEQUENCE_GENERATOR_SRC_EVENT_TIME_MAX_OUT_OF_ORDERNESS.defaultValue()),
        pt.getLong(
            SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.key(),
            SEQUENCE_GENERATOR_SRC_EVENT_TIME_CLOCK_PROGRESS.defaultValue()),
        pt.getLong(
            SEQUENCE_GENERATOR_SRC_SLEEP_TIME.key(),
            SEQUENCE_GENERATOR_SRC_SLEEP_TIME.defaultValue()),
        pt.getLong(
            SEQUENCE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.key(),
            SEQUENCE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.defaultValue()));
}
Example 9
Source File: TtlTestConfig.java From flink with Apache License 2.0
static TtlTestConfig fromArgs(ParameterTool pt) {
    int keySpace = pt.getInt(UPDATE_GENERATOR_SRC_KEYSPACE.key(), UPDATE_GENERATOR_SRC_KEYSPACE.defaultValue());
    long sleepAfterElements = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.key(), UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.defaultValue());
    long sleepTime = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_TIME.key(), UPDATE_GENERATOR_SRC_SLEEP_TIME.defaultValue());
    Time ttl = Time.milliseconds(pt.getLong(STATE_TTL_VERIFIER_TTL_MILLI.key(), STATE_TTL_VERIFIER_TTL_MILLI.defaultValue()));
    long reportStatAfterUpdatesNum = pt.getLong(REPORT_STAT_AFTER_UPDATES_NUM.key(), REPORT_STAT_AFTER_UPDATES_NUM.defaultValue());
    return new TtlTestConfig(keySpace, sleepAfterElements, sleepTime, ttl, reportStatAfterUpdatesNum);
}
Example 10
Source File: TtlTestConfig.java From Flink-CEPplus with Apache License 2.0
static TtlTestConfig fromArgs(ParameterTool pt) {
    int keySpace = pt.getInt(UPDATE_GENERATOR_SRC_KEYSPACE.key(), UPDATE_GENERATOR_SRC_KEYSPACE.defaultValue());
    long sleepAfterElements = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.key(), UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.defaultValue());
    long sleepTime = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_TIME.key(), UPDATE_GENERATOR_SRC_SLEEP_TIME.defaultValue());
    Time ttl = Time.milliseconds(pt.getLong(STATE_TTL_VERIFIER_TTL_MILLI.key(), STATE_TTL_VERIFIER_TTL_MILLI.defaultValue()));
    long reportStatAfterUpdatesNum = pt.getLong(REPORT_STAT_AFTER_UPDATES_NUM.key(), REPORT_STAT_AFTER_UPDATES_NUM.defaultValue());
    return new TtlTestConfig(keySpace, sleepAfterElements, sleepTime, ttl, reportStatAfterUpdatesNum);
}
Example 11
Source File: WindowJoin.java From Flink-CEPplus with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse the parameters
    final ParameterTool params = ParameterTool.fromArgs(args);
    final long windowSize = params.getLong("windowSize", 2000);
    final long rate = params.getLong("rate", 3L);

    System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate);
    System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]");

    // obtain execution environment, run this example in "ingestion time"
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(params);

    // create the data sources for both grades and salaries
    DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate);
    DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate);

    // run the actual window join program
    // for testability, this functionality is in a separate method.
    DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize);

    // print the results with a single thread, rather than in parallel
    joinedStream.print().setParallelism(1);

    // execute program
    env.execute("Windowed Join Example");
}
Example 12
Source File: TtlTestConfig.java From flink with Apache License 2.0
static TtlTestConfig fromArgs(ParameterTool pt) {
    int keySpace = pt.getInt(UPDATE_GENERATOR_SRC_KEYSPACE.key(), UPDATE_GENERATOR_SRC_KEYSPACE.defaultValue());
    long sleepAfterElements = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.key(), UPDATE_GENERATOR_SRC_SLEEP_AFTER_ELEMENTS.defaultValue());
    long sleepTime = pt.getLong(UPDATE_GENERATOR_SRC_SLEEP_TIME.key(), UPDATE_GENERATOR_SRC_SLEEP_TIME.defaultValue());
    Time ttl = Time.milliseconds(pt.getLong(STATE_TTL_VERIFIER_TTL_MILLI.key(), STATE_TTL_VERIFIER_TTL_MILLI.defaultValue()));
    long reportStatAfterUpdatesNum = pt.getLong(REPORT_STAT_AFTER_UPDATES_NUM.key(), REPORT_STAT_AFTER_UPDATES_NUM.defaultValue());
    return new TtlTestConfig(keySpace, sleepAfterElements, sleepTime, ttl, reportStatAfterUpdatesNum);
}
Example 13
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
private static void setupCheckpointing(final StreamExecutionEnvironment env, final ParameterTool pt) {
    String semantics = pt.get(TEST_SEMANTICS.key(), TEST_SEMANTICS.defaultValue());
    long checkpointInterval = pt.getLong(ENVIRONMENT_CHECKPOINT_INTERVAL.key(), ENVIRONMENT_CHECKPOINT_INTERVAL.defaultValue());
    CheckpointingMode checkpointingMode = semantics.equalsIgnoreCase("exactly-once")
        ? CheckpointingMode.EXACTLY_ONCE
        : CheckpointingMode.AT_LEAST_ONCE;

    env.enableCheckpointing(checkpointInterval, checkpointingMode);

    boolean enableExternalizedCheckpoints = pt.getBoolean(
        ENVIRONMENT_EXTERNALIZE_CHECKPOINT.key(),
        ENVIRONMENT_EXTERNALIZE_CHECKPOINT.defaultValue());

    if (enableExternalizedCheckpoints) {
        String cleanupModeConfig = pt.get(
            ENVIRONMENT_EXTERNALIZE_CHECKPOINT_CLEANUP.key(),
            ENVIRONMENT_EXTERNALIZE_CHECKPOINT_CLEANUP.defaultValue());

        CheckpointConfig.ExternalizedCheckpointCleanup cleanupMode;
        switch (cleanupModeConfig) {
            case "retain":
                cleanupMode = CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION;
                break;
            case "delete":
                cleanupMode = CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION;
                break;
            default:
                throw new IllegalArgumentException("Unknown clean up mode for externalized checkpoints: " + cleanupModeConfig);
        }
        env.getCheckpointConfig().enableExternalizedCheckpoints(cleanupMode);

        final int tolerableDeclinedCheckpointNumber = pt.getInt(
            ENVIRONMENT_TOLERABLE_DECLINED_CHECKPOINT_NUMBER.key(),
            ENVIRONMENT_TOLERABLE_DECLINED_CHECKPOINT_NUMBER.defaultValue());
        env.getCheckpointConfig().setTolerableCheckpointFailureNumber(tolerableDeclinedCheckpointNumber);
    }
}
Example 14
Source File: DataStreamAllroundTestJobFactory.java From flink with Apache License 2.0
static SlidingEventTimeWindows createSlidingWindow(ParameterTool pt) {
    long slideSize = pt.getLong(
        TEST_SLIDE_SIZE.key(),
        TEST_SLIDE_SIZE.defaultValue());

    long slideFactor = pt.getInt(
        TEST_SLIDE_FACTOR.key(),
        TEST_SLIDE_FACTOR.defaultValue()
    );

    return SlidingEventTimeWindows.of(Time.milliseconds(slideSize * slideFactor), Time.milliseconds(slideSize));
}
Example 15
Source File: WindowJoin.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {
    // parse the parameters
    final ParameterTool params = ParameterTool.fromArgs(args);
    final long windowSize = params.getLong("windowSize", 2000);
    final long rate = params.getLong("rate", 3L);

    System.out.println("Using windowSize=" + windowSize + ", data rate=" + rate);
    System.out.println("To customize example, use: WindowJoin [--windowSize <window-size-in-millis>] [--rate <elements-per-second>]");

    // obtain execution environment, run this example in "ingestion time"
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(params);

    // create the data sources for both grades and salaries
    DataStream<Tuple2<String, Integer>> grades = GradeSource.getSource(env, rate);
    DataStream<Tuple2<String, Integer>> salaries = SalarySource.getSource(env, rate);

    // run the actual window join program
    // for testability, this functionality is in a separate method.
    DataStream<Tuple3<String, Integer, Integer>> joinedStream = runWindowJoin(grades, salaries, windowSize);

    // print the results with a single thread, rather than in parallel
    joinedStream.print().setParallelism(1);

    // execute program
    env.execute("Windowed Join Example");
}
Example 16
Source File: KafkaConfigUtil.java From flink-learning with Apache License 2.0
public static DataStreamSource<MetricEvent> buildSource(StreamExecutionEnvironment env) throws IllegalAccessException {
    ParameterTool parameter = (ParameterTool) env.getConfig().getGlobalJobParameters();
    String topic = parameter.getRequired(PropertiesConstants.METRICS_TOPIC);
    Long time = parameter.getLong(PropertiesConstants.CONSUMER_FROM_TIME, 0L);
    return buildSource(env, topic, time);
}
Example 17
Source File: StickyAllocationAndLocalRecoveryTestJob.java From flink with Apache License 2.0
public static void main(String[] args) throws Exception {

    final ParameterTool pt = ParameterTool.fromArgs(args);

    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    env.setParallelism(pt.getInt("parallelism", 1));
    env.setMaxParallelism(pt.getInt("maxParallelism", pt.getInt("parallelism", 1)));
    env.enableCheckpointing(pt.getInt("checkpointInterval", 1000));
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, pt.getInt("restartDelay", 0)));

    if (pt.getBoolean("externalizedCheckpoints", false)) {
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
    }

    String stateBackend = pt.get("stateBackend", "file");
    String checkpointDir = pt.getRequired("checkpointDir");

    boolean killJvmOnFail = pt.getBoolean("killJvmOnFail", false);

    if ("file".equals(stateBackend)) {
        boolean asyncCheckpoints = pt.getBoolean("asyncCheckpoints", true);
        env.setStateBackend(new FsStateBackend(checkpointDir, asyncCheckpoints));
    } else if ("rocks".equals(stateBackend)) {
        boolean incrementalCheckpoints = pt.getBoolean("incrementalCheckpoints", false);
        env.setStateBackend(new RocksDBStateBackend(checkpointDir, incrementalCheckpoints));
    } else {
        throw new IllegalArgumentException("Unknown backend: " + stateBackend);
    }

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(pt);

    // delay to throttle down the production of the source
    long delay = pt.getLong("delay", 0L);

    // the maximum number of attempts, before the job finishes with success
    int maxAttempts = pt.getInt("maxAttempts", 3);

    // size of one artificial value
    int valueSize = pt.getInt("valueSize", 10);

    env.addSource(new RandomLongSource(maxAttempts, delay))
        .keyBy((KeySelector<Long, Long>) aLong -> aLong)
        .flatMap(new StateCreatingFlatMap(valueSize, killJvmOnFail))
        .addSink(new PrintSinkFunction<>());

    env.execute("Sticky Allocation And Local Recovery Test");
}
Example 18
Source File: KafkaConfigUtil.java From flink-learning with Apache License 2.0
public static DataStreamSource<MetricEvent> buildSource(StreamExecutionEnvironment env) throws IllegalAccessException {
    ParameterTool parameter = (ParameterTool) env.getConfig().getGlobalJobParameters();
    String topic = parameter.getRequired(PropertiesConstants.METRICS_TOPIC);
    Long time = parameter.getLong(PropertiesConstants.CONSUMER_FROM_TIME, 0L);
    return buildSource(env, topic, time);
}
Example 19
Source File: DataStreamAllroundTestJobFactory.java From Flink-CEPplus with Apache License 2.0
public static void setupEnvironment(StreamExecutionEnvironment env, ParameterTool pt) throws Exception {

    // set checkpointing semantics
    String semantics = pt.get(TEST_SEMANTICS.key(), TEST_SEMANTICS.defaultValue());
    long checkpointInterval = pt.getLong(ENVIRONMENT_CHECKPOINT_INTERVAL.key(), ENVIRONMENT_CHECKPOINT_INTERVAL.defaultValue());
    CheckpointingMode checkpointingMode = semantics.equalsIgnoreCase("exactly-once")
        ? CheckpointingMode.EXACTLY_ONCE
        : CheckpointingMode.AT_LEAST_ONCE;

    env.enableCheckpointing(checkpointInterval, checkpointingMode);

    // use event time
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

    // parallelism
    env.setParallelism(pt.getInt(ENVIRONMENT_PARALLELISM.key(), ENVIRONMENT_PARALLELISM.defaultValue()));
    env.setMaxParallelism(pt.getInt(ENVIRONMENT_MAX_PARALLELISM.key(), ENVIRONMENT_MAX_PARALLELISM.defaultValue()));

    // restart strategy
    String restartStrategyConfig = pt.get(ENVIRONMENT_RESTART_STRATEGY.key());
    if (restartStrategyConfig != null) {
        RestartStrategies.RestartStrategyConfiguration restartStrategy;
        switch (restartStrategyConfig) {
            case "fixed_delay":
                restartStrategy = RestartStrategies.fixedDelayRestart(
                    pt.getInt(
                        ENVIRONMENT_RESTART_STRATEGY_FIXED_ATTEMPTS.key(),
                        ENVIRONMENT_RESTART_STRATEGY_FIXED_ATTEMPTS.defaultValue()),
                    pt.getLong(
                        ENVIRONMENT_RESTART_STRATEGY_FIXED_DELAY.key(),
                        ENVIRONMENT_RESTART_STRATEGY_FIXED_DELAY.defaultValue()));
                break;
            case "no_restart":
                restartStrategy = RestartStrategies.noRestart();
                break;
            default:
                throw new IllegalArgumentException("Unknown restart strategy: " + restartStrategyConfig);
        }
        env.setRestartStrategy(restartStrategy);
    }

    // state backend
    final String stateBackend = pt.get(
        STATE_BACKEND.key(),
        STATE_BACKEND.defaultValue());

    final String checkpointDir = pt.getRequired(STATE_BACKEND_CHECKPOINT_DIR.key());

    if ("file".equalsIgnoreCase(stateBackend)) {
        boolean asyncCheckpoints = pt.getBoolean(
            STATE_BACKEND_FILE_ASYNC.key(),
            STATE_BACKEND_FILE_ASYNC.defaultValue());

        env.setStateBackend((StateBackend) new FsStateBackend(checkpointDir, asyncCheckpoints));
    } else if ("rocks".equalsIgnoreCase(stateBackend)) {
        boolean incrementalCheckpoints = pt.getBoolean(
            STATE_BACKEND_ROCKS_INCREMENTAL.key(),
            STATE_BACKEND_ROCKS_INCREMENTAL.defaultValue());

        env.setStateBackend((StateBackend) new RocksDBStateBackend(checkpointDir, incrementalCheckpoints));
    } else {
        throw new IllegalArgumentException("Unknown backend requested: " + stateBackend);
    }

    boolean enableExternalizedCheckpoints = pt.getBoolean(
        ENVIRONMENT_EXTERNALIZE_CHECKPOINT.key(),
        ENVIRONMENT_EXTERNALIZE_CHECKPOINT.defaultValue());

    if (enableExternalizedCheckpoints) {
        String cleanupModeConfig = pt.get(
            ENVIRONMENT_EXTERNALIZE_CHECKPOINT_CLEANUP.key(),
            ENVIRONMENT_EXTERNALIZE_CHECKPOINT_CLEANUP.defaultValue());

        CheckpointConfig.ExternalizedCheckpointCleanup cleanupMode;
        switch (cleanupModeConfig) {
            case "retain":
                cleanupMode = CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION;
                break;
            case "delete":
                cleanupMode = CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION;
                break;
            default:
                throw new IllegalArgumentException("Unknown clean up mode for externalized checkpoints: " + cleanupModeConfig);
        }
        env.getCheckpointConfig().enableExternalizedCheckpoints(cleanupMode);
    }

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(pt);
}
Example 20
Source File: ItemTransactionGeneratorSource.java From flink-tutorials with Apache License 2.0
public ItemTransactionGeneratorSource(ParameterTool params) {
    this.numItems = params.getInt(NUM_ITEMS_KEY, DEFAULT_NUM_ITEMS);
    this.sleep = params.getLong(SLEEP_KEY, 0);
    this.shape = params.getInt(PARETO_SHAPE_KEY, DEFAULT_SHAPE);
}