Java Code Examples for org.apache.beam.sdk.metrics.Metrics#counter()
The following examples show how to use org.apache.beam.sdk.metrics.Metrics#counter(). They are taken from open source projects; the originating source file, project, and license are noted above each example.
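Before the examples, here is a minimal sketch (not taken from any of the projects below) of the pattern most of them follow: obtain a Counter from Metrics.counter() using a namespace and a name, keep it in a field of a DoFn, and increment it as elements are processed. The CountEmptyLinesFn class and its metric names are hypothetical and only illustrate the API.

import org.apache.beam.sdk.metrics.Counter;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.transforms.DoFn;

// Hypothetical DoFn that counts empty and non-empty input lines.
public class CountEmptyLinesFn extends DoFn<String, String> {

  // The namespace (here, the owning class) and the name together identify the metric.
  private final Counter emptyLines = Metrics.counter(CountEmptyLinesFn.class, "empty_lines");
  private final Counter nonEmptyLines = Metrics.counter(CountEmptyLinesFn.class, "non_empty_lines");

  @ProcessElement
  public void processElement(@Element String line, OutputReceiver<String> out) {
    if (line.isEmpty()) {
      emptyLines.inc();      // increment by 1; inc(long) adds an arbitrary amount
    } else {
      nonEmptyLines.inc();
      out.output(line);
    }
  }
}

After the pipeline runs, the reported values can be queried from the PipelineResult via result.metrics().queryMetrics(...), as Example 14 below demonstrates.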
Example 1
Source File: StringToHopFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    inputCounter = Metrics.counter( Pipeline.METRIC_NAME_INPUT, transformName );
    writtenCounter = Metrics.counter( Pipeline.METRIC_NAME_WRITTEN, transformName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, transformName ).inc();
  } catch ( Exception e ) {
    Metrics.counter( Pipeline.METRIC_NAME_ERROR, transformName ).inc();
    LOG.error( "Error in setup of converting input data into Hop rows : " + e.getMessage() );
    throw new RuntimeException( "Error in setup of converting input data into Hop rows", e );
  }
}
Example 2
Source File: AssemblerFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    writtenCounter = Metrics.counter( Pipeline.METRIC_NAME_WRITTEN, counterName );
    errorCounter = Metrics.counter( Pipeline.METRIC_NAME_ERROR, counterName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    outputRowMeta = JsonRowMeta.fromJson( outputRowMetaJson );
    leftKRowMeta = JsonRowMeta.fromJson( leftKRowMetaJson );
    leftVRowMeta = JsonRowMeta.fromJson( leftVRowMetaJson );
    rightVRowMeta = JsonRowMeta.fromJson( rightVRowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, counterName ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error initializing assembling rows", e );
    throw new RuntimeException( "Error initializing assembling output KV<row, KV<row, row>>", e );
  }
}
Example 3
Source File: WindowInfoFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    readCounter = Metrics.counter( Pipeline.METRIC_NAME_READ, transformName );
    writtenCounter = Metrics.counter( Pipeline.METRIC_NAME_WRITTEN, transformName );
    errorCounter = Metrics.counter( Pipeline.METRIC_NAME_ERROR, transformName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    inputRowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, transformName ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error in setup of adding window information to rows : " + e.getMessage() );
    throw new RuntimeException( "Error in setup of adding window information to rows", e );
  }
}
Example 4
Source File: PubsubMessageToKettleRowFn.java From kettle-beam with Apache License 2.0
@Setup
public void setUp() {
  try {
    inputCounter = Metrics.counter( "input", stepname );
    writtenCounter = Metrics.counter( "written", stepname );

    // Initialize Kettle Beam
    //
    BeamKettle.init( stepPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( "init", stepname ).inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in setup of pub/sub publish messages function", e );
    throw new RuntimeException( "Error in setup of pub/sub publish messages function", e );
  }
}
Example 5
Source File: LimitPayloadSize.java From gcp-ingestion with Mozilla Public License 2.0
/** Factory method to create mapper instance. */
public static MapWithFailures<PubsubMessage, PubsubMessage, PubsubMessage> of(int maxBytes) {
  final Counter countPayloadTooLarge =
      Metrics.counter(LimitPayloadSize.class, "payload_too_large");
  return MapElements.into(TypeDescriptor.of(PubsubMessage.class)).via((PubsubMessage msg) -> {
    msg = PubsubConstraints.ensureNonNull(msg);
    int numBytes = msg.getPayload().length;
    if (numBytes > maxBytes) {
      countPayloadTooLarge.inc();
      throw new PayloadTooLargeException("Message payload is " + numBytes
          + " bytes, larger than the configured limit of " + maxBytes);
    }
    return msg;
  }).exceptionsInto(TypeDescriptor.of(PubsubMessage.class))
      .exceptionsVia((ExceptionElement<PubsubMessage> ee) -> {
        try {
          throw ee.exception();
        } catch (PayloadTooLargeException e) {
          return FailureMessage.of(LimitPayloadSize.class.getSimpleName(), ee.element(),
              ee.exception());
        }
      });
}
Example 6
Source File: StringToKettleFn.java From kettle-beam with Apache License 2.0
@Setup
public void setUp() {
  try {
    inputCounter = Metrics.counter( "input", stepname );
    writtenCounter = Metrics.counter( "written", stepname );

    // Initialize Kettle Beam
    //
    BeamKettle.init( stepPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( "init", stepname ).inc();
  } catch ( Exception e ) {
    Metrics.counter( "error", stepname ).inc();
    LOG.error( "Error in setup of converting input data into Kettle rows : " + e.getMessage() );
    throw new RuntimeException( "Error in setup of converting input data into Kettle rows", e );
  }
}
Example 7
Source File: BeamJdbcAvroSchema.java From dbeam with Apache License 2.0
/** Generate Avro schema by reading one row. Expose Beam metrics via a Beam PTransform. */
public static Schema createSchema(
    final Pipeline pipeline, final JdbcExportArgs args, final Connection connection)
    throws Exception {
  final long startTime = System.nanoTime();
  final Schema generatedSchema = generateAvroSchema(args, connection);
  final long elapsedTimeSchema = (System.nanoTime() - startTime) / 1000000;
  LOGGER.info("Elapsed time to schema {} seconds", elapsedTimeSchema / 1000.0);

  final Counter cnt =
      Metrics.counter(BeamJdbcAvroSchema.class.getCanonicalName(), "schemaElapsedTimeMs");
  pipeline
      .apply(
          "ExposeSchemaCountersSeed",
          Create.of(Collections.singletonList(0)).withType(TypeDescriptors.integers()))
      .apply(
          "ExposeSchemaCounters",
          MapElements.into(TypeDescriptors.integers())
              .via(
                  v -> {
                    cnt.inc(elapsedTimeSchema);
                    return v;
                  }));
  return generatedSchema;
}
Example 8
Source File: HopRowToKVStringStringFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    inputCounter = Metrics.counter( Pipeline.METRIC_NAME_INPUT, transformName );
    writtenCounter = Metrics.counter( Pipeline.METRIC_NAME_WRITTEN, transformName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, transformName ).inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in setup of HopRow to KV<String,String> function", e );
    throw new RuntimeException( "Error in setup of HopRow to KV<String,String> function", e );
  }
}
Example 9
Source File: PublishStringsFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    readCounter = Metrics.counter( Pipeline.METRIC_NAME_READ, transformName );
    outputCounter = Metrics.counter( Pipeline.METRIC_NAME_OUTPUT, transformName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, transformName ).inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in setup of pub/sub publish messages function", e );
    throw new RuntimeException( "Error in setup of pub/sub publish messages function", e );
  }
}
Example 10
Source File: PublishMessagesFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    readCounter = Metrics.counter( Pipeline.METRIC_NAME_READ, transformName );
    outputCounter = Metrics.counter( Pipeline.METRIC_NAME_OUTPUT, transformName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    rowMeta = JsonRowMeta.fromJson( rowMetaJson );

    Metrics.counter( Pipeline.METRIC_NAME_INIT, transformName ).inc();
  } catch ( Exception e ) {
    numErrors.inc();
    LOG.error( "Error in setup of pub/sub publish messages function", e );
    throw new RuntimeException( "Error in setup of pub/sub publish messages function", e );
  }
}
Example 11
Source File: AssemblerFn.java From kettle-beam with Apache License 2.0
@Setup
public void setUp() {
  try {
    writtenCounter = Metrics.counter( "written", counterName );
    errorCounter = Metrics.counter( "error", counterName );

    // Initialize Kettle Beam
    //
    BeamKettle.init( stepPluginClasses, xpPluginClasses );

    outputRowMeta = JsonRowMeta.fromJson( outputRowMetaJson );
    leftKRowMeta = JsonRowMeta.fromJson( leftKRowMetaJson );
    leftVRowMeta = JsonRowMeta.fromJson( leftVRowMetaJson );
    rightVRowMeta = JsonRowMeta.fromJson( rightVRowMetaJson );

    Metrics.counter( "init", counterName ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error initializing assembling rows", e );
    throw new RuntimeException( "Error initializing assembling output KV<row, KV<row, row>>", e );
  }
}
Example 12
Source File: LateDataDroppingDoFnRunner.java From beam with Apache License 2.0
public LateDataFilter(
    WindowingStrategy<?, ?> windowingStrategy, TimerInternals timerInternals) {
  this.windowingStrategy = windowingStrategy;
  this.timerInternals = timerInternals;
  this.droppedDueToLateness =
      Metrics.counter(LateDataDroppingDoFnRunner.class, DROPPED_DUE_TO_LATENESS);
}
Example 13
Source File: GcsFileSystem.java From beam with Apache License 2.0
GcsFileSystem(GcsOptions options) {
  this.options = checkNotNull(options, "options");
  if (options.getGcsPerformanceMetrics()) {
    numCopies = Metrics.counter(GcsFileSystem.class, "num_copies");
    copyTimeMsec = Metrics.counter(GcsFileSystem.class, "copy_time_msec");
  }
}
Example 14
Source File: PipelineRunnerTest.java From beam with Apache License 2.0
@Test
@Category({NeedsRunner.class, UsesCommittedMetrics.class, UsesCounterMetrics.class})
public void testRunPTransform() {
  final String namespace = PipelineRunnerTest.class.getName();
  final Counter counter = Metrics.counter(namespace, "count");
  final PipelineResult result =
      PipelineRunner.fromOptions(p.getOptions())
          .run(
              new PTransform<PBegin, POutput>() {
                @Override
                public POutput expand(PBegin input) {
                  PCollection<Double> output =
                      input
                          .apply(Create.of(1, 2, 3, 4))
                          .apply("ScaleByTwo", MapElements.via(new ScaleFn<>(2.0, counter)));
                  PAssert.that(output).containsInAnyOrder(2.0, 4.0, 6.0, 8.0);
                  return output;
                }
              });

  // Checking counters to verify the pipeline actually ran.
  assertThat(
      result
          .metrics()
          .queryMetrics(
              MetricsFilter.builder()
                  .addNameFilter(MetricNameFilter.inNamespace(namespace))
                  .build())
          .getCounters(),
      hasItem(metricsResult(namespace, "count", "ScaleByTwo", 4L, true)));
}
Example 15
Source File: Query3.java From beam with Apache License 2.0
private JoinDoFn(String name, int maxAuctionsWaitingTime) {
  this.name = name;
  this.maxAuctionsWaitingTime = maxAuctionsWaitingTime;
  newAuctionCounter = Metrics.counter(name, "newAuction");
  newPersonCounter = Metrics.counter(name, "newPerson");
  newNewOutputCounter = Metrics.counter(name, "newNewOutput");
  newOldOutputCounter = Metrics.counter(name, "newOldOutput");
  oldNewOutputCounter = Metrics.counter(name, "oldNewOutput");
  fatalCounter = Metrics.counter(name, "fatal");
}
Example 16
Source File: TimestampFn.java From kettle-beam with Apache License 2.0
@Setup
public void setUp() {
  try {
    // Initialize Kettle Beam
    //
    BeamKettle.init( stepPluginClasses, xpPluginClasses );

    inputRowMeta = JsonRowMeta.fromJson( rowMetaJson );

    readCounter = Metrics.counter( "read", stepname );
    writtenCounter = Metrics.counter( "written", stepname );
    errorCounter = Metrics.counter( "error", stepname );

    fieldIndex = -1;
    if ( !getTimestamp && StringUtils.isNotEmpty( fieldName ) ) {
      fieldIndex = inputRowMeta.indexOfValue( fieldName );
      if ( fieldIndex < 0 ) {
        throw new RuntimeException( "Field '" + fieldName + "' couldn't be found in input : " + inputRowMeta.toString() );
      }
      fieldValueMeta = inputRowMeta.getValueMeta( fieldIndex );
    }

    Metrics.counter( "init", stepname ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error in setup of adding timestamp to rows : " + e.getMessage() );
    throw new RuntimeException( "Error setup of adding timestamp to rows", e );
  }
}
Example 17
Source File: KettleToBQTableRowFn.java From kettle-beam with Apache License 2.0
@Override
public TableRow apply( KettleRow inputRow ) {
  try {
    if ( rowMeta == null ) {
      readCounter = Metrics.counter( "read", counterName );
      outputCounter = Metrics.counter( "output", counterName );
      errorCounter = Metrics.counter( "error", counterName );

      // Initialize Kettle Beam
      //
      BeamKettle.init( stepPluginClasses, xpPluginClasses );

      rowMeta = JsonRowMeta.fromJson( rowMetaJson );

      simpleDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" );

      Metrics.counter( "init", counterName ).inc();
    }

    readCounter.inc();

    TableRow tableRow = new TableRow();
    for ( int i = 0; i < rowMeta.size(); i++ ) {
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
      Object valueData = inputRow.getRow()[ i ];
      if ( !valueMeta.isNull( valueData ) ) {
        switch ( valueMeta.getType() ) {
          case ValueMetaInterface.TYPE_STRING:
            tableRow.put( valueMeta.getName(), valueMeta.getString( valueData ) );
            break;
          case ValueMetaInterface.TYPE_INTEGER:
            tableRow.put( valueMeta.getName(), valueMeta.getInteger( valueData ) );
            break;
          case ValueMetaInterface.TYPE_DATE:
            Date date = valueMeta.getDate( valueData );
            String formattedDate = simpleDateFormat.format( date );
            tableRow.put( valueMeta.getName(), formattedDate );
            break;
          case ValueMetaInterface.TYPE_BOOLEAN:
            tableRow.put( valueMeta.getName(), valueMeta.getBoolean( valueData ) );
            break;
          case ValueMetaInterface.TYPE_NUMBER:
            tableRow.put( valueMeta.getName(), valueMeta.getNumber( valueData ) );
            break;
          default:
            throw new RuntimeException( "Data type conversion from Kettle to BigQuery TableRow not supported yet: " + valueMeta.toString() );
        }
      }
    }

    // Pass the row to the process context
    //
    outputCounter.inc();

    return tableRow;
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.info( "Conversion error KettleRow to BigQuery TableRow : " + e.getMessage() );
    throw new RuntimeException( "Error converting KettleRow to BigQuery TableRow", e );
  }
}
Example 18
Source File: HopKeyValueFn.java From hop with Apache License 2.0
@Setup
public void setUp() {
  try {
    readCounter = Metrics.counter( Pipeline.METRIC_NAME_READ, counterName );
    errorCounter = Metrics.counter( Pipeline.METRIC_NAME_ERROR, counterName );

    // Initialize Hop Beam
    //
    BeamHop.init( transformPluginClasses, xpPluginClasses );

    inputRowMeta = JsonRowMeta.fromJson( inputRowMetaJson );

    // Calculate key indexes
    //
    if ( keyFields.length == 0 ) {
      throw new HopException( "There are no group fields" );
    }
    keyIndexes = new int[ keyFields.length ];
    for ( int i = 0; i < keyFields.length; i++ ) {
      keyIndexes[ i ] = inputRowMeta.indexOfValue( keyFields[ i ] );
      if ( keyIndexes[ i ] < 0 ) {
        throw new HopException( "Unable to find group by field '" + keyFields[ i ] + "' in input " + inputRowMeta.toString() );
      }
    }

    // Calculate the value indexes
    //
    valueIndexes = new int[ valueFields.length ];
    for ( int i = 0; i < valueFields.length; i++ ) {
      valueIndexes[ i ] = inputRowMeta.indexOfValue( valueFields[ i ] );
      if ( valueIndexes[ i ] < 0 ) {
        throw new HopException( "Unable to find subject by field '" + valueFields[ i ] + "' in input " + inputRowMeta.toString() );
      }
    }

    // Now that we know everything, we can split the row...
    //
    Metrics.counter( Pipeline.METRIC_NAME_INIT, counterName ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error setup of splitting row into key and value", e );
    throw new RuntimeException( "Unable to setup of split row into key and value", e );
  }
}
Example 19
Source File: KettleKeyValueFn.java From kettle-beam with Apache License 2.0
@Setup
public void setUp() {
  try {
    readCounter = Metrics.counter( "read", counterName );
    errorCounter = Metrics.counter( "error", counterName );

    // Initialize Kettle Beam
    //
    BeamKettle.init( stepPluginClasses, xpPluginClasses );

    inputRowMeta = JsonRowMeta.fromJson( inputRowMetaJson );

    // Calculate key indexes
    //
    if ( keyFields.length == 0 ) {
      throw new KettleException( "There are no group fields" );
    }
    keyIndexes = new int[ keyFields.length ];
    for ( int i = 0; i < keyFields.length; i++ ) {
      keyIndexes[ i ] = inputRowMeta.indexOfValue( keyFields[ i ] );
      if ( keyIndexes[ i ] < 0 ) {
        throw new KettleException( "Unable to find group by field '" + keyFields[ i ] + "' in input " + inputRowMeta.toString() );
      }
    }

    // Calculate the value indexes
    //
    valueIndexes = new int[ valueFields.length ];
    for ( int i = 0; i < valueFields.length; i++ ) {
      valueIndexes[ i ] = inputRowMeta.indexOfValue( valueFields[ i ] );
      if ( valueIndexes[ i ] < 0 ) {
        throw new KettleException( "Unable to find subject by field '" + valueFields[ i ] + "' in input " + inputRowMeta.toString() );
      }
    }

    // Now that we know everything, we can split the row...
    //
    Metrics.counter( "init", counterName ).inc();
  } catch ( Exception e ) {
    errorCounter.inc();
    LOG.error( "Error setup of splitting row into key and value", e );
    throw new RuntimeException( "Unable to setup of split row into key and value", e );
  }
}
Example 20
Source File: ReduceFnRunner.java From beam with Apache License 2.0
public ReduceFnRunner(
    K key,
    WindowingStrategy<?, W> windowingStrategy,
    ExecutableTriggerStateMachine triggerStateMachine,
    StateInternals stateInternals,
    TimerInternals timerInternals,
    OutputWindowedValue<KV<K, OutputT>> outputter,
    @Nullable SideInputReader sideInputReader,
    ReduceFn<K, InputT, OutputT, W> reduceFn,
    @Nullable PipelineOptions options) {
  this.key = key;
  this.timerInternals = timerInternals;
  this.paneInfoTracker = new PaneInfoTracker(timerInternals);
  this.stateInternals = stateInternals;
  this.outputter = outputter;
  this.reduceFn = reduceFn;
  this.droppedDueToClosedWindow =
      Metrics.counter(ReduceFnRunner.class, DROPPED_DUE_TO_CLOSED_WINDOW);

  @SuppressWarnings("unchecked")
  WindowingStrategy<Object, W> objectWindowingStrategy =
      (WindowingStrategy<Object, W>) windowingStrategy;
  this.windowingStrategy = objectWindowingStrategy;

  this.nonEmptyPanes = NonEmptyPanes.create(this.windowingStrategy, this.reduceFn);

  // Note this may incur I/O to load persisted window set data.
  this.activeWindows = createActiveWindowSet();

  this.contextFactory =
      new ReduceFnContextFactory<>(
          key,
          reduceFn,
          this.windowingStrategy,
          stateInternals,
          this.activeWindows,
          timerInternals,
          sideInputReader,
          options);

  this.watermarkHold = new WatermarkHold<>(timerInternals, windowingStrategy);
  this.triggerRunner =
      new TriggerStateMachineRunner<>(
          triggerStateMachine,
          new TriggerStateMachineContextFactory<>(
              windowingStrategy.getWindowFn(), stateInternals, activeWindows));
}