Java Code Examples for com.streamsets.pipeline.api.Record#has()
The following examples show how to use
com.streamsets.pipeline.api.Record#has().
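Before the individual examples, here is a minimal, self-contained sketch of the pattern they all share: call Record#has() to confirm that a field path exists before calling Record#get() on it. The class name, helper method, and default-value behavior below are illustrative assumptions, not code from any of the listed projects.

import com.streamsets.pipeline.api.Field;
import com.streamsets.pipeline.api.Record;

public class RecordHasSketch {

  // Hypothetical helper: returns the field's value as a String when the path exists,
  // or a caller-supplied default when it does not. Field paths use the SDC syntax,
  // for example "/someField" or "/parentMap/childField".
  public static String getStringOrDefault(Record record, String fieldPath, String defaultValue) {
    if (record.has(fieldPath)) {
      // The guard above avoids reading a missing field.
      Field field = record.get(fieldPath);
      return field.getValueAsString();
    }
    return defaultValue;
  }
}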
Example 1
Source File: TensorFlowProcessor.java From datacollector with Apache License 2.0
private <T extends TensorDataTypeSupport> void writeRecord(
    Record r,
    List<String> fields,
    Buffer b,
    T dtSupport
) throws OnRecordErrorException {
  for (String fieldName : fields) {
    if (r.has(fieldName)) {
      dtSupport.writeField(b, r.get(fieldName));
    } else {
      // the field does not exist.
      throw new OnRecordErrorException(
          r,
          Errors.TENSOR_FLOW_03,
          r.getHeader().getSourceId(),
          fieldName
      );
    }
  }
}
Example 2
Source File: JdbcMongoDBOplogRecordReader.java From datacollector with Apache License 2.0
/**
 * Records from the MongoDB Oplog origin have a slightly different structure.
 *
 * Records for Insert have all data in the field "o", which is a map and contains all column data.
 * Records for Delete have the objectId in the field "o", which is a map and contains only the objectId.
 * Records for Update have a field "o2", which is a map and contains only the objectId,
 * and a field "o", which stores the updated columns and values.
 *
 * @param record
 * @param op
 * @param parameters
 * @param columnsToFields
 * @return
 */
@Override
public SortedMap<String, String> getColumnsToParameters(
    final Record record,
    int op,
    Map<String, String> parameters,
    Map<String, String> columnsToFields) {
  SortedMap<String, String> columnsToParameters = new TreeMap<>();
  for (Map.Entry<String, String> entry : columnsToFields.entrySet()) {
    String columnName = entry.getKey();
    String fieldPath = getFieldPath(columnName, columnsToFields, op);
    if (record.has(fieldPath)) {
      columnsToParameters.put(columnName, parameters.get(columnName));
    } else {
      LOG.trace("Record is missing a field for column {} for the operation code {}", columnName, op);
    }
  }
  return columnsToParameters;
}
Example 3
Source File: HiveTarget.java From datacollector with Apache License 2.0
private List<String> getMissingRequiredFields(Record record, Map<String, String> mappings) {
  List<String> missingFields = new ArrayList<>(mappings.size());
  for (Map.Entry<String, String> mapping : mappings.entrySet()) {
    if (!record.has(mapping.getValue())) {
      missingFields.add(mapping.getValue());
    }
  }
  return missingFields;
}
Example 4
Source File: RecordConverterUtil.java From datacollector with Apache License 2.0
public static Map<String, String> getTags(List<String> tagFields, Record record) throws OnRecordErrorException {
  Map<String, String> tags = new HashMap<>();
  for (String fieldPath : tagFields) {
    if (!record.has(fieldPath)) {
      continue;
    }

    Field tagField = record.get(fieldPath);
    switch (tagField.getType()) {
      case MAP:
        // fall through
      case LIST_MAP:
        for (Map.Entry<String, Field> entry : tagField.getValueAsMap().entrySet()) {
          tags.put(entry.getKey(), entry.getValue().getValueAsString());
        }
        break;
      case LIST:
        throw new OnRecordErrorException(Errors.INFLUX_08, fieldPath);
      default:
        tags.put(CollectdRecordConverter.stripPathPrefix(fieldPath), tagField.getValueAsString());
        break;
    }
  }
  return tags;
}
Example 5
Source File: CollectdRecordConverter.java From datacollector with Apache License 2.0
@VisibleForTesting
static long getTime(Record record) throws OnRecordErrorException {
  if (record.has(FIELD_PATH_PREFIX + TIME_HIRES)) {
    return fromCollectdHighResTime(record.get(FIELD_PATH_PREFIX + TIME_HIRES).getValueAsLong());
  } else if (record.has(FIELD_PATH_PREFIX + TIME)) {
    return record.get(FIELD_PATH_PREFIX + TIME).getValueAsLong();
  }
  throw new OnRecordErrorException(Errors.INFLUX_03);
}
Example 6
Source File: CollectdRecordConverter.java From datacollector with Apache License 2.0
@VisibleForTesting
static String getMeasurementBaseName(Record record) throws OnRecordErrorException {
  if (!record.has(FIELD_PATH_PREFIX + PLUGIN)) {
    throw new OnRecordErrorException(Errors.INFLUX_03);
  }
  return record.get(FIELD_PATH_PREFIX + PLUGIN).getValueAsString();
}
Example 7
Source File: RecordEL.java From datacollector with Apache License 2.0
@ElFunction(
    prefix = RECORD_EL_PREFIX,
    name = "exists",
    description = "Checks if the field represented by path 'fieldPath' exists in the record")
public static boolean exists(
    @ElParam("fieldPath") String fieldPath) {
  Record record = getRecordInContext();
  if (record != null) {
    return record.has(fieldPath);
  }
  return false;
}
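Assuming RECORD_EL_PREFIX resolves to "record", this function would be invoked in pipeline expressions as ${record:exists('/someField')}; the path '/someField' is only an illustrative placeholder.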
Example 8
Source File: GenericRecordConverter.java From datacollector with Apache License 2.0
private void verifyRequireFieldsPresent(Record record) throws OnRecordErrorException {
  if (!record.has(conf.measurementField)) {
    throw new OnRecordErrorException(Errors.INFLUX_07, conf.measurementField);
  }
  if (!conf.timeField.isEmpty() && !record.has(conf.timeField)) {
    throw new OnRecordErrorException(Errors.INFLUX_07, conf.timeField);
  }
}
Example 9
Source File: RedisTarget.java From datacollector with Apache License 2.0
private String getDeleteKey(Record record, RedisFieldMappingConfig parameters) {
  if (record.has(parameters.keyExpr)) {
    return record.get(parameters.keyExpr).getValueAsString();
  } else {
    return null;
  }
}
Example 10
Source File: HiveMetastoreUtil.java From datacollector with Apache License 2.0
/**
 * Get DataFormat from Metadata Record.
 * @param metadataRecord the metadata record
 * @return the label of dataFormat
 */
public static String getDataFormat(Record metadataRecord) throws HiveStageCheckedException {
  if (metadataRecord.get(SEP + VERSION).getValueAsInteger() == 1) {
    return DEFAULT_DATA_FORMAT;
  }
  if (metadataRecord.has(SEP + DATA_FORMAT)) {
    return metadataRecord.get(SEP + DATA_FORMAT).getValueAsString();
  }
  throw new HiveStageCheckedException(Errors.HIVE_17, DATA_FORMAT, metadataRecord);
}
Example 11
Source File: FieldZipProcessor.java From datacollector with Apache License 2.0
private boolean checkConfigs(Record record) throws OnRecordErrorException {
  for (FieldZipConfig zipConfig : fieldZipConfigs) {
    List<String> missingFields = Lists.newArrayList();
    List<String> nonListFields = Lists.newArrayList();

    if (!record.has(zipConfig.firstField)) {
      missingFields.add(zipConfig.firstField);
    } else if (!record.get(zipConfig.firstField).getType().isOneOf(Field.Type.LIST, Field.Type.LIST_MAP)) {
      nonListFields.add(zipConfig.firstField);
    }
    if (!record.has(zipConfig.secondField)) {
      missingFields.add(zipConfig.secondField);
    } else if (!record.get(zipConfig.secondField).getType().isOneOf(Field.Type.LIST, Field.Type.LIST_MAP)) {
      nonListFields.add(zipConfig.secondField);
    }

    switch (onStagePreConditionFailure) {
      case TO_ERROR:
        if (!missingFields.isEmpty()) {
          throw new OnRecordErrorException(Errors.ZIP_01, missingFields);
        } else if (!nonListFields.isEmpty()) {
          throw new OnRecordErrorException(Errors.ZIP_00, nonListFields);
        }
        break;
      case CONTINUE:
        if (!missingFields.isEmpty() || !nonListFields.isEmpty()) {
          return false;
        }
        break;
      default:
        throw new IllegalStateException("Invalid value for on stage pre-condition failure");
    }
  }
  return true;
}
Example 12
Source File: RequiredFieldsPredicate.java From datacollector with Apache License 2.0
@Override
public boolean evaluate(Record record) {
  boolean eval = true;
  if (requiredFields != null && !requiredFields.isEmpty()) {
    missingFields.clear();
    for (String field : requiredFields) {
      if (!record.has(field)) {
        missingFields.add(field);
      }
    }
    eval = missingFields.isEmpty();
  }
  return eval;
}
Example 13
Source File: HiveMetastoreUtil.java From datacollector with Apache License 2.0
/**
 * Get Location from the metadata record.
 * @param metadataRecord the metadata record
 * @return location
 * @throws HiveStageCheckedException if the location field does not exist in the metadata record.
 */
public static String getLocation(Record metadataRecord) throws HiveStageCheckedException {
  if (metadataRecord.has(SEP + LOCATION_FIELD)) {
    return metadataRecord.get(SEP + LOCATION_FIELD).getValueAsString();
  }
  throw new HiveStageCheckedException(Errors.HIVE_17, LOCATION_FIELD, metadataRecord);
}
Example 14
Source File: FieldFlattenerProcessor.java From datacollector with Apache License 2.0
private void flattenSpecificFields(Record record) throws OnRecordErrorException {
  Field flattenTarget = null;
  if (!config.flattenInPlace) {
    if (!record.has(config.flattenTargetField)) {
      throw new OnRecordErrorException(record, Errors.FIELD_FLATTENER_02, config.flattenTargetField);
    }

    flattenTarget = record.get(config.flattenTargetField);
    if (!flattenTarget.getType().isOneOf(Field.Type.MAP, Field.Type.LIST_MAP)) {
      throw new OnRecordErrorException(record, Errors.FIELD_FLATTENER_03, config.flattenTargetField, flattenTarget.getType().name());
    }
  }

  for (String flattenField : config.fields) {
    if (record.has(flattenField)) {
      final Map<String, Field> flattened = flattenEntireRecord(record.get(flattenField));
      if (config.flattenInPlace) {
        record.set(flattenField, Field.create(Field.Type.MAP, flattened));
      } else {
        appendFieldsToRecord(flattened, record, flattenTarget);
        if (config.removeFlattenedField) {
          record.delete(flattenField);
        }
      }
    } else {
      throw new OnRecordErrorException(record, Errors.FIELD_FLATTENER_01, flattenField);
    }
  }
}
Example 15
Source File: ElasticsearchSource.java From datacollector with Apache License 2.0
private ElasticsearchSourceOffset updateTimeOffset(ElasticsearchSourceOffset offset, Record record) {
  if (!conf.isIncrementalMode) {
    return offset;
  }
  final String offsetFieldPath = SOURCE_FIELD + conf.offsetField.replaceAll("\\.", "/");
  if (!record.has(offsetFieldPath)) {
    getContext().reportError(Errors.ELASTICSEARCH_24, offsetFieldPath);
  }
  offset.setTimeOffset(record.get(offsetFieldPath).getValueAsString());
  return offset;
}
Example 16
Source File: SampleExecutor.java From datacollector with Apache License 2.0
/**
 * Executes an action for a given record.
 *
 * @param record the record that will parametrize the action
 * @throws OnRecordErrorException when the action can't be executed
 */
private void execute(Record record) throws OnRecordErrorException {
  // This is a contrived example; normally you would perform an operation that could throw
  // an exception or produce an error condition. In that case you can throw an OnRecordErrorException
  // to send this record to the error pipeline with some details.
  if (!record.has("/someField")) {
    throw new OnRecordErrorException(Errors.SAMPLE_01, record, "exception detail message.");
  }

  // TODO: execute action
}
Example 17
Source File: HiveMetastoreUtil.java From datacollector with Apache License 2.0
public static void validateMetadataRecordForRecordTypeAndVersion(Record metadataRecord) throws HiveStageCheckedException {
  if (!metadataRecord.has(SEP + METADATA_RECORD_TYPE)) {
    throw new HiveStageCheckedException(Errors.HIVE_17, METADATA_RECORD_TYPE, metadataRecord);
  }
  if (!metadataRecord.has(SEP + VERSION)) {
    throw new HiveStageCheckedException(Errors.HIVE_17, VERSION, metadataRecord);
  }
}
Example 18
Source File: MongoDBProcessor.java From datacollector with Apache License 2.0
@Override
protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException {
  // Construct a document for the lookup filter
  Document query = new Document();
  for (MongoDBFieldColumnMapping mapping : configBean.fieldMapping) {
    // If the record doesn't have a field specified in the mapping, or the value is null,
    // exclude the field from the filter instead of sending the record to error.
    if (record.has(mapping.sdcField) && record.get(mapping.sdcField) != null) {
      query.append(mapping.keyName, record.get(mapping.sdcField).getValue());
    }
  }
  // If all of the filters are missing in the record, we cannot perform a lookup.
  if (query.isEmpty()) {
    throw new OnRecordErrorException(Errors.MONGODB_42, record);
  }

  Optional<List<Map<String, Field>>> entry;
  try {
    entry = cache.get(query);
  } catch (ExecutionException e) {
    Throwables.propagateIfPossible(e.getCause(), StageException.class);
    throw new IllegalStateException(e); // The cache loader shouldn't throw anything that isn't a StageException.
  }

  if (entry.isPresent()) {
    List<Map<String, Field>> values = entry.get();
    switch (configBean.multipleValuesBehavior) {
      case FIRST_ONLY:
        setFieldsInRecord(record, values.get(0));
        batchMaker.addRecord(record);
        break;
      case SPLIT_INTO_MULTIPLE_RECORDS:
        for (Map<String, Field> lookupItem : values) {
          Record newRecord = getContext().cloneRecord(record);
          setFieldsInRecord(newRecord, lookupItem);
          batchMaker.addRecord(newRecord);
        }
        break;
      default:
        throw new IllegalStateException("Unknown multiple value behavior: " + configBean.multipleValuesBehavior);
    }
  } else {
    // No results
    switch (configBean.missingValuesBehavior) {
      case SEND_TO_ERROR:
        LOG.error(Errors.MONGODB_40.getMessage(), query.toJson());
        errorRecordHandler.onError(new OnRecordErrorException(record, Errors.MONGODB_40, query.toJson()));
        break;
      case PASS_RECORD_ON:
        batchMaker.addRecord(record);
        break;
      default:
        throw new IllegalStateException("Unknown missing value behavior: " + configBean.missingValuesBehavior);
    }
  }
}
Example 19
Source File: RedisTarget.java From datacollector with Apache License 2.0
private void doBatch(Batch batch) throws StageException {
  Iterator<Record> records = batch.getRecords();
  List<ErrorRecord> tempRecord = new ArrayList<>();
  Pipeline p;

  try {
    p = jedis.pipelined();

    while (records.hasNext()) {
      Record record = records.next();
      for (RedisFieldMappingConfig parameters : conf.redisFieldMapping) {
        String key = null;
        // Special treatment is only given to deletes -
        // all other records will be handled as an upsert.
        if (OperationType.DELETE_CODE == getOperationFromHeader(record)) {
          key = getDeleteKey(record, parameters);
          doDeleteRecord(record, tempRecord, p, key);
          continue;
        }

        if (record.has(parameters.keyExpr)) {
          key = record.get(parameters.keyExpr).getValueAsString();
        }
        Field value = record.get(parameters.valExpr);

        if (key != null && value != null) {
          switch (parameters.dataType) {
            case STRING:
              doUpsertString(record, tempRecord, p, key, value);
              break;
            case LIST:
              doUpsertList(record, tempRecord, p, key, value);
              break;
            case SET:
              doUpsertSet(record, tempRecord, p, key, value);
              break;
            case HASH:
              doUpsertHash(record, tempRecord, p, key, value);
              break;
            default:
              LOG.error(Errors.REDIS_05.getMessage(), parameters.dataType);
              errorRecordHandler.onError(new OnRecordErrorException(record, Errors.REDIS_05, parameters.dataType));
              break;
          }
        } else {
          LOG.warn(Errors.REDIS_07.getMessage(), parameters.keyExpr, parameters.valExpr, record);
        }

        // set the expire time
        if (parameters.ttl > 0) {
          p.expire(key, parameters.ttl);
        }
      }
    }

    List<Object> results = p.syncAndReturnAll();

    int index = 0;
    for (Object result : results) {
      if (!("OK".equals(result) || Long.class.equals(result == null ? null : result.getClass()))) {
        LOG.error(
            Errors.REDIS_03.getMessage(),
            tempRecord.get(index).operation,
            tempRecord.get(index).key,
            tempRecord.get(index).value
        );
        errorRecordHandler.onError(new OnRecordErrorException(
            tempRecord.get(index).record,
            Errors.REDIS_03,
            tempRecord.get(index).operation,
            tempRecord.get(index).key,
            tempRecord.get(index).value,
            result.toString()
        ));
      }
      index++;
    }
    retries = 0;
  } catch (JedisException ex) {
    handleException(ex, batch, tempRecord);
  }
}
Example 20
Source File: ForceBulkWriter.java From datacollector with Apache License 2.0
private void writeAndFlushRecord(DataGenerator gen, Record record, OperationEnum op) throws IOException, DataGeneratorException {
  // Make a record with just the fields we need
  Record outRecord = context.createRecord(record.getHeader().getSourceId());
  LinkedHashMap<String, Field> map = new LinkedHashMap<>();

  for (Map.Entry<String, String> mapping : fieldMappings.entrySet()) {
    String sFieldName = mapping.getKey();
    String fieldPath = mapping.getValue();

    // If we're missing fields, skip them.
    if (!record.has(fieldPath)) {
      continue;
    }

    // We only need Id for deletes
    if (op == OperationEnum.delete && !("Id".equalsIgnoreCase(sFieldName))) {
      continue;
    }

    Field field = record.get(fieldPath);

    if (field.getValue() == null && (op == OperationEnum.update || op == OperationEnum.upsert)) {
      field = Field.create(NA);
    }

    switch (field.getType()) {
      case DATE:
        map.put(sFieldName, Field.create(dateFormat.format(field.getValue())));
        break;
      case TIME:
        map.put(sFieldName, Field.create(timeFormat.format(field.getValue())));
        break;
      case DATETIME:
        map.put(sFieldName, Field.create(datetimeFormat.format(field.getValue())));
        break;
      default:
        map.put(sFieldName, field);
        break;
    }
  }

  outRecord.set(Field.createListMap(map));
  gen.write(outRecord);
  gen.flush();
}