org.apache.avro.generic.GenericContainer Java Examples
The following examples show how to use org.apache.avro.generic.GenericContainer.
Each example notes its source file, project, and license.
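In Avro, GenericContainer is the root interface of the in-memory data model: any datum that can report its own Schema via getSchema(). GenericRecord, GenericData.Array, enum and fixed values, and generated SpecificRecord classes all implement it, which is what the examples below rely on. A minimal sketch of that contract (the User schema here is a hypothetical example, not taken from any of the projects below):

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericContainer;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;

    public class GenericContainerDemo {
        public static void main(String[] args) {
            Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
                    + "{\"name\":\"name\",\"type\":\"string\"}]}");
            GenericRecord user = new GenericData.Record(schema);
            user.put("name", "alice");
            GenericContainer container = user; // GenericRecord extends GenericContainer
            System.out.println(container.getSchema().getFullName()); // prints "User"
        }
    }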
Example #1
Source File: AvroSchemaUtils.java From apicurio-registry with Apache License 2.0

static Schema getSchema(Object object) {
    if (object == null) {
        return primitiveSchemas.get("Null");
    } else if (object instanceof Boolean) {
        return primitiveSchemas.get("Boolean");
    } else if (object instanceof Integer) {
        return primitiveSchemas.get("Integer");
    } else if (object instanceof Long) {
        return primitiveSchemas.get("Long");
    } else if (object instanceof Float) {
        return primitiveSchemas.get("Float");
    } else if (object instanceof Double) {
        return primitiveSchemas.get("Double");
    } else if (object instanceof CharSequence) {
        return primitiveSchemas.get("String");
    } else if (object instanceof byte[]) {
        return primitiveSchemas.get("Bytes");
    } else if (object instanceof GenericContainer) {
        return ((GenericContainer) object).getSchema();
    } else {
        throw new IllegalArgumentException(
            "Unsupported Avro type. Supported types are null, Boolean, Integer, Long, "
                + "Float, Double, String, byte[], ReflectData and GenericContainer");
    }
}
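The GenericContainer branch is what lets this helper accept any Avro datum, since records, arrays, enums, and fixed values each carry their own schema, while primitives map to cached singleton schemas. A hedged usage sketch (assuming same-package access, since the method is package-private; someRecord stands for any GenericContainer instance):

    Schema longSchema   = AvroSchemaUtils.getSchema(42L);        // cached primitive schema
    Schema recordSchema = AvroSchemaUtils.getSchema(someRecord); // the datum's own schema via getSchema()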
Example #2
Source File: ExtractAvroPathsBuilder.java From kite with Apache License 2.0

@Override
protected boolean doProcess(Record inputRecord) {
    // Preconditions.checkState(ReadAvroBuilder.AVRO_MEMORY_MIME_TYPE.equals(inputRecord.getFirstValue(Fields.ATTACHMENT_MIME_TYPE)));
    GenericContainer datum = (GenericContainer) inputRecord.getFirstValue(Fields.ATTACHMENT_BODY);
    Preconditions.checkNotNull(datum);
    Preconditions.checkNotNull(datum.getSchema());
    Record outputRecord = inputRecord.copy();

    for (Map.Entry<String, Collection<String>> entry : stepMap.entrySet()) {
        String fieldName = entry.getKey();
        List<String> steps = (List<String>) entry.getValue();
        extractPath(datum, datum.getSchema(), fieldName, steps, outputRecord, 0);
    }

    // pass record to next command in chain:
    return getChild().process(outputRecord);
}
Example #3
Source File: ReadAvroBuilder.java From kite with Apache License 2.0

private Decoder prepare(InputStream in) throws IOException {
    Decoder decoder;
    if (isJson) {
        if (jsonDecoder == null) {
            jsonDecoder = DecoderFactory.get().jsonDecoder(writerSchema, in);
        } else {
            jsonDecoder.configure(in); // reuse for performance
        }
        decoder = jsonDecoder;
    } else {
        binaryDecoder = DecoderFactory.get().binaryDecoder(in, binaryDecoder); // reuse for performance
        decoder = binaryDecoder;
    }
    if (datumReader == null) { // reuse for performance
        Schema readSchema = readerSchema != null ? readerSchema : writerSchema;
        datumReader = new FastGenericDatumReader<GenericContainer>(writerSchema, readSchema);
        datumReader.setResolver(createResolver(writerSchema, readSchema));
    }
    return decoder;
}
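Note the reuse strategy: DecoderFactory.get().binaryDecoder(in, binaryDecoder) reconfigures the decoder passed in rather than allocating a new one, and the datum reader is likewise created once and cached, so per-record garbage stays low on hot decode paths.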
Example #4
Source File: ReadAvroBuilder.java From kite with Apache License 2.0

@Override
protected boolean doProcess(Record inputRecord, InputStream in) throws IOException {
    Record template = inputRecord.copy();
    removeAttachments(template);
    template.put(Fields.ATTACHMENT_MIME_TYPE, ReadAvroBuilder.AVRO_MEMORY_MIME_TYPE);
    Decoder decoder = prepare(in);
    try {
        while (true) {
            GenericContainer datum = datumReader.read(null, decoder);
            if (!extract(datum, template)) {
                return false;
            }
        }
    } catch (EOFException e) {
        ; // ignore
    } finally {
        in.close();
    }
    return true;
}
Example #5
Source File: ServerWriter.java From SPADE with GNU General Public License v3.0

public void writeRecord(GenericContainer genericContainer) throws Exception {
    /**
     * Publish the records in Kafka. Note how the serialization framework doesn't care about
     * the record type (any type from the union schema may be sent)
     */
    ProducerRecord<String, GenericContainer> record = new ProducerRecord<>(kafkaTopic, genericContainer);
    serverWriter.send(record, sendCallback); // asynchronous send
    if (reportingEnabled) {
        recordCount++;
        long currentTime = System.currentTimeMillis();
        if ((currentTime - lastReportedTime) >= reportEveryMs) {
            printStats();
            lastReportedTime = currentTime;
            lastReportedRecordCount = recordCount;
        }
    }
}
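For a ProducerRecord<String, GenericContainer> like the one above to leave the process, the producer must be configured with an Avro-aware value serializer. SPADE's actual configuration is not shown here; the sketch below assumes Confluent's KafkaAvroSerializer and a local schema registry:

    import java.util.Properties;
    import org.apache.avro.generic.GenericContainer;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.Producer;

    public class ProducerSetup {
        // Sketch only: the serializer class and URLs are assumptions, not from SPADE itself.
        static Producer<String, GenericContainer> createProducer() {
            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092");          // assumed broker address
            props.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer",
                "io.confluent.kafka.serializers.KafkaAvroSerializer"); // assumed Avro serializer
            props.put("schema.registry.url", "http://localhost:8081"); // used by KafkaAvroSerializer
            return new KafkaProducer<>(props);
        }
    }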
Example #6
Source File: Kafka.java From SPADE with GNU General Public License v3.0

protected int publishRecords(List<GenericContainer> genericContainers) {
    int recordCount = 0;
    if (genericContainers != null) {
        for (GenericContainer genericContainer : genericContainers) {
            // logger.log(Level.INFO, "Attempting to publish record {0}", genericContainer.toString());
            for (DataWriter dataWriter : dataWriters) {
                try {
                    dataWriter.writeRecord(genericContainer);
                    recordCount += 1;
                    // logger.log(Level.INFO, "Sent record: ({0})", recordCount);
                } catch (Exception exception) {
                    logger.log(Level.INFO, "Failed to publish record {0}", genericContainer.toString());
                    logger.log(Level.WARNING, "{0}", exception);
                }
            }
        }
    }
    return (recordCount / dataWriters.size());
}
Example #7
Source File: Kafka.java From SPADE with GNU General Public License v3.0

@Override
public boolean putEdge(AbstractEdge edge) {
    try {
        List<GenericContainer> recordsToPublish = new ArrayList<GenericContainer>();
        Edge.Builder edgeBuilder = Edge.newBuilder();
        edgeBuilder.setAnnotations(edge.getCopyOfAnnotations());
        edgeBuilder.setChildVertexHash(String.valueOf(edge.getChildVertex().bigHashCode()));
        edgeBuilder.setParentVertexHash(String.valueOf(edge.getParentVertex().bigHashCode()));
        edgeBuilder.setHash(String.valueOf(edge.bigHashCode()));
        Edge kafkaEdge = edgeBuilder.build();
        recordsToPublish.add(GraphElement.newBuilder().setElement(kafkaEdge).build());
        return publishRecords(recordsToPublish) > 0;
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Failed to publish edge : " + edge);
        return false;
    }
}
Example #8
Source File: AvroCodec.java From schema-evolution-samples with Apache License 2.0

private Schema getSchema(Object payload) {
    Schema schema = null;
    logger.debug("Obtaining schema for class {}", payload.getClass());
    if (GenericContainer.class.isAssignableFrom(payload.getClass())) {
        schema = ((GenericContainer) payload).getSchema();
        logger.debug("Avro type detected, using schema from object");
    } else {
        Integer id = localSchemaMap.get(payload.getClass().getName());
        if (id == null) {
            if (!properties.isDynamicSchemaGenerationEnabled()) {
                throw new SchemaNotFoundException(String.format("No schema found on local cache for %s", payload.getClass()));
            } else {
                Schema localSchema = ReflectData.get().getSchema(payload.getClass());
                id = schemaRegistryClient.register(localSchema);
            }
        }
        schema = schemaRegistryClient.fetch(id);
    }
    return schema;
}
Example #9
Source File: AvroSchemaUtils.java From apicurio-registry with Apache License 2.0

static Schema getSchema(Object object) {
    if (object == null) {
        return primitiveSchemas.get("Null");
    } else if (object instanceof Boolean) {
        return primitiveSchemas.get("Boolean");
    } else if (object instanceof Integer) {
        return primitiveSchemas.get("Integer");
    } else if (object instanceof Long) {
        return primitiveSchemas.get("Long");
    } else if (object instanceof Float) {
        return primitiveSchemas.get("Float");
    } else if (object instanceof Double) {
        return primitiveSchemas.get("Double");
    } else if (object instanceof CharSequence) {
        return primitiveSchemas.get("String");
    } else if (object instanceof byte[]) {
        return primitiveSchemas.get("Bytes");
    } else if (object instanceof GenericContainer) {
        return ((GenericContainer) object).getSchema();
    } else {
        throw new IllegalArgumentException(
            "Unsupported Avro type. Supported types are null, Boolean, Integer, Long, "
                + "Float, Double, String, byte[] and GenericContainer");
    }
}
Example #10
Source File: AvroDrillTable.java From Bats with Apache License 2.0

public AvroDrillTable(String storageEngineName,
                      FileSystemPlugin plugin,
                      SchemaConfig schemaConfig,
                      FormatSelection selection) {
    super(storageEngineName, plugin, schemaConfig.getUserName(), selection);
    List<Path> asFiles = selection.getAsFiles();
    Path path = asFiles.get(0);
    this.schemaConfig = schemaConfig;
    try {
        reader = new DataFileReader<>(new FsInput(path, plugin.getFsConf()), new GenericDatumReader<GenericContainer>());
    } catch (IOException e) {
        throw UserException.dataReadError(e).build(logger);
    }
}
Example #11
Source File: AvroRecordReader.java From Bats with Apache License 2.0

private DataFileReader<GenericContainer> getReader(final Path hadoop, final FileSystem fs) throws ExecutionSetupException {
    try {
        final UserGroupInformation ugi = ImpersonationUtil.createProxyUgi(this.opUserName, this.queryUserName);
        return ugi.doAs((PrivilegedExceptionAction<DataFileReader<GenericContainer>>) () ->
            new DataFileReader<>(new FsInput(hadoop, fs.getConf()), new GenericDatumReader<GenericContainer>()));
    } catch (IOException | InterruptedException e) {
        throw new ExecutionSetupException(
            String.format("Error in creating avro reader for file: %s", hadoop), e);
    }
}
Example #12
Source File: CDM.java From SPADE with GNU General Public License v3.0

/**
 * Creates a TCCDMDatum object with the given source and the value
 * @param value the CDM object instance
 * @param source the source value for that value
 * @return GenericContainer instance
 */
private GenericContainer buildTcCDMDatum(Object value, InstrumentationSource source) {
    TCCDMDatum.Builder tccdmDatumBuilder = TCCDMDatum.newBuilder();
    tccdmDatumBuilder.setDatum(value);
    tccdmDatumBuilder.setSource(source);
    tccdmDatumBuilder.setHostId(hostUUID);
    tccdmDatumBuilder.setSessionNumber(sessionNumber);
    tccdmDatumBuilder.setType(getRecordType(value));
    return tccdmDatumBuilder.build();
}
Example #13
Source File: AbstractAvroSerializer.java From jstorm with Apache License 2.0

public void write(Kryo kryo, Output output, GenericContainer record) {
    String fingerPrint = this.getFingerprint(record.getSchema());
    output.writeString(fingerPrint);
    GenericDatumWriter<GenericContainer> writer = new GenericDatumWriter<>(record.getSchema());
    BinaryEncoder encoder = EncoderFactory
        .get()
        .directBinaryEncoder(output, null);
    try {
        writer.write(record, encoder);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #14
Source File: AbstractAvroSerializer.java From jstorm with Apache License 2.0

public GenericContainer read(Kryo kryo, Input input, Class<GenericContainer> aClass) {
    Schema theSchema = this.getSchema(input.readString());
    GenericDatumReader<GenericContainer> reader = new GenericDatumReader<>(theSchema);
    Decoder decoder = DecoderFactory
        .get()
        .directBinaryDecoder(input, null);
    GenericContainer foo;
    try {
        foo = reader.read(null, decoder);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return foo;
}
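Examples #13 and #14 form a round trip: write prepends a schema fingerprint before the binary datum, and read resolves that fingerprint back to a Schema before decoding. A sketch of wiring this into Kryo, where AvroGenericSerializer stands for a hypothetical concrete subclass of AbstractAvroSerializer that maps fingerprints to schemas:

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;

    public class AvroKryoRoundTrip {
        public static void main(String[] args) {
            Kryo kryo = new Kryo();
            // AvroGenericSerializer is hypothetical: a concrete subclass of the
            // AbstractAvroSerializer above that resolves fingerprints to schemas.
            kryo.register(GenericData.Record.class, new AvroGenericSerializer());

            Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Ping\",\"fields\":["
                    + "{\"name\":\"id\",\"type\":\"long\"}]}");
            GenericData.Record record = new GenericData.Record(schema);
            record.put("id", 7L);

            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            Output output = new Output(bytes);
            kryo.writeObject(output, record);   // delegates to write(...) above
            output.close();

            Input input = new Input(new ByteArrayInputStream(bytes.toByteArray()));
            GenericData.Record copy = kryo.readObject(input, GenericData.Record.class);
            input.close();
            System.out.println(copy.get("id")); // 7
        }
    }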
Example #15
Source File: AvroRecordReader.java From Bats with Apache License 2.0

@Override
public int next() {
    final Stopwatch watch = Stopwatch.createStarted();
    if (reader == null) {
        throw new IllegalStateException("Avro reader is not open.");
    }
    if (!reader.hasNext()) {
        return 0;
    }
    int recordCount = 0;
    writer.allocate();
    writer.reset();
    try {
        for (GenericContainer container = null;
             recordCount < DEFAULT_BATCH_SIZE && reader.hasNext() && !reader.pastSync(end);
             recordCount++) {
            writer.setPosition(recordCount);
            container = reader.next(container);
            processRecord(container, container.getSchema());
        }
        writer.setValueCount(recordCount);
    } catch (IOException e) {
        throw new DrillRuntimeException(e);
    }
    logger.debug("Read {} records in {} ms", recordCount, watch.elapsed(TimeUnit.MILLISECONDS));
    return recordCount;
}
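Note the reuse idiom in the loop: reader.next(container) hands the previous datum back to Avro so it can be overwritten in place instead of allocating a fresh record for every row of the batch.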
Example #16
Source File: AvroRecordReader.java From Bats with Apache License 2.0

private void processRecord(final GenericContainer container, final Schema schema) {
    final Schema.Type type = schema.getType();
    switch (type) {
        case RECORD:
            process(container, schema, null, new MapOrListWriterImpl(writer.rootAsMap()), fieldSelection);
            break;
        default:
            throw new DrillRuntimeException("Root object must be record type. Found: " + type);
    }
}
Example #17
Source File: Kafka.java From SPADE with GNU General Public License v3.0

@Override
public boolean putVertex(AbstractVertex vertex) {
    try {
        List<GenericContainer> recordsToPublish = new ArrayList<GenericContainer>();
        Vertex.Builder vertexBuilder = Vertex.newBuilder();
        vertexBuilder.setAnnotations(vertex.getCopyOfAnnotations());
        vertexBuilder.setHash(String.valueOf(vertex.bigHashCode()));
        Vertex kafkaVertex = vertexBuilder.build();
        recordsToPublish.add(GraphElement.newBuilder().setElement(kafkaVertex).build());
        return publishRecords(recordsToPublish) > 0;
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Failed to publish vertex : " + vertex);
        return false;
    }
}
Example #18
Source File: AvroUtils.java From registry with Apache License 2.0

public static Schema computeSchema(Object input) {
    Schema schema = null;
    if (input instanceof GenericContainer) {
        schema = ((GenericContainer) input).getSchema();
    } else {
        schema = AvroUtils.getSchemaForPrimitives(input);
    }
    return schema;
}
Example #19
Source File: AvroConverter.java From apicurio-registry with Apache License 2.0

@Override
protected SchemaAndValue toSchemaAndValue(T result) {
    if (result instanceof GenericContainer) {
        GenericContainer container = (GenericContainer) result;
        Object value = container;
        Integer version = null; // TODO
        if (result instanceof NonRecordContainer) {
            @SuppressWarnings("rawtypes")
            NonRecordContainer nrc = (NonRecordContainer) result;
            value = nrc.getValue();
        }
        return avroData.toConnectData(container.getSchema(), value, version);
    }
    return new SchemaAndValue(null, result);
}
Example #20
Source File: ReadAvroContainerBuilder.java From kite with Apache License 2.0

protected boolean extract(GenericContainer datum, Record inputRecord) {
    incrementNumRecords();
    Record outputRecord = inputRecord.copy();
    outputRecord.put(Fields.ATTACHMENT_BODY, datum);
    // pass record to next command in chain:
    return getChild().process(outputRecord);
}
Example #21
Source File: WriteAvroToByteArrayBuilder.java From kite with Apache License 2.0

private Schema getSchema(GenericContainer datum, Schema lastSchema) {
    Schema schema = datum.getSchema();
    if (lastSchema != null && lastSchema != schema) {
        throw new MorphlineRuntimeException("Schemas must be identical: " + schema + ", lastSchema: " + lastSchema);
    }
    return schema;
}
Example #22
Source File: ExtractAvroTreeBuilder.java From kite with Apache License 2.0

@Override
protected boolean doProcess(Record inputRecord) {
    // Preconditions.checkState(ReadAvroBuilder.AVRO_MEMORY_MIME_TYPE.equals(inputRecord.getFirstValue(Fields.ATTACHMENT_MIME_TYPE)));
    GenericContainer datum = (GenericContainer) inputRecord.getFirstValue(Fields.ATTACHMENT_BODY);
    Preconditions.checkNotNull(datum);
    Preconditions.checkNotNull(datum.getSchema());
    Record outputRecord = inputRecord.copy();
    extractTree(datum, datum.getSchema(), outputRecord, outputFieldPrefix);
    // pass record to next command in chain:
    return getChild().process(outputRecord);
}
Example #23
Source File: ReadAvroParquetFileBuilder.java From kite with Apache License 2.0

private boolean extract(GenericContainer datum, Record inputRecord) {
    incrementNumRecords();
    Record outputRecord = inputRecord.copy();
    outputRecord.put(Fields.ATTACHMENT_BODY, datum);
    // pass record to next command in chain:
    return getChild().process(outputRecord);
}
Example #24
Source File: BasicCollector.java From ksql-fork-with-deep-learning-function with Apache License 2.0

@Override
public GenericContainer collectMetrics() {
    KsqlVersionMetrics metricsRecord = new KsqlVersionMetrics();
    metricsRecord.setTimestamp(timeUtils.nowInUnixTime());
    metricsRecord.setConfluentPlatformVersion(Version.getVersion());
    metricsRecord.setKsqlComponentType(moduleType.name());
    return metricsRecord;
}
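KsqlVersionMetrics is an Avro-generated class; generated record classes implement SpecificRecord, which extends GenericContainer, so the method can expose the specific record under the generic return type.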
Example #25
Source File: SimpleAvroArchiveBuilder.java From occurrence with Apache License 2.0

/**
 * Merges the content of sourceFS:sourcePath into targetFS:outputPath in a file called downloadKey.avro.
 */
public static void mergeToSingleAvro(final FileSystem sourceFS, FileSystem targetFS,
                                     String sourcePath, String targetPath, String downloadKey) throws IOException {
    Path outputPath = new Path(targetPath, downloadKey + AVRO_EXTENSION);
    ReflectDatumWriter<GenericContainer> rdw = new ReflectDatumWriter<>(GenericContainer.class);
    ReflectDatumReader<GenericContainer> rdr = new ReflectDatumReader<>(GenericContainer.class);
    boolean first = false;
    try (
        FSDataOutputStream zipped = targetFS.create(outputPath, true);
        DataFileWriter<GenericContainer> dfw = new DataFileWriter<>(rdw)
    ) {
        final Path inputPath = new Path(sourcePath);
        FileStatus[] hdfsFiles = sourceFS.listStatus(inputPath);
        for (FileStatus fs : hdfsFiles) {
            try (InputStream is = sourceFS.open(fs.getPath());
                 DataFileStream<GenericContainer> dfs = new DataFileStream<>(is, rdr)) {
                if (!first) {
                    dfw.setCodec(CodecFactory.deflateCodec(-1));
                    dfw.setFlushOnEveryBlock(false);
                    dfw.create(dfs.getSchema(), zipped);
                    first = true;
                }
                dfw.appendAllFrom(dfs, false);
            }
        }
        dfw.flush();
        dfw.close();
        zipped.flush();
    } catch (Exception ex) {
        LOG.error("Error combining Avro files", ex);
        throw Throwables.propagate(ex);
    }
}
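CodecFactory.deflateCodec(-1) selects the deflate codec at java.util.zip's default compression level, and because the writer is created from the first input file's schema, the merge assumes every source file shares that schema.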
Example #26
Source File: AvroNestedReader.java From pentaho-hadoop-shims with Apache License 2.0

/**
 * Processes a map at this point in the path.
 *
 * @param map the map to process
 * @param s the current schema at this point in the path
 * @param ignoreMissing true if null is to be returned for user fields that don't appear in the schema
 * @return the field value or null for out-of-bounds array indexes, non-existent map keys or unsupported avro types.
 * @throws KettleException if a problem occurs
 */
public Object convertToKettleValue( AvroInputField avroInputField, Map<Utf8, Object> map, Schema s,
                                    Schema defaultSchema, boolean ignoreMissing ) throws KettleException {
  if ( map == null ) {
    return null;
  }
  if ( avroInputField.getTempParts().size() == 0 ) {
    throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.MalformedPathMap" ) );
  }
  String part = avroInputField.getTempParts().remove( 0 );
  if ( !( part.charAt( 0 ) == '[' ) ) {
    throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.MalformedPathMap2", part ) );
  }
  String key = part.substring( 1, part.indexOf( ']' ) );
  if ( part.indexOf( ']' ) < part.length() - 1 ) {
    // more dimensions to the array/map
    part = part.substring( part.indexOf( ']' ) + 1, part.length() );
    avroInputField.getTempParts().add( 0, part );
  }
  Object value = map.get( new Utf8( key ) );
  if ( value == null ) {
    return null;
  }
  Schema valueType = s.getValueType();
  if ( valueType.getType() == Schema.Type.UNION ) {
    if ( value instanceof GenericContainer ) {
      // we can ask these things for their schema (covers
      // records, arrays, enums and fixed)
      valueType = ( (GenericContainer) value ).getSchema();
    } else {
      // either have a map or primitive here
      if ( value instanceof Map ) {
        // now have to look for the schema of the map
        Schema mapSchema = null;
        for ( Schema ts : valueType.getTypes() ) {
          if ( ts.getType() == Schema.Type.MAP ) {
            mapSchema = ts;
            break;
          }
        }
        if ( mapSchema == null ) {
          throw new KettleException( BaseMessages.getString( PKG, "AvroInput.Error.UnableToFindSchemaForUnionMap" ) );
        }
        valueType = mapSchema;
      } else {
        if ( avroInputField.getTempValueMeta().getType() != ValueMetaInterface.TYPE_STRING ) {
          // we have a two element union, where one element is the type
          // "null". So in this case we actually have just one type and can
          // output specific values of it (instead of using String as a
          // catch all for varying primitive types in the union)
          valueType = checkUnion( valueType );
        } else {
          // use the string representation of the value
          valueType = Schema.create( Schema.Type.STRING );
        }
      }
    }
  }
  // what have we got?
  if ( valueType.getType() == Schema.Type.RECORD ) {
    return convertToKettleValue( avroInputField, (GenericData.Record) value, valueType, defaultSchema, ignoreMissing );
  } else if ( valueType.getType() == Schema.Type.ARRAY ) {
    return convertToKettleValue( avroInputField, (GenericData.Array) value, valueType, defaultSchema, ignoreMissing );
  } else if ( valueType.getType() == Schema.Type.MAP ) {
    return convertToKettleValue( avroInputField, (Map<Utf8, Object>) value, valueType, defaultSchema, ignoreMissing );
  } else {
    // assume a primitive
    return getPrimitive( avroInputField, value, valueType );
  }
}
Example #27
Source File: JsonFileWriter.java From SPADE with GNU General Public License v3.0

@Override
public void writeRecord(GenericContainer genericContainer) throws Exception {
    datumWriter.write(genericContainer, jsonEncoder);
}
Example #28
Source File: FileWriter.java From SPADE with GNU General Public License v3.0

public void writeRecord(GenericContainer genericContainer) throws Exception {
    fileWriter.append(genericContainer);
    checkTransactions();
}
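fileWriter in this SPADE example is presumably an Avro DataFileWriter<GenericContainer>; a minimal sketch of constructing one and appending a record (the Entry schema and the output path are assumptions for illustration):

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericContainer;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;

    public class AvroFileWriterSetup {
        public static void main(String[] args) throws Exception {
            Schema schema = new Schema.Parser().parse(
                "{\"type\":\"record\",\"name\":\"Entry\",\"fields\":["
                    + "{\"name\":\"msg\",\"type\":\"string\"}]}");
            DataFileWriter<GenericContainer> fileWriter =
                new DataFileWriter<>(new GenericDatumWriter<GenericContainer>(schema));
            fileWriter.create(schema, new File("/tmp/records.avro")); // assumed output path
            GenericData.Record record = new GenericData.Record(schema);
            record.put("msg", "hello");
            fileWriter.append(record); // the same call writeRecord(...) above relies on
            fileWriter.close();
        }
    }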
Example #29
Source File: CDM.java From SPADE with GNU General Public License v3.0

@Override
/**
 * Calls the superclass's publishRecords method after updating the object
 * type count.
 */
protected int publishRecords(List<GenericContainer> genericContainers) {
    if (genericContainers != null) {
        for (GenericContainer genericContainer : genericContainers) {
            try {
                if (genericContainer instanceof TCCDMDatum) {
                    Object cdmObject = ((TCCDMDatum) genericContainer).getDatum();
                    if (cdmObject != null) {
                        if (cdmObject.getClass().equals(Subject.class)) {
                            SubjectType subjectType = ((Subject) cdmObject).getType();
                            if (subjectType != null) {
                                incrementStatsCount(subjectType.name());
                            }
                        } else if (cdmObject.getClass().equals(Event.class)) {
                            EventType eventType = ((Event) cdmObject).getType();
                            if (eventType != null) {
                                String keyName = eventType.name();
                                if (eventType.equals(EventType.EVENT_OTHER)) {
                                    CharSequence keyNameCharSeq = ((Event) cdmObject).getProperties().get(OPMConstants.EDGE_OPERATION);
                                    if (keyNameCharSeq != null) {
                                        keyName = String.valueOf(keyNameCharSeq);
                                    }
                                }
                                incrementStatsCount(keyName);
                            }
                        } else if (cdmObject.getClass().equals(SrcSinkObject.class)) {
                            SrcSinkObject sso = ((SrcSinkObject) cdmObject);
                            if (sso.getType().equals(SrcSinkType.SRCSINK_UNKNOWN)) {
                                String subtype = SrcSinkObject.class.getSimpleName();
                                if (sso.getBaseObject() != null) {
                                    if (sso.getBaseObject().getProperties() != null) {
                                        CharSequence subtypeCharSeq = sso.getBaseObject().getProperties()
                                            .get(OPMConstants.ARTIFACT_SUBTYPE);
                                        if (subtypeCharSeq != null) {
                                            subtype = subtypeCharSeq.toString();
                                        }
                                    }
                                }
                                incrementStatsCount(subtype);
                            }
                        } else if (cdmObject.getClass().equals(FileObject.class)) {
                            FileObjectType fileObjectType = ((FileObject) cdmObject).getType();
                            if (fileObjectType != null) {
                                incrementStatsCount(fileObjectType.name());
                            }
                        } else if (cdmObject.getClass().equals(IpcObject.class)) {
                            IpcObjectType ipcObjectType = ((IpcObject) cdmObject).getType();
                            if (ipcObjectType != null) {
                                incrementStatsCount(ipcObjectType.name());
                            }
                        } else {
                            incrementStatsCount(cdmObject.getClass().getSimpleName());
                        }
                    }
                }
            } catch (Exception e) {
                logger.log(Level.WARNING, "Failed to collect stats", e);
            }
        }
        return super.publishRecords(genericContainers);
    } else {
        return 0;
    }
}
Example #30
Source File: CDM.java From SPADE with GNU General Public License v3.0

private boolean publishSubjectAndPrincipal(AbstractVertex process) {
    if (isProcessVertex(process)) {
        String pid = process.getAnnotation(OPMConstants.PROCESS_PID);
        InstrumentationSource subjectSource = getInstrumentationSource(process.getAnnotation(OPMConstants.SOURCE));
        if (subjectSource != null) {
            // Agents cannot come from BEEP source. Added just in case.
            InstrumentationSource principalSource =
                subjectSource.equals(InstrumentationSource.SOURCE_LINUX_BEEP_TRACE)
                    ? InstrumentationSource.SOURCE_LINUX_SYSCALL_TRACE
                    : subjectSource;
            List<GenericContainer> objectsToPublish = new ArrayList<GenericContainer>();
            Principal principal = getPrincipalFromProcess(process);
            if (principal != null) {
                UUID principalUUID = principal.getUuid();
                if (!publishedPrincipals.contains(principalUUID)) {
                    objectsToPublish.add(buildTcCDMDatum(principal, principalSource));
                    publishedPrincipals.add(principalUUID);
                }
                Subject subject = getSubjectFromProcess(process, principalUUID);
                if (subject != null) {
                    objectsToPublish.add(buildTcCDMDatum(subject, subjectSource));
                    if (subject.getType().equals(SubjectType.SUBJECT_PROCESS)) { // not in case of unit
                        // The map is used only for getting the parent subject UUID i.e. equivalent
                        // of ppid in OPM and that can only be the containing process as in Audit
                        pidSubjectUUID.put(pid, subject.getUuid());
                    }
                    return publishRecords(objectsToPublish) > 0;
                } else {
                    return false;
                }
            } else {
                return false;
            }
        } else {
            logger.log(Level.WARNING, "Failed to publish '{0}' vertex because of missing/invalid source",
                new Object[]{process});
        }
    }
    return false;
}