Java Code Examples for org.apache.hadoop.io.ArrayWritable#get()
The following examples show how to use org.apache.hadoop.io.ArrayWritable#get().
Each example is drawn from an open-source project; the source file, project, and license are noted above its code.
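Before the project examples, a minimal sketch of the API itself may help. get() returns the backing Writable[] array. Note that an ArrayWritable must know its element class before readFields() can be called, which is why the Hadoop javadoc recommends subclassing it when it is used as a MapReduce value type:

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class ArrayWritableGetDemo {

    // Subclass so the element class is available at deserialization time,
    // as the Hadoop javadoc suggests for reducer input/output values.
    public static class TextArrayWritable extends ArrayWritable {
        public TextArrayWritable() {
            super(Text.class);
        }
    }

    public static void main(String[] args) {
        ArrayWritable aw = new ArrayWritable(Text.class,
                new Writable[] { new Text("a"), new Text("b") });
        for (Writable w : aw.get()) { // get() returns the backing Writable[]
            System.out.println(w);    // prints "a", then "b"
        }
    }
}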
Example 1
Source File: ArrayWritableObjectInspector.java From parquet-mr with Apache License 2.0
@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        return arr.get()[((StructFieldImpl) fieldRef).getIndex()];
    }
    //since setStructFieldData and create return a list, getStructFieldData should be able to
    //handle list data. This is required when table serde is ParquetHiveSerDe and partition serde
    //is something else.
    if (data instanceof List) {
        return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example 2
Source File: TestParquetSerDe.java From parquet-mr with Apache License 2.0
public static boolean arrayWritableEquals(final ArrayWritable a1, final ArrayWritable a2) {
    final Writable[] a1Arr = a1.get();
    final Writable[] a2Arr = a2.get();
    if (a1Arr.length != a2Arr.length) {
        return false;
    }
    for (int i = 0; i < a1Arr.length; ++i) {
        if (a1Arr[i] instanceof ArrayWritable) {
            if (!(a2Arr[i] instanceof ArrayWritable)) {
                return false;
            }
            if (!arrayWritableEquals((ArrayWritable) a1Arr[i], (ArrayWritable) a2Arr[i])) {
                return false;
            }
        } else {
            if (!a1Arr[i].equals(a2Arr[i])) {
                return false;
            }
        }
    }
    return true;
}
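A quick usage sketch for the helper above (the values are invented, and JUnit's assertTrue/assertFalse are assumed to be statically imported): the comparison is element-wise and recurses into nested ArrayWritables.

ArrayWritable a1 = new ArrayWritable(Text.class, new Writable[] { new Text("x"), new Text("y") });
ArrayWritable a2 = new ArrayWritable(Text.class, new Writable[] { new Text("x"), new Text("y") });
assertTrue(arrayWritableEquals(a1, a2));  // same length, equal elements
a2.get()[1] = new Text("z");
assertFalse(arrayWritableEquals(a1, a2)); // differing element detected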
Example 3
Source File: AbstractSpreadSheetDocumentRecordWriter.java From hadoopoffice with Apache License 2.0
/**
 * Write SpreadSheetDAO into a table document. Note this does not necessarily mean it is
 * already written in the OutputStream, but usually the in-memory representation.
 * @param key is ignored
 * @param value is a SpreadSheet Cell to be inserted into the table document
 */
@Override
public synchronized void write(NullWritable key, K value) throws IOException {
    try {
        if (value == null) {
            return;
        }
        if (value instanceof ArrayWritable) {
            ArrayWritable row = (ArrayWritable) value;
            Writable[] rowCellDAO = row.get();
            for (int i = 0; i < rowCellDAO.length; i++) {
                this.officeWriter.write(rowCellDAO[i]);
            }
        } else {
            this.officeWriter.write(value);
        }
    } catch (OfficeWriterException e) {
        LOG.error(e);
    }
}
Example 4
Source File: HoodieRealtimeRecordReaderUtils.java From hudi with Apache License 2.0
/**
 * Prints a JSON representation of the ArrayWritable for easier debuggability.
 */
public static String arrayWritableToString(ArrayWritable writable) {
    if (writable == null) {
        return "null";
    }
    StringBuilder builder = new StringBuilder();
    Writable[] values = writable.get();
    builder.append("\"values_" + Math.random() + "_" + values.length + "\": {");
    int i = 0;
    for (Writable w : values) {
        if (w instanceof ArrayWritable) {
            builder.append(arrayWritableToString((ArrayWritable) w)).append(",");
        } else {
            builder.append("\"value" + i + "\":\"" + w + "\"").append(",");
            if (w == null) {
                builder.append("\"type" + i + "\":\"unknown\"").append(",");
            } else {
                builder.append("\"type" + i + "\":\"" + w.getClass().getSimpleName() + "\"").append(",");
            }
        }
        i++;
    }
    builder.deleteCharAt(builder.length() - 1);
    builder.append("}");
    return builder.toString();
}
Example 5
Source File: StandardParquetHiveMapInspector.java From parquet-mr with Apache License 2.0
@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] mapContainer = ((ArrayWritable) data).get();
        if (mapContainer == null || mapContainer.length == 0) {
            return null;
        }
        final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
        for (final Writable obj : mapArray) {
            final ArrayWritable mapObj = (ArrayWritable) obj;
            final Writable[] arr = mapObj.get();
            if (key.equals(arr[0])) {
                return arr[1];
            }
        }
        return null;
    }
    if (data instanceof Map) {
        return ((Map) data).get(key);
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
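The inspector above relies on the nested layout the Parquet-Hive bindings use for maps: the outer ArrayWritable wraps a single entry list, whose elements are two-element key/value pairs. The following hand-built, one-entry map is purely for illustration (the valueClass constructor argument only matters for deserialization, so Writable.class is fine here):

// pair = [key, value]
ArrayWritable pair = new ArrayWritable(Writable.class,
        new Writable[] { new Text("a"), new IntWritable(1) });
// the entry list holding all key/value pairs
ArrayWritable entries = new ArrayWritable(ArrayWritable.class, new Writable[] { pair });
// the map container: a single element wrapping the entry list
ArrayWritable mapData = new ArrayWritable(ArrayWritable.class, new Writable[] { entries });
// getMapValueElement(mapData, new Text("a")) would return the IntWritable holding 1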
Example 6
Source File: TestWritableUtil.java From datawave with Apache License 2.0
/**
 * Reads a collection of Strings back from a DataInput.
 *
 * @param input
 * @return collection of strings
 * @throws IOException
 */
public static Collection<String> readCollection(DataInput input) throws IOException {
    ArrayWritable aw = new ArrayWritable(Text.class);
    aw.readFields(input);
    Collection<String> coll = new LinkedList<>();
    Writable[] arr = aw.get();
    for (int i = 0; i < arr.length; ++i) {
        coll.add(arr[i].toString());
    }
    return coll;
}
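For context, the write side that this helper reads back could look like the sketch below; the method name writeCollection is an assumption, not taken from the datawave source.

public static void writeCollection(Collection<String> coll, DataOutput output) throws IOException {
    Text[] texts = new Text[coll.size()];
    int i = 0;
    for (String s : coll) {
        texts[i++] = new Text(s);
    }
    // ArrayWritable.write() emits the element count followed by each element,
    // which is exactly what readCollection's readFields() consumes
    new ArrayWritable(Text.class, texts).write(output);
}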
Example 7
Source File: TypedBytesWritableOutput.java From hadoop with Apache License 2.0
public void writeArray(ArrayWritable aw) throws IOException {
    Writable[] writables = aw.get();
    out.writeVectorHeader(writables.length);
    for (Writable writable : writables) {
        write(writable);
    }
}
Example 8
Source File: TypedBytesWritableOutput.java From big-c with Apache License 2.0
public void writeArray(ArrayWritable aw) throws IOException {
    Writable[] writables = aw.get();
    out.writeVectorHeader(writables.length);
    for (Writable writable : writables) {
        write(writable);
    }
}
Example 9
Source File: ParquetHiveSerDe.java From parquet-mr with Apache License 2.0
@Override
public Writable serialize(final Object obj, final ObjectInspector objInspector) throws SerDeException {
    if (!objInspector.getCategory().equals(Category.STRUCT)) {
        throw new SerDeException("Cannot serialize " + objInspector.getCategory() + ". Can only serialize a struct");
    }
    final ArrayWritable serializeData = createStruct(obj, (StructObjectInspector) objInspector);
    serializedSize = serializeData.get().length;
    status = LAST_OPERATION.SERIALIZE;
    return serializeData;
}
Example 10
Source File: AbstractParquetMapInspector.java From parquet-mr with Apache License 2.0
@Override
public Map<?, ?> getMap(final Object data) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] mapContainer = ((ArrayWritable) data).get();
        if (mapContainer == null || mapContainer.length == 0) {
            return null;
        }
        final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
        final Map<Writable, Writable> map = new HashMap<Writable, Writable>();
        for (final Writable obj : mapArray) {
            final ArrayWritable mapObj = (ArrayWritable) obj;
            final Writable[] arr = mapObj.get();
            map.put(arr[0], arr[1]);
        }
        return map;
    }
    if (data instanceof Map) {
        return (Map) data;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example 11
Source File: ArrayWritableObjectInspector.java From parquet-mr with Apache License 2.0
@Override
public List<Object> getStructFieldsDataAsList(final Object data) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        final Object[] arrWritable = arr.get();
        return new ArrayList<Object>(Arrays.asList(arrWritable));
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example 12
Source File: ArrayWritableObjectInspector.java From indexr with Apache License 2.0
@Override
public List<Object> getStructFieldsDataAsList(final Object data) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        final Object[] arrWritable = arr.get();
        return new ArrayList<Object>(Arrays.asList(arrWritable));
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example 13
Source File: ParquetRecordReaderWrapper.java From parquet-mr with Apache License 2.0
@Override
public boolean next(final Void key, final ArrayWritable value) throws IOException {
    if (eof) {
        return false;
    }
    try {
        if (firstRecord) { // key & value are already read.
            firstRecord = false;
        } else if (!realReader.nextKeyValue()) {
            eof = true; // strictly not required, just for consistency
            return false;
        }
        final ArrayWritable tmpCurValue = realReader.getCurrentValue();
        if (value != tmpCurValue) {
            final Writable[] arrValue = value.get();
            final Writable[] arrCurrent = tmpCurValue.get();
            // note: value was already dereferenced via value.get() above, so the
            // null check below can only be reached with a non-null value
            if (value != null && arrValue.length == arrCurrent.length) {
                System.arraycopy(arrCurrent, 0, arrValue, 0, arrCurrent.length);
            } else {
                if (arrValue.length != arrCurrent.length) {
                    throw new IOException("DeprecatedParquetHiveInput : size of object differs. Value"
                        + " size : " + arrValue.length + ", Current Object size : " + arrCurrent.length);
                } else {
                    throw new IOException("DeprecatedParquetHiveInput can not support RecordReaders that"
                        + " don't return same key & value & value is null");
                }
            }
        }
        return true;
    } catch (final InterruptedException e) {
        throw new IOException(e);
    }
}
Example 14
Source File: UtilES.java From deep-spark with Apache License 2.0
private static <T> Object subDocumentListCase(Type type, ArrayWritable arrayWritable)
        throws IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException {
    ParameterizedType listType = (ParameterizedType) type;
    Class<?> listClass = (Class<?>) listType.getActualTypeArguments()[0];
    List list = new ArrayList();
    Writable[] writetable = arrayWritable.get();
    for (int i = 0; i < writetable.length; i++) {
        list.add(getObjectFromJson(listClass, (LinkedMapWritable) writetable[i]));
    }
    return list;
}
Example 15
Source File: DataWritableWriter.java From parquet-mr with Apache License 2.0
private void writeData(final ArrayWritable arr, final GroupType type) {
    if (arr == null) {
        return;
    }
    final int fieldCount = type.getFieldCount();
    Writable[] values = arr.get();
    for (int field = 0; field < fieldCount; ++field) {
        final Type fieldType = type.getType(field);
        final String fieldName = fieldType.getName();
        final Writable value = values[field];
        if (value == null) {
            continue;
        }
        recordConsumer.startField(fieldName, field);
        if (fieldType.isPrimitive()) {
            writePrimitive(value);
        } else {
            recordConsumer.startGroup();
            if (value instanceof ArrayWritable) {
                if (fieldType.asGroupType().getRepetition().equals(Type.Repetition.REPEATED)) {
                    writeArray((ArrayWritable) value, fieldType.asGroupType());
                } else {
                    writeData((ArrayWritable) value, fieldType.asGroupType());
                }
            } else if (value != null) {
                throw new ParquetEncodingException("This should be an ArrayWritable or MapWritable: " + value);
            }
            recordConsumer.endGroup();
        }
        recordConsumer.endField(fieldName, field);
    }
}
Example 16
Source File: DeepParquetHiveMapInspector.java From parquet-mr with Apache License 2.0
@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] mapContainer = ((ArrayWritable) data).get();
        if (mapContainer == null || mapContainer.length == 0) {
            return null;
        }
        final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
        for (final Writable obj : mapArray) {
            final ArrayWritable mapObj = (ArrayWritable) obj;
            final Writable[] arr = mapObj.get();
            if (key.equals(arr[0])
                    || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(arr[0]))
                    || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(arr[0]))) {
                return arr[1];
            }
        }
        return null;
    }
    if (data instanceof Map) {
        final Map<?, ?> map = (Map<?, ?>) data;
        if (map.containsKey(key)) {
            return map.get(key);
        }
        for (final Map.Entry<?, ?> entry : map.entrySet()) {
            if (key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(entry.getKey()))
                    || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(entry.getKey()))) {
                return entry.getValue();
            }
        }
        return null;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Example 17
Source File: PriorityQueueWritable.java From laser with Apache License 2.0
public void readFields(DataInput in) throws IOException {
    ArrayWritable arr = new ArrayWritable(IntDoublePairWritable.class);
    arr.readFields(in);
    queue = new PriorityQueue(arr.get());
}
Example 18
Source File: IndexRRecordWriter.java From indexr with Apache License 2.0
@Override
public void write(Writable w) throws IOException {
    ArrayWritable datas = (ArrayWritable) w;
    for (int colId = 0; colId < sqlTypes.length; colId++) {
        SQLType type = sqlTypes[colId];
        Writable currentValue = datas.get()[colId];
        switch (type) {
            case INT:
                if (currentValue == null) {
                    rowBuilder.appendInt(0);
                } else {
                    rowBuilder.appendInt(((IntWritable) currentValue).get());
                }
                break;
            case BIGINT:
                if (currentValue == null) {
                    rowBuilder.appendLong(0L);
                } else {
                    rowBuilder.appendLong(((LongWritable) currentValue).get());
                }
                break;
            case FLOAT:
                if (currentValue == null) {
                    rowBuilder.appendFloat(0f);
                } else {
                    rowBuilder.appendFloat(((FloatWritable) currentValue).get());
                }
                break;
            case DOUBLE:
                if (currentValue == null) {
                    rowBuilder.appendDouble(0d);
                } else {
                    rowBuilder.appendDouble(((DoubleWritable) currentValue).get());
                }
                break;
            case VARCHAR:
                if (currentValue == null) {
                    rowBuilder.appendString("");
                } else {
                    Text v = (Text) currentValue;
                    rowBuilder.appendUTF8String(v.getBytes(), 0, v.getLength());
                }
                break;
            case DATE:
                if (currentValue == null) {
                    rowBuilder.appendLong(0);
                } else {
                    rowBuilder.appendLong(DateTimeUtil.getEpochMillisecond(((DateWritable) currentValue).get()));
                }
                break;
            case DATETIME:
                if (currentValue == null) {
                    rowBuilder.appendLong(0);
                } else {
                    rowBuilder.appendLong(DateTimeUtil.getEpochMillisecond(((TimestampWritable) currentValue).getTimestamp()));
                }
                break;
            default:
                throw new IOException("can't recognize this type [" + type + "]");
        }
    }
    segmentGen.add(rowBuilder.buildAndReset());
}
Example 19
Source File: RealtimeCompactedRecordReader.java From hudi with Apache License 2.0
@Override
public boolean next(NullWritable aVoid, ArrayWritable arrayWritable) throws IOException {
    // Call the underlying parquetReader.next - which may replace the passed in ArrayWritable
    // with a new block of values
    boolean result = this.parquetReader.next(aVoid, arrayWritable);
    if (!result) {
        // if the result is false, then there are no more records
        return false;
    } else {
        // TODO(VC): Right now, we assume all records in log, have a matching base record. (which
        // would be true until we have a way to index logs too)
        // return from delta records map if we have some match.
        String key = arrayWritable.get()[HoodieInputFormatUtils.HOODIE_RECORD_KEY_COL_POS].toString();
        if (deltaRecordMap.containsKey(key)) {
            // TODO(NA): Invoke preCombine here by converting arrayWritable to Avro. This is required since the
            // deltaRecord may not be a full record and needs values of columns from the parquet
            Option<GenericRecord> rec;
            if (usesCustomPayload) {
                rec = deltaRecordMap.get(key).getData().getInsertValue(getWriterSchema());
            } else {
                rec = deltaRecordMap.get(key).getData().getInsertValue(getReaderSchema());
            }
            if (!rec.isPresent()) {
                // If the record is not present, this is a delete record using an empty payload so skip this base record
                // and move to the next record
                return next(aVoid, arrayWritable);
            }
            GenericRecord recordToReturn = rec.get();
            if (usesCustomPayload) {
                // If using a custom payload, return only the projection fields. The readerSchema is a schema derived from
                // the writerSchema with only the projection fields
                recordToReturn = HoodieAvroUtils.rewriteRecordWithOnlyNewSchemaFields(rec.get(), getReaderSchema());
            }
            // we assume, a later safe record in the log, is newer than what we have in the map &
            // replace it. Since we want to return an arrayWritable which is the same length as the elements in the latest
            // schema, we use writerSchema to create the arrayWritable from the latest generic record
            ArrayWritable aWritable = (ArrayWritable) HoodieRealtimeRecordReaderUtils.avroToArrayWritable(recordToReturn, getHiveSchema());
            Writable[] replaceValue = aWritable.get();
            if (LOG.isDebugEnabled()) {
                LOG.debug(String.format("key %s, base values: %s, log values: %s", key,
                    HoodieRealtimeRecordReaderUtils.arrayWritableToString(arrayWritable),
                    HoodieRealtimeRecordReaderUtils.arrayWritableToString(aWritable)));
            }
            Writable[] originalValue = arrayWritable.get();
            try {
                System.arraycopy(replaceValue, 0, originalValue, 0, originalValue.length);
                arrayWritable.set(originalValue);
            } catch (RuntimeException re) {
                LOG.error("Got exception when doing array copy", re);
                LOG.error("Base record :" + HoodieRealtimeRecordReaderUtils.arrayWritableToString(arrayWritable));
                LOG.error("Log record :" + HoodieRealtimeRecordReaderUtils.arrayWritableToString(aWritable));
                String errMsg = "Base-record :" + HoodieRealtimeRecordReaderUtils.arrayWritableToString(arrayWritable)
                    + " ,Log-record :" + HoodieRealtimeRecordReaderUtils.arrayWritableToString(aWritable)
                    + " ,Error :" + re.getMessage();
                throw new RuntimeException(errMsg, re);
            }
        }
        return true;
    }
}
Example 20
Source File: DiscoveryIteratorTest.java From datawave with Apache License 2.0
@Test
public void testReverseIndex() throws Throwable {
    Connector con = new InMemoryInstance("DiscoveryIteratorTest").getConnector("root", new PasswordToken(""));
    con.tableOperations().create("reverseIndex");
    writeSample(con.createBatchWriter("reverseIndex",
        new BatchWriterConfig().setMaxLatency(0, TimeUnit.SECONDS).setMaxMemory(0).setMaxWriteThreads(1)), true);
    Scanner s = con.createScanner("reverseIndex", new Authorizations("FOO"));
    IteratorSetting setting = new IteratorSetting(50, DiscoveryIterator.class);
    setting.addOption(DiscoveryLogic.REVERSE_INDEX, "true");
    s.addScanIterator(setting);
    s.setRange(new Range());

    Iterator<Map.Entry<Key,Value>> itr = s.iterator();
    assertTrue(itr.hasNext());
    Map.Entry<Key,Value> e = itr.next();
    assertFalse(itr.hasNext());

    Key key = e.getKey();
    assertEquals("mret", key.getRow().toString());
    assertEquals("field", key.getColumnFamily().toString());
    // see DiscoveryIterator for why this has a max unsigned char tacked on the end
    assertEquals("20130101\uffff", key.getColumnQualifier().toString());

    Value value = e.getValue();
    assertTrue(value.getSize() > 0);

    DataInputBuffer in = new DataInputBuffer();
    in.reset(value.get(), value.getSize());
    ArrayWritable valWrapper = new ArrayWritable(DiscoveredThing.class);
    valWrapper.readFields(in);
    Writable[] values = valWrapper.get();
    assertEquals(3, values.length);

    Set<String> types = Sets.newHashSet("t1", "t2", "t3");
    for (int i = 0; i < 3; ++i) {
        DiscoveredThing thing = (DiscoveredThing) values[i];
        assertEquals("term", thing.getTerm());
        assertEquals("field", thing.getField());
        assertTrue(types.remove(thing.getType()));
        assertEquals("20130101", thing.getDate());
        assertEquals("FOO", thing.getColumnVisibility());
        assertEquals(240L, thing.getCount());
    }
}