org.apache.spark.sql.catalyst.util.ArrayData Java Examples
The following examples show how to use org.apache.spark.sql.catalyst.util.ArrayData. They are drawn from open-source projects, primarily Apache Iceberg.
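ArrayData is Catalyst's internal, type-erased representation of an array value: it exposes numElements(), isNullAt(int), typed getters such as getUTF8String(int), and an untyped get(int, DataType). GenericArrayData is the simplest concrete implementation and UnsafeArrayData is the binary one. Before the project examples, here is a minimal, self-contained sketch of building and reading an ArrayData (not taken from any project below; the class name ArrayDataSketch is only illustrative):
import org.apache.spark.sql.catalyst.util.ArrayData;
import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.unsafe.types.UTF8String;

public class ArrayDataSketch {
  public static void main(String[] args) {
    // Catalyst stores strings internally as UTF8String, so convert before wrapping.
    ArrayData array = new GenericArrayData(new Object[] {
        UTF8String.fromString("a"), UTF8String.fromString("b"), null});

    // Typed accessors, as used by the writers and test helpers below.
    for (int i = 0; i < array.numElements(); i += 1) {
      if (array.isNullAt(i)) {
        System.out.println(i + " -> null");
      } else {
        System.out.println(i + " -> " + array.getUTF8String(i));
      }
    }

    // The untyped accessor takes the element's Spark DataType.
    Object first = array.get(0, DataTypes.StringType);
    System.out.println("first element: " + first);
  }
}
The readers and writers in the examples below produce and consume ArrayData through this same interface, whether the concrete class is GenericArrayData or UnsafeArrayData.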
Example #1
Source File: SparkOrcWriter.java, from iceberg (Apache License 2.0)
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    MapData map = data.getMap(column);
    ArrayData key = map.keyArray();
    ArrayData value = map.valueArray();
    MapColumnVector cv = (MapColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.keys.ensureSize(cv.childCount, true);
    cv.values.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      int pos = (int) (e + cv.offsets[rowId]);
      keyConverter.addValue(pos, e, key, cv.keys);
      valueConverter.addValue(pos, e, value, cv.values);
    }
  }
}
Example #2
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEquals(String context, MapType map,
                                 MapData expected, MapData actual) {
  Assert.assertEquals("Should have the same number of elements",
      expected.numElements(), actual.numElements());

  DataType keyType = map.keyType();
  ArrayData expectedKeys = expected.keyArray();
  ArrayData expectedValues = expected.valueArray();

  DataType valueType = map.valueType();
  ArrayData actualKeys = actual.keyArray();
  ArrayData actualValues = actual.valueArray();

  for (int i = 0; i < actual.numElements(); i += 1) {
    assertEquals(context + ".key", keyType,
        expectedKeys.get(i, keyType), actualKeys.get(i, keyType));
    assertEquals(context + ".value", valueType,
        expectedValues.get(i, valueType), actualValues.get(i, valueType));
  }
}
Example #3
Source File: SparkOrcWriter.java, from iceberg (Apache License 2.0)
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ArrayData value = data.getArray(column);
    ListColumnVector cv = (ListColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.child.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      children.addValue((int) (e + cv.offsets[rowId]), e, value, cv.child);
    }
  }
}
Example #4
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.MapType map, Map<?, ?> expected, MapData actual) {
  Type keyType = map.keyType();
  Type valueType = map.valueType();

  List<Map.Entry<?, ?>> expectedElements = Lists.newArrayList(expected.entrySet());
  ArrayData actualKeys = actual.keyArray();
  ArrayData actualValues = actual.valueArray();

  for (int i = 0; i < expectedElements.size(); i += 1) {
    Map.Entry<?, ?> expectedPair = expectedElements.get(i);
    Object actualKey = actualKeys.get(i, convert(keyType));
    Object actualValue = actualValues.get(i, convert(valueType));
    assertEqualsUnsafe(keyType, expectedPair.getKey(), actualKey);
    assertEqualsUnsafe(valueType, expectedPair.getValue(), actualValue);
  }
}
Example #5
Source File: GenericsHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.MapType map, Map<?, ?> expected, MapData actual) {
  Type keyType = map.keyType();
  Type valueType = map.valueType();

  List<Map.Entry<?, ?>> expectedElements = Lists.newArrayList(expected.entrySet());
  ArrayData actualKeys = actual.keyArray();
  ArrayData actualValues = actual.valueArray();

  for (int i = 0; i < expectedElements.size(); i += 1) {
    Map.Entry<?, ?> expectedPair = expectedElements.get(i);
    Object actualKey = actualKeys.get(i, convert(keyType));
    Object actualValue = actualValues.get(i, convert(valueType));
    assertEqualsUnsafe(keyType, expectedPair.getKey(), actualKey);
    assertEqualsUnsafe(valueType, expectedPair.getValue(), actualValue);
  }
}
Example #6
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.MapType map, Map<?, ?> expected, MapData actual) {
  Type keyType = map.keyType();
  Type valueType = map.valueType();

  List<Map.Entry<?, ?>> expectedElements = Lists.newArrayList(expected.entrySet());
  ArrayData actualKeys = actual.keyArray();
  ArrayData actualValues = actual.valueArray();

  for (int i = 0; i < expectedElements.size(); i += 1) {
    Map.Entry<?, ?> expectedPair = expectedElements.get(i);
    Object actualKey = actualKeys.get(i, convert(keyType));
    Object actualValue = actualValues.get(i, convert(valueType));
    assertEqualsUnsafe(keyType, expectedPair.getKey(), actualKey);
    assertEqualsUnsafe(valueType, expectedPair.getValue(), actualValue);
  }
}
Example #7
Source File: SparkOrcWriter.java, from iceberg (Apache License 2.0)
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ArrayData value = data.getArray(column);
    ListColumnVector cv = (ListColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.child.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      children.addValue((int) (e + cv.offsets[rowId]), e, value, cv.child);
    }
  }
}
Example #8
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.ListType list, Collection<?> expected, ArrayData actual) {
  Type elementType = list.elementType();
  List<?> expectedElements = Lists.newArrayList(expected);
  for (int i = 0; i < expectedElements.size(); i += 1) {
    Object expectedValue = expectedElements.get(i);
    Object actualValue = actual.get(i, convert(elementType));
    assertEqualsUnsafe(elementType, expectedValue, actualValue);
  }
}
Example #9
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEquals(String context, ArrayType array,
                                 ArrayData expected, ArrayData actual) {
  Assert.assertEquals("Should have the same number of elements",
      expected.numElements(), actual.numElements());
  DataType type = array.elementType();
  for (int i = 0; i < actual.numElements(); i += 1) {
    assertEquals(context + ".element", type, expected.get(i, type), actual.get(i, type));
  }
}
Example #10
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEquals(String context, DataType type, Object expected, Object actual) {
  if (expected == null && actual == null) {
    return;
  }

  if (type instanceof StructType) {
    Assert.assertTrue("Expected should be an InternalRow: " + context,
        expected instanceof InternalRow);
    Assert.assertTrue("Actual should be an InternalRow: " + context,
        actual instanceof InternalRow);
    assertEquals(context, (StructType) type, (InternalRow) expected, (InternalRow) actual);

  } else if (type instanceof ArrayType) {
    Assert.assertTrue("Expected should be an ArrayData: " + context,
        expected instanceof ArrayData);
    Assert.assertTrue("Actual should be an ArrayData: " + context,
        actual instanceof ArrayData);
    assertEquals(context, (ArrayType) type, (ArrayData) expected, (ArrayData) actual);

  } else if (type instanceof MapType) {
    Assert.assertTrue("Expected should be a MapData: " + context,
        expected instanceof MapData);
    Assert.assertTrue("Actual should be a MapData: " + context,
        actual instanceof MapData);
    assertEquals(context, (MapType) type, (MapData) expected, (MapData) actual);

  } else if (type instanceof BinaryType) {
    assertEqualBytes(context, (byte[]) expected, (byte[]) actual);

  } else {
    Assert.assertEquals("Value should match expected: " + context, expected, actual);
  }
}
Example #11
Source File: TestHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.ListType list, Collection<?> expected, ArrayData actual) {
  Type elementType = list.elementType();
  List<?> expectedElements = Lists.newArrayList(expected);
  for (int i = 0; i < expectedElements.size(); i += 1) {
    Object expectedValue = expectedElements.get(i);
    Object actualValue = actual.get(i, convert(elementType));
    assertEqualsUnsafe(elementType, expectedValue, actualValue);
  }
}
Example #12
Source File: GenericsHelpers.java, from iceberg (Apache License 2.0)
private static void assertEqualsUnsafe(Types.ListType list, Collection<?> expected, ArrayData actual) {
  Type elementType = list.elementType();
  List<?> expectedElements = Lists.newArrayList(expected);
  for (int i = 0; i < expectedElements.size(); i += 1) {
    Object expectedValue = expectedElements.get(i);
    Object actualValue = actual.get(i, convert(elementType));
    assertEqualsUnsafe(elementType, expectedValue, actualValue);
  }
}
Example #13
Source File: StructInternalRow.java, from iceberg (Apache License 2.0)
private ArrayData collectionToArrayData(Type elementType, Collection<?> values) {
  switch (elementType.typeId()) {
    case BOOLEAN:
    case INTEGER:
    case DATE:
    case TIME:
    case LONG:
    case TIMESTAMP:
    case FLOAT:
    case DOUBLE:
      return fillArray(values, array -> (pos, value) -> array[pos] = value);
    case STRING:
      return fillArray(values, array ->
          (BiConsumer<Integer, CharSequence>) (pos, seq) ->
              array[pos] = UTF8String.fromString(seq.toString()));
    case FIXED:
    case BINARY:
      return fillArray(values, array ->
          (BiConsumer<Integer, ByteBuffer>) (pos, buf) ->
              array[pos] = ByteBuffers.toByteArray(buf));
    case DECIMAL:
      return fillArray(values, array ->
          (BiConsumer<Integer, BigDecimal>) (pos, dec) ->
              array[pos] = Decimal.apply(dec));
    case STRUCT:
      return fillArray(values, array ->
          (BiConsumer<Integer, StructLike>) (pos, tuple) ->
              array[pos] = new StructInternalRow(elementType.asStructType(), tuple));
    case LIST:
      return fillArray(values, array ->
          (BiConsumer<Integer, Collection<?>>) (pos, list) ->
              array[pos] = collectionToArrayData(elementType.asListType(), list));
    case MAP:
      return fillArray(values, array ->
          (BiConsumer<Integer, Map<?, ?>>) (pos, map) ->
              array[pos] = mapToMapData(elementType.asMapType(), map));
    default:
      throw new UnsupportedOperationException("Unsupported array element type: " + elementType);
  }
}
Example #14
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(ArrayData array, Encoder encoder) throws IOException {
  encoder.writeArrayStart();
  int numElements = array.numElements();
  encoder.setItemCount(numElements);
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    elementWriter.write((T) array.get(i, elementType), encoder);
  }
  encoder.writeArrayEnd();
}
Example #15
Source File: SparkOrcWriter.java, from iceberg (Apache License 2.0)
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    MapData map = data.getMap(column);
    ArrayData key = map.keyArray();
    ArrayData value = map.valueArray();
    MapColumnVector cv = (MapColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.keys.ensureSize(cv.childCount, true);
    cv.values.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      int pos = (int) (e + cv.offsets[rowId]);
      keyConverter.addValue(pos, e, key, cv.keys);
      valueConverter.addValue(pos, e, value, cv.values);
    }
  }
}
Example #16
Source File: SparkOrcValueReaders.java, from iceberg (Apache License 2.0)
@Override
public ArrayData nonNullRead(ColumnVector vector, int row) {
  ListColumnVector listVector = (ListColumnVector) vector;
  int offset = (int) listVector.offsets[row];
  int length = (int) listVector.lengths[row];
  List<Object> elements = Lists.newArrayListWithExpectedSize(length);
  for (int c = 0; c < length; ++c) {
    elements.add(elementReader.read(listVector.child, offset + c));
  }
  return new GenericArrayData(elements.toArray());
}
Example #17
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(MapData map, Encoder encoder) throws IOException {
  encoder.writeArrayStart();
  int numElements = map.numElements();
  encoder.setItemCount(numElements);
  ArrayData keyArray = map.keyArray();
  ArrayData valueArray = map.valueArray();
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    keyWriter.write((K) keyArray.get(i, keyType), encoder);
    valueWriter.write((V) valueArray.get(i, valueType), encoder);
  }
  encoder.writeArrayEnd();
}
Example #18
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(MapData map, Encoder encoder) throws IOException {
  encoder.writeMapStart();
  int numElements = map.numElements();
  encoder.setItemCount(numElements);
  ArrayData keyArray = map.keyArray();
  ArrayData valueArray = map.valueArray();
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    keyWriter.write((K) keyArray.get(i, keyType), encoder);
    valueWriter.write((V) valueArray.get(i, valueType), encoder);
  }
  encoder.writeMapEnd();
}
Example #19
Source File: SparkParquetReaders.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
protected ReusableArrayData newListData(ArrayData reuse) {
  this.readPos = 0;
  this.writePos = 0;

  if (reuse instanceof ReusableArrayData) {
    return (ReusableArrayData) reuse;
  } else {
    return new ReusableArrayData();
  }
}
Example #20
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(MapData map, Encoder encoder) throws IOException {
  encoder.writeMapStart();
  int numElements = map.numElements();
  encoder.setItemCount(numElements);
  ArrayData keyArray = map.keyArray();
  ArrayData valueArray = map.valueArray();
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    keyWriter.write((K) keyArray.get(i, keyType), encoder);
    valueWriter.write((V) valueArray.get(i, valueType), encoder);
  }
  encoder.writeMapEnd();
}
Example #21
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(MapData map, Encoder encoder) throws IOException {
  encoder.writeArrayStart();
  int numElements = map.numElements();
  encoder.setItemCount(numElements);
  ArrayData keyArray = map.keyArray();
  ArrayData valueArray = map.valueArray();
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    keyWriter.write((K) keyArray.get(i, keyType), encoder);
    valueWriter.write((V) valueArray.get(i, valueType), encoder);
  }
  encoder.writeArrayEnd();
}
Example #22
Source File: SparkValueWriters.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
public void write(ArrayData array, Encoder encoder) throws IOException {
  encoder.writeArrayStart();
  int numElements = array.numElements();
  encoder.setItemCount(numElements);
  for (int i = 0; i < numElements; i += 1) {
    encoder.startItem();
    elementWriter.write((T) array.get(i, elementType), encoder);
  }
  encoder.writeArrayEnd();
}
Example #23
Source File: SparkParquetReaders.java, from iceberg (Apache License 2.0)
@Override
@SuppressWarnings("unchecked")
protected ReusableArrayData newListData(ArrayData reuse) {
  this.readPos = 0;
  this.writePos = 0;

  if (reuse instanceof ReusableArrayData) {
    return (ReusableArrayData) reuse;
  } else {
    return new ReusableArrayData();
  }
}
Example #24
Source File: SparkParquetReaders.java, from iceberg (Apache License 2.0)
@Override
public ArrayData copy() {
  return new GenericArrayData(array());
}
Example #25
Source File: SparkParquetReaders.java, from iceberg (Apache License 2.0)
@Override
public ArrayData getArray(int ordinal) {
  return (ArrayData) values[ordinal];
}
Example #26
Source File: ColumnarBatchRow.java, from spliceengine (GNU Affero General Public License v3.0)
@Override
public <T> List<T> getList(int i) {
  ArrayData arrayData = row.getArray(i);
  // Slow but functional
  return Arrays.<T>asList((T[]) arrayData.array());
}
Example #27
Source File: SparkValueReaders.java, from iceberg (Apache License 2.0)
static ValueReader<ArrayData> array(ValueReader<?> elementReader) {
  return new ArrayReader(elementReader);
}
Example #28
Source File: CodegenExamples.java, from iceberg (Apache License 2.0)
public UnsafeRow apply(InternalRow i) {
  holder.reset();
  rowWriter.zeroOutNullBytes();

  boolean isNull = i.isNullAt(0);
  MapData value = isNull ? null : (i.getMap(0));
  if (isNull) {
    rowWriter.setNullAt(0);
  } else {
    // Remember the current cursor so that we can calculate how many bytes are
    // written later.
    final int tmpCursor = holder.cursor;

    if (value instanceof UnsafeMapData) {
      final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
      // grow the global buffer before writing data.
      holder.grow(sizeInBytes);
      ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
      holder.cursor += sizeInBytes;
    } else {
      final ArrayData keys = value.keyArray();
      final ArrayData values = value.valueArray();

      // preserve 8 bytes to write the key array numBytes later.
      holder.grow(8);
      holder.cursor += 8;

      // Remember the current cursor so that we can write numBytes of key array later.
      final int tmpCursor1 = holder.cursor;

      if (keys instanceof UnsafeArrayData) {
        final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
        // grow the global buffer before writing data.
        holder.grow(sizeInBytes1);
        ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
        holder.cursor += sizeInBytes1;
      } else {
        final int numElements = keys.numElements();
        arrayWriter.initialize(holder, numElements, 8);

        for (int index = 0; index < numElements; index++) {
          if (keys.isNullAt(index)) {
            arrayWriter.setNull(index);
          } else {
            final UTF8String element = keys.getUTF8String(index);
            arrayWriter.write(index, element);
          }
        }
      }

      // Write the numBytes of key array into the first 8 bytes.
      Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);

      if (values instanceof UnsafeArrayData) {
        final int sizeInBytes2 = ((UnsafeArrayData) values).getSizeInBytes();
        // grow the global buffer before writing data.
        holder.grow(sizeInBytes2);
        ((UnsafeArrayData) values).writeToMemory(holder.buffer, holder.cursor);
        holder.cursor += sizeInBytes2;
      } else {
        final int numElements1 = values.numElements();
        arrayWriter1.initialize(holder, numElements1, 8);

        for (int index1 = 0; index1 < numElements1; index1++) {
          if (values.isNullAt(index1)) {
            arrayWriter1.setNull(index1);
          } else {
            final UTF8String element1 = values.getUTF8String(index1);
            arrayWriter1.write(index1, element1);
          }
        }
      }
    }

    rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
  }
  result.setTotalSize(holder.totalSize());
  return result;
}
Example #29
Source File: CodegenExamples.java, from iceberg (Apache License 2.0)
public UnsafeRow apply(InternalRow i) {
  holder.reset();
  rowWriter.zeroOutNullBytes();

  boolean isNull = i.isNullAt(0);
  ArrayData value = isNull ? null : (i.getArray(0));
  if (isNull) {
    rowWriter.setNullAt(0);
  } else {
    // Remember the current cursor so that we can calculate how many bytes are
    // written later.
    final int tmpCursor = holder.cursor;

    if (value instanceof UnsafeArrayData) {
      final int sizeInBytes1 = ((UnsafeArrayData) value).getSizeInBytes();
      // grow the global buffer before writing data.
      holder.grow(sizeInBytes1);
      ((UnsafeArrayData) value).writeToMemory(holder.buffer, holder.cursor);
      holder.cursor += sizeInBytes1;
    } else {
      final int numElements = value.numElements();
      arrayWriter.initialize(holder, numElements, 8);

      for (int index = 0; index < numElements; index++) {
        if (value.isNullAt(index)) {
          arrayWriter.setNull(index);
        } else {
          final InternalRow element = value.getStruct(index, 2);

          final int tmpCursor1 = holder.cursor;

          if (element instanceof UnsafeRow) {
            final int sizeInBytes = ((UnsafeRow) element).getSizeInBytes();
            // grow the global buffer before writing data.
            holder.grow(sizeInBytes);
            ((UnsafeRow) element).writeToMemory(holder.buffer, holder.cursor);
            holder.cursor += sizeInBytes;
          } else {
            rowWriter1.reset();

            boolean isNull1 = element.isNullAt(0);
            int value1 = isNull1 ? -1 : element.getInt(0);
            if (isNull1) {
              rowWriter1.setNullAt(0);
            } else {
              rowWriter1.write(0, value1);
            }

            boolean isNull2 = element.isNullAt(1);
            int value2 = isNull2 ? -1 : element.getInt(1);
            if (isNull2) {
              rowWriter1.setNullAt(1);
            } else {
              rowWriter1.write(1, value2);
            }
          }

          arrayWriter.setOffsetAndSize(index, tmpCursor1, holder.cursor - tmpCursor1);
        }
      }
    }

    rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
  }
  result.setTotalSize(holder.totalSize());
  return result;
}
Example #30
Source File: CodegenExamples.java, from iceberg (Apache License 2.0)
public UnsafeRow apply(InternalRow i) {
  holder.reset();
  rowWriter.zeroOutNullBytes();

  boolean isNull = i.isNullAt(0);
  MapData value = isNull ? null : (i.getMap(0));
  if (isNull) {
    rowWriter.setNullAt(0);
  } else {
    // Remember the current cursor so that we can calculate how many bytes are
    // written later.
    final int tmpCursor = holder.cursor;

    if (value instanceof UnsafeMapData) {
      final int sizeInBytes = ((UnsafeMapData) value).getSizeInBytes();
      // grow the global buffer before writing data.
      holder.grow(sizeInBytes);
      ((UnsafeMapData) value).writeToMemory(holder.buffer, holder.cursor);
      holder.cursor += sizeInBytes;
    } else {
      final ArrayData keys = value.keyArray();
      final ArrayData values = value.valueArray();

      // preserve 8 bytes to write the key array numBytes later.
      holder.grow(8);
      holder.cursor += 8;

      // Remember the current cursor so that we can write numBytes of key array later.
      final int tmpCursor1 = holder.cursor;

      if (keys instanceof UnsafeArrayData) {
        final int sizeInBytes1 = ((UnsafeArrayData) keys).getSizeInBytes();
        // grow the global buffer before writing data.
        holder.grow(sizeInBytes1);
        ((UnsafeArrayData) keys).writeToMemory(holder.buffer, holder.cursor);
        holder.cursor += sizeInBytes1;
      } else {
        final int numElements = keys.numElements();
        arrayWriter.initialize(holder, numElements, 8);

        for (int index = 0; index < numElements; index++) {
          if (keys.isNullAt(index)) {
            arrayWriter.setNull(index);
          } else {
            final UTF8String element = keys.getUTF8String(index);
            arrayWriter.write(index, element);
          }
        }
      }

      // Write the numBytes of key array into the first 8 bytes.
      Platform.putLong(holder.buffer, tmpCursor1 - 8, holder.cursor - tmpCursor1);

      if (values instanceof UnsafeArrayData) {
        final int sizeInBytes2 = ((UnsafeArrayData) values).getSizeInBytes();
        // grow the global buffer before writing data.
        holder.grow(sizeInBytes2);
        ((UnsafeArrayData) values).writeToMemory(holder.buffer, holder.cursor);
        holder.cursor += sizeInBytes2;
      } else {
        final int numElements1 = values.numElements();
        arrayWriter1.initialize(holder, numElements1, 8);

        for (int index1 = 0; index1 < numElements1; index1++) {
          if (values.isNullAt(index1)) {
            arrayWriter1.setNull(index1);
          } else {
            final UTF8String element1 = values.getUTF8String(index1);
            arrayWriter1.write(index1, element1);
          }
        }
      }
    }

    rowWriter.setOffsetAndSize(0, tmpCursor, holder.cursor - tmpCursor);
  }
  result.setTotalSize(holder.totalSize());
  return result;
}