Java Code Examples for it.unimi.dsi.fastutil.ints.IntList#toIntArray()
The following examples show how to use it.unimi.dsi.fastutil.ints.IntList#toIntArray(). Each example is taken from an open-source project; the source file and license are noted above the snippet.
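As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class name ToIntArrayExample is purely illustrative) showing how an IntList is typically populated and then copied into a primitive int[] with toIntArray():

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import java.util.Arrays;

public class ToIntArrayExample {
    public static void main(String[] args) {
        // Collect ints without boxing them into Integer objects.
        IntList ids = new IntArrayList();
        for (int i = 0; i < 5; i++) {
            ids.add(i * 10);
        }
        // Copy the contents into a plain primitive array: [0, 10, 20, 30, 40].
        int[] array = ids.toIntArray();
        System.out.println(Arrays.toString(array));
    }
}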
Example 1
Source File: PrimitiveColumnReader.java From presto with Apache License 2.0
public ColumnChunk readPrimitive(Field field)
{
    IntList definitionLevels = new IntArrayList();
    IntList repetitionLevels = new IntArrayList();
    seek();
    BlockBuilder blockBuilder = field.getType().createBlockBuilder(null, nextBatchSize);
    int valueCount = 0;
    while (valueCount < nextBatchSize) {
        if (page == null) {
            readNextPage();
        }
        int valuesToRead = Math.min(remainingValueCountInPage, nextBatchSize - valueCount);
        readValues(blockBuilder, valuesToRead, field.getType(), definitionLevels, repetitionLevels);
        valueCount += valuesToRead;
    }
    checkArgument(valueCount == nextBatchSize, "valueCount %s not equals to batchSize %s", valueCount, nextBatchSize);

    readOffset = 0;
    nextBatchSize = 0;
    return new ColumnChunk(blockBuilder.build(), definitionLevels.toIntArray(), repetitionLevels.toIntArray());
}
Example 2
Source File: GreedyReranker.java From RankSys with Mozilla Public License 2.0
/**
 * Returns the permutation to obtain the re-ranking
 *
 * @return permutation to obtain the re-ranking
 */
public int[] rerankPermutation() {
    List<Tuple2od<I>> list = recommendation.getItems();

    IntList perm = new IntArrayList();
    IntLinkedOpenHashSet remainingI = new IntLinkedOpenHashSet();
    IntStream.range(0, list.size()).forEach(remainingI::add);

    while (!remainingI.isEmpty() && perm.size() < min(maxLength, cutoff)) {
        int bestI = selectItem(remainingI, list);
        perm.add(bestI);
        remainingI.remove(bestI);
        update(list.get(bestI));
    }

    while (perm.size() < min(maxLength, list.size())) {
        perm.add(remainingI.removeFirstInt());
    }

    return perm.toIntArray();
}
Example 3
Source File: TestIndexIterator.java From parquet-mr with Apache License 2.0
static void assertEquals(PrimitiveIterator.OfInt actualIt, int... expectedValues) {
    IntList actualList = new IntArrayList();
    actualIt.forEachRemaining((int value) -> actualList.add(value));
    int[] actualValues = actualList.toIntArray();
    assertArrayEquals(
        "ExpectedValues: " + Arrays.toString(expectedValues)
            + " ActualValues: " + Arrays.toString(actualValues),
        expectedValues, actualValues);
}
Example 4
Source File: ColumnIndexFilterUtils.java From parquet-mr with Apache License 2.0
static OffsetIndex filterOffsetIndex(OffsetIndex offsetIndex, RowRanges rowRanges, long totalRowCount) {
    IntList indexMap = new IntArrayList();
    for (int i = 0, n = offsetIndex.getPageCount(); i < n; ++i) {
        long from = offsetIndex.getFirstRowIndex(i);
        if (rowRanges.isOverlapping(from, offsetIndex.getLastRowIndex(i, totalRowCount))) {
            indexMap.add(i);
        }
    }
    return new FilteredOffsetIndex(offsetIndex, indexMap.toIntArray());
}
Example 5
Source File: ValueInTransformFunction.java From incubator-pinot with Apache License 2.0
private static int[] filterInts(IntSet intSet, int[] source) {
    IntList intList = new IntArrayList();
    for (int value : source) {
        if (intSet.contains(value)) {
            intList.add(value);
        }
    }
    if (intList.size() == source.length) {
        return source;
    } else {
        return intList.toIntArray();
    }
}
Example 6
Source File: RegexpLikePredicateEvaluatorFactory.java From incubator-pinot with Apache License 2.0
@Override
public int[] getMatchingDictIds() {
    if (_matchingDictIds == null) {
        IntList matchingDictIds = new IntArrayList();
        int dictionarySize = _dictionary.length();
        for (int dictId = 0; dictId < dictionarySize; dictId++) {
            if (applySV(dictId)) {
                matchingDictIds.add(dictId);
            }
        }
        _matchingDictIds = matchingDictIds.toIntArray();
    }
    return _matchingDictIds;
}
Example 7
Source File: ValueInTransformFunctionTest.java From incubator-pinot with Apache License 2.0
@Test(dataProvider = "testValueInTransformFunction")
public void testValueInTransformFunction(String expressionStr) {
    ExpressionContext expression = QueryContextConverterUtils.getExpression(expressionStr);
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    Assert.assertTrue(transformFunction instanceof ValueInTransformFunction);
    Assert.assertEquals(transformFunction.getName(), ValueInTransformFunction.FUNCTION_NAME);
    Assert.assertTrue(transformFunction.getResultMetadata().hasDictionary());
    int[][] dictIds = transformFunction.transformToDictIdsMV(_projectionBlock);
    int[][] intValues = transformFunction.transformToIntValuesMV(_projectionBlock);
    long[][] longValues = transformFunction.transformToLongValuesMV(_projectionBlock);
    float[][] floatValues = transformFunction.transformToFloatValuesMV(_projectionBlock);
    double[][] doubleValues = transformFunction.transformToDoubleValuesMV(_projectionBlock);
    String[][] stringValues = transformFunction.transformToStringValuesMV(_projectionBlock);
    Dictionary dictionary = transformFunction.getDictionary();
    for (int i = 0; i < NUM_ROWS; i++) {
        IntList expectedList = new IntArrayList();
        for (int value : _intMVValues[i]) {
            if (value == 1 || value == 2 || value == 9 || value == 5) {
                expectedList.add(value);
            }
        }
        int[] expectedValues = expectedList.toIntArray();
        int numValues = expectedValues.length;
        for (int j = 0; j < numValues; j++) {
            int expected = expectedValues[j];
            Assert.assertEquals(dictIds[i][j], dictionary.indexOf(Integer.toString(expected)));
            Assert.assertEquals(intValues[i][j], expected);
            Assert.assertEquals(longValues[i][j], (long) expected);
            Assert.assertEquals(floatValues[i][j], (float) expected);
            Assert.assertEquals(doubleValues[i][j], (double) expected);
            Assert.assertEquals(stringValues[i][j], Integer.toString(expected));
        }
    }
}
Example 8
Source File: TensorFlowModel.java From samantha with MIT License
public LearningInstance featurize(JsonNode entity, boolean update) {
    Map<String, List<Feature>> feaMap = FeaturizerUtilities.getFeatureMap(entity, true,
            featureExtractors, indexSpace);
    if (equalSizeChecks != null) {
        for (List<String> features : equalSizeChecks) {
            int size = -1;
            for (String fea : features) {
                if (size < 0) {
                    if (feaMap.containsKey(fea)) {
                        size = feaMap.get(fea).size();
                    } else {
                        throw new BadRequestException(
                                "Feature " + fea + " is not present in the extracted feature map with keys "
                                        + feaMap.keySet().toString());
                    }
                } else if (size != feaMap.get(fea).size()) {
                    throw new ConfigurationException(
                            "Equal size checks with " + features.toString()
                                    + " failed for " + entity.toString());
                }
            }
        }
    }
    String group = null;
    if (groupKeys != null && groupKeys.size() > 0) {
        group = FeatureExtractorUtilities.composeConcatenatedKey(entity, groupKeys);
    }
    TensorFlowInstance instance = new TensorFlowInstance(group);
    for (Map.Entry<String, List<Feature>> entry : feaMap.entrySet()) {
        DoubleList doubles = new DoubleArrayList();
        IntList ints = new IntArrayList();
        for (Feature feature : entry.getValue()) {
            doubles.add(feature.getValue());
            ints.add(feature.getIndex());
        }
        double[] darr = doubles.toDoubleArray();
        instance.putValues(entry.getKey(), darr);
        int[] iarr = ints.toIntArray();
        instance.putIndices(entry.getKey(), iarr);
    }
    return instance;
}