Java Code Examples for org.apache.lucene.store.ChecksumIndexInput#readInt()
The following examples show how to use org.apache.lucene.store.ChecksumIndexInput#readInt().
The original project, source file, and license are noted above each example.
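Before the project examples, here is a minimal, self-contained sketch of the common pattern behind them: obtain a ChecksumIndexInput from a Directory via openChecksumInput(), read values such as a count with readInt(), and finish with CodecUtil.checkFooter() to verify the checksum. The directory path, the file name "example.dat", and its layout (an int count followed by that many longs and a codec footer) are hypothetical assumptions for illustration, not part of any Lucene file format.

import java.nio.file.Paths;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;

public class ChecksumReadSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"))) {
      // openChecksumInput wraps the raw input so every read updates a running checksum.
      try (ChecksumIndexInput in = dir.openChecksumInput("example.dat", IOContext.READONCE)) {
        int count = in.readInt();        // 4-byte int, e.g. a record count (hypothetical layout)
        for (int i = 0; i < count; i++) {
          long value = in.readLong();    // subsequent reads also feed the checksum
          System.out.println(value);
        }
        // Verifies the accumulated checksum against the footer; assumes the file
        // was written with CodecUtil.writeFooter(...).
        CodecUtil.checkFooter(in);
      }
    }
  }
}

The examples below follow the same shape, decoding per-field metadata with readInt() and the other DataInput methods.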
Example 1
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException {
  for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) {
    FieldInfo info = infos.fieldInfo(fieldNumber);
    if (info == null) {
      throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta);
    }
    byte type = meta.readByte();
    if (type == Lucene80DocValuesFormat.NUMERIC) {
      numerics.put(info.name, readNumeric(meta));
    } else if (type == Lucene80DocValuesFormat.BINARY) {
      binaries.put(info.name, readBinary(meta));
    } else if (type == Lucene80DocValuesFormat.SORTED) {
      sorted.put(info.name, readSorted(meta));
    } else if (type == Lucene80DocValuesFormat.SORTED_SET) {
      sortedSets.put(info.name, readSortedSet(meta));
    } else if (type == Lucene80DocValuesFormat.SORTED_NUMERIC) {
      sortedNumerics.put(info.name, readSortedNumeric(meta));
    } else {
      throw new CorruptIndexException("invalid type: " + type, meta);
    }
  }
}
Example 2
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException {
  entry.termsDictSize = meta.readVLong();
  entry.termsDictBlockShift = meta.readInt();
  final int blockShift = meta.readInt();
  final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift;
  entry.termsAddressesMeta = DirectMonotonicReader.loadMeta(meta, addressesSize, blockShift);
  entry.maxTermLength = meta.readInt();
  entry.termsDataOffset = meta.readLong();
  entry.termsDataLength = meta.readLong();
  entry.termsAddressesOffset = meta.readLong();
  entry.termsAddressesLength = meta.readLong();
  entry.termsDictIndexShift = meta.readInt();
  final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift;
  entry.termsIndexAddressesMeta = DirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift);
  entry.termsIndexOffset = meta.readLong();
  entry.termsIndexLength = meta.readLong();
  entry.termsIndexAddressesOffset = meta.readLong();
  entry.termsIndexAddressesLength = meta.readLong();
}
Example 3
Source File: BloomFilteringPostingsFormat.java From lucene-solr with Apache License 2.0
public BloomFilteredFieldsProducer(SegmentReadState state) throws IOException {
  String bloomFileName = IndexFileNames.segmentFileName(
      state.segmentInfo.name, state.segmentSuffix, BLOOM_EXTENSION);
  ChecksumIndexInput bloomIn = null;
  boolean success = false;
  try {
    bloomIn = state.directory.openChecksumInput(bloomFileName, state.context);
    CodecUtil.checkIndexHeader(bloomIn, BLOOM_CODEC_NAME, VERSION_START, VERSION_CURRENT,
        state.segmentInfo.getId(), state.segmentSuffix);
    // // Load the hash function used in the BloomFilter
    // hashFunction = HashFunction.forName(bloomIn.readString());
    // Load the delegate postings format
    PostingsFormat delegatePostingsFormat = PostingsFormat.forName(bloomIn.readString());
    this.delegateFieldsProducer = delegatePostingsFormat.fieldsProducer(state);
    int numBlooms = bloomIn.readInt();
    for (int i = 0; i < numBlooms; i++) {
      int fieldNum = bloomIn.readInt();
      FuzzySet bloom = FuzzySet.deserialize(bloomIn);
      FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNum);
      bloomsByFieldName.put(fieldInfo.name, bloom);
    }
    CodecUtil.checkFooter(bloomIn);
    IOUtils.close(bloomIn);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(bloomIn, delegateFieldsProducer);
    }
  }
}
Example 4
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException {
  entry.docsWithFieldOffset = meta.readLong();
  entry.docsWithFieldLength = meta.readLong();
  entry.jumpTableEntryCount = meta.readShort();
  entry.denseRankPower = meta.readByte();
  entry.numValues = meta.readLong();
  int tableSize = meta.readInt();
  if (tableSize > 256) {
    throw new CorruptIndexException("invalid table size: " + tableSize, meta);
  }
  if (tableSize >= 0) {
    entry.table = new long[tableSize];
    ramBytesUsed += RamUsageEstimator.sizeOf(entry.table);
    for (int i = 0; i < tableSize; ++i) {
      entry.table[i] = meta.readLong();
    }
  }
  if (tableSize < -1) {
    entry.blockShift = -2 - tableSize;
  } else {
    entry.blockShift = -1;
  }
  entry.bitsPerValue = meta.readByte();
  entry.minValue = meta.readLong();
  entry.gcd = meta.readLong();
  entry.valuesOffset = meta.readLong();
  entry.valuesLength = meta.readLong();
  entry.valueJumpTableOffset = meta.readLong();
}
Example 5
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private BinaryEntry readBinary(ChecksumIndexInput meta) throws IOException {
  BinaryEntry entry = new BinaryEntry();
  entry.dataOffset = meta.readLong();
  entry.dataLength = meta.readLong();
  entry.docsWithFieldOffset = meta.readLong();
  entry.docsWithFieldLength = meta.readLong();
  entry.jumpTableEntryCount = meta.readShort();
  entry.denseRankPower = meta.readByte();
  entry.numDocsWithField = meta.readInt();
  entry.minLength = meta.readInt();
  entry.maxLength = meta.readInt();
  if ((version >= Lucene80DocValuesFormat.VERSION_BIN_COMPRESSED && entry.numDocsWithField > 0)
      || entry.minLength < entry.maxLength) {
    entry.addressesOffset = meta.readLong();
    // Old count of uncompressed addresses
    long numAddresses = entry.numDocsWithField + 1L;
    // New count of compressed addresses - the number of compressed blocks
    if (version >= Lucene80DocValuesFormat.VERSION_BIN_COMPRESSED) {
      entry.numCompressedChunks = meta.readVInt();
      entry.docsPerChunkShift = meta.readVInt();
      entry.maxUncompressedChunkSize = meta.readVInt();
      numAddresses = entry.numCompressedChunks;
    }
    final int blockShift = meta.readVInt();
    entry.addressesMeta = DirectMonotonicReader.loadMeta(meta, numAddresses, blockShift);
    ramBytesUsed += entry.addressesMeta.ramBytesUsed();
    entry.addressesLength = meta.readLong();
  }
  return entry;
}
Example 6
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException {
  SortedEntry entry = new SortedEntry();
  entry.docsWithFieldOffset = meta.readLong();
  entry.docsWithFieldLength = meta.readLong();
  entry.jumpTableEntryCount = meta.readShort();
  entry.denseRankPower = meta.readByte();
  entry.numDocsWithField = meta.readInt();
  entry.bitsPerValue = meta.readByte();
  entry.ordsOffset = meta.readLong();
  entry.ordsLength = meta.readLong();
  readTermDict(meta, entry);
  return entry;
}
Example 7
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException {
  SortedSetEntry entry = new SortedSetEntry();
  byte multiValued = meta.readByte();
  switch (multiValued) {
    case 0: // singlevalued
      entry.singleValueEntry = readSorted(meta);
      return entry;
    case 1: // multivalued
      break;
    default:
      throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta);
  }
  entry.docsWithFieldOffset = meta.readLong();
  entry.docsWithFieldLength = meta.readLong();
  entry.jumpTableEntryCount = meta.readShort();
  entry.denseRankPower = meta.readByte();
  entry.bitsPerValue = meta.readByte();
  entry.ordsOffset = meta.readLong();
  entry.ordsLength = meta.readLong();
  entry.numDocsWithField = meta.readInt();
  entry.addressesOffset = meta.readLong();
  final int blockShift = meta.readVInt();
  entry.addressesMeta = DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
  ramBytesUsed += entry.addressesMeta.ramBytesUsed();
  entry.addressesLength = meta.readLong();
  readTermDict(meta, entry);
  return entry;
}
Example 8
Source File: Lucene80DocValuesProducer.java From lucene-solr with Apache License 2.0
private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException {
  SortedNumericEntry entry = new SortedNumericEntry();
  readNumeric(meta, entry);
  entry.numDocsWithField = meta.readInt();
  if (entry.numDocsWithField != entry.numValues) {
    entry.addressesOffset = meta.readLong();
    final int blockShift = meta.readVInt();
    entry.addressesMeta = DirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift);
    ramBytesUsed += entry.addressesMeta.ramBytesUsed();
    entry.addressesLength = meta.readLong();
  }
  return entry;
}
Example 9
Source File: GenericRecordReader.java From incubator-retired-blur with Apache License 2.0
private SegmentInfoPerCommit segmentInfosRead(Directory directory, String segmentFileName,
    String segmentInfoName) throws IOException {
  boolean success = false;
  ChecksumIndexInput input = new ChecksumIndexInput(directory.openInput(segmentFileName, IOContext.READ));
  try {
    final int format = input.readInt();
    if (format == CodecUtil.CODEC_MAGIC) { // 4.0+
      CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, SegmentInfos.VERSION_40);
      input.readLong(); // read version
      input.readInt();  // read counter
      int numSegments = input.readInt();
      if (numSegments < 0) {
        throw new CorruptIndexException("invalid segment count: " + numSegments + " (resource: " + input + ")");
      }
      for (int seg = 0; seg < numSegments; seg++) {
        String segName = input.readString();
        Codec codec = Codec.forName(input.readString());
        SegmentInfo info = codec.segmentInfoFormat().getSegmentInfoReader().read(directory, segName, IOContext.READ);
        info.setCodec(codec);
        long delGen = input.readLong();
        int delCount = input.readInt();
        if (delCount < 0 || delCount > info.getDocCount()) {
          throw new CorruptIndexException("invalid deletion count: " + delCount + " (resource: " + input + ")");
        }
        if (segName.equals(segmentInfoName)) {
          success = true;
          return new SegmentInfoPerCommit(info, delCount, delGen);
        }
      }
    } else {
      throw new IOException("Legacy Infos not supported for dir [" + directory + "].");
    }
    throw new IOException("Segment [" + segmentInfoName + "] not found in dir [" + directory + "]");
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(input);
    } else {
      input.close();
    }
  }
}