org.apache.lucene.codecs.CodecUtil Java Examples
The following examples show how to use org.apache.lucene.codecs.CodecUtil.
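Most of the examples follow the same pattern: CodecUtil.writeHeader (or the segment-aware writeIndexHeader) at the start of a file, CodecUtil.writeFooter at the end, and the matching check calls on the read side. Before the examples, here is a minimal sketch of that round trip in isolation. It is not taken from any of the projects below; the directory path, file name, codec name, and payload are invented for illustration.

import java.nio.file.Paths;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

public class CodecUtilRoundTrip {
  static final String CODEC = "DemoCodec"; // hypothetical codec name, for illustration only
  static final int VERSION = 1;

  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/codecutil-demo"))) {
      // Write side: a header identifying codec and version, the payload, then a
      // footer carrying a checksum over everything written before it.
      try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
        CodecUtil.writeHeader(out, CODEC, VERSION);
        out.writeVInt(42); // payload
        CodecUtil.writeFooter(out);
      }
      // Read side: validate the header (throws on codec/version mismatch),
      // consume the payload, then verify the footer checksum.
      try (ChecksumIndexInput in = dir.openChecksumInput("demo.bin", IOContext.READONCE)) {
        int version = CodecUtil.checkHeader(in, CODEC, VERSION, VERSION);
        int payload = in.readVInt();
        CodecUtil.checkFooter(in); // throws CorruptIndexException if the file was corrupted
        System.out.println("version=" + version + " payload=" + payload);
      }
    }
  }
}

Several examples instead use writeIndexHeader/checkIndexHeader, which additionally bind the file to a segment ID and suffix, or checksumEntireFile/retrieveChecksum, which verify or read back the footer checksum without decoding the payload.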
Example #1
Source File: TestOfflineSorter.java From lucene-solr with Apache License 2.0
@Nightly
public void testFixedLengthHeap() throws Exception {
  // Make sure the RAM accounting is correct, i.e. if we are sorting fixed width
  // ints (4 bytes) then the heap used is really only 4 bytes per value:
  Directory dir = newDirectory();
  IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
  try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
    byte[] bytes = new byte[Integer.BYTES];
    for (int i = 0; i < 1024 * 1024; i++) {
      random().nextBytes(bytes);
      w.write(bytes);
    }
    CodecUtil.writeFooter(out);
  }
  ExecutorService exec = randomExecutorServiceOrNull();
  OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR,
      BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES, exec,
      TestUtil.nextInt(random(), 1, 4));
  sorter.sort(out.getName());
  if (exec != null) {
    exec.shutdownNow();
  }
  // 1 MB of ints with 4 MB heap allowed should have been sorted in a single heap partition:
  assertEquals(0, sorter.sortInfo.mergeRounds);
  dir.close();
}
Example #2
Source File: ChecksumBlobStoreFormat.java From Elasticsearch with Apache License 2.0
/**
 * Reads blob with specified name without resolving the blobName using the {@link #blobName} method.
 *
 * @param blobContainer blob container
 * @param blobName      blob name
 */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
  try (InputStream inputStream = blobContainer.readBlob(blobName)) {
    byte[] bytes = ByteStreams.toByteArray(inputStream);
    final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")";
    try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) {
      CodecUtil.checksumEntireFile(indexInput);
      CodecUtil.checkHeader(indexInput, codec, VERSION, VERSION);
      long filePointer = indexInput.getFilePointer();
      long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
      BytesReference bytesReference = new BytesArray(bytes, (int) filePointer, (int) contentSize);
      return read(bytesReference);
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
      // we trick this into a dedicated exception with the original stacktrace
      throw new CorruptStateException(ex);
    }
  }
}
Example #3
Source File: RAMOnlyPostingsFormat.java From lucene-solr with Apache License 2.0
@Override
public FieldsProducer fieldsProducer(SegmentReadState readState) throws IOException {
  // Load our ID:
  final String idFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name,
      readState.segmentSuffix, ID_EXTENSION);
  IndexInput in = readState.directory.openInput(idFileName, readState.context);
  boolean success = false;
  final int id;
  try {
    CodecUtil.checkHeader(in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
    id = in.readVInt();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(in);
    } else {
      IOUtils.close(in);
    }
  }
  synchronized (state) {
    return state.get(id);
  }
}
Example #4
Source File: Lucene.java From Elasticsearch with Apache License 2.0
public static void checkSegmentInfoIntegrity(final Directory directory) throws IOException {
  new SegmentInfos.FindSegmentsFile(directory) {
    @Override
    protected Object doBody(String segmentFileName) throws IOException {
      try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) {
        final int format = input.readInt();
        final int actualFormat;
        if (format == CodecUtil.CODEC_MAGIC) { // 4.0+
          actualFormat = CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, Integer.MAX_VALUE);
          if (actualFormat >= SegmentInfos.VERSION_48) {
            CodecUtil.checksumEntireFile(input);
          }
        }
        // legacy....
      }
      return null;
    }
  }.run();
}
Example #5
Source File: BaseCompoundFormatTestCase.java From lucene-solr with Apache License 2.0
public void testDoubleClose() throws IOException {
  final String testfile = "_123.test";
  Directory dir = newDirectory();
  SegmentInfo si = newSegmentInfo(dir, "_123");
  try (IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT)) {
    CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix");
    out.writeInt(3);
    CodecUtil.writeFooter(out);
  }
  si.setFiles(Collections.singleton(testfile));
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);

  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  assertEquals(1, cfs.listAll().length);
  cfs.close();
  cfs.close(); // second close should not throw exception
  dir.close();
}
Example #6
Source File: BaseCompoundFormatTestCase.java From lucene-solr with Apache License 2.0
public void testPassIOContext() throws IOException {
  final String testfile = "_123.test";
  final IOContext myContext = new IOContext();

  Directory dir = new FilterDirectory(newDirectory()) {
    @Override
    public IndexOutput createOutput(String name, IOContext context) throws IOException {
      assertSame(myContext, context);
      return super.createOutput(name, context);
    }
  };
  SegmentInfo si = newSegmentInfo(dir, "_123");
  try (IndexOutput out = dir.createOutput(testfile, myContext)) {
    CodecUtil.writeIndexHeader(out, "Foo", 0, si.getId(), "suffix");
    out.writeInt(3);
    CodecUtil.writeFooter(out);
  }
  si.setFiles(Collections.singleton(testfile));
  si.getCodec().compoundFormat().write(dir, si, myContext);
  dir.close();
}
Example #7
Source File: Store.java From Elasticsearch with Apache License 2.0
private static void checksumFromLuceneFile(Directory directory, String file,
    ImmutableMap.Builder<String, StoreFileMetaData> builder, ESLogger logger,
    Version version, boolean readFileAsHash) throws IOException {
  final String checksum;
  final BytesRefBuilder fileHash = new BytesRefBuilder();
  try (final IndexInput in = directory.openInput(file, IOContext.READONCE)) {
    final long length;
    try {
      length = in.length();
      if (length < CodecUtil.footerLength()) {
        // truncated files trigger IAE if we seek negative... these files are really corrupted though
        throw new CorruptIndexException("Can't retrieve checksum from file: " + file
            + " file length must be >= " + CodecUtil.footerLength() + " but was: " + in.length(), in);
      }
      if (readFileAsHash) {
        final VerifyingIndexInput verifyingIndexInput = new VerifyingIndexInput(in);
        // additional safety: we checksum the entire file we read the hash for...
        hashFile(fileHash, new InputStreamIndexInput(verifyingIndexInput, length), length);
        checksum = digestToString(verifyingIndexInput.verify());
      } else {
        checksum = digestToString(CodecUtil.retrieveChecksum(in));
      }
    } catch (Throwable ex) {
      logger.debug("Can't retrieve checksum from file [{}]", ex, file);
      throw ex;
    }
    builder.put(file, new StoreFileMetaData(file, length, checksum, version, fileHash.get()));
  }
}
Example #8
Source File: TranslogWriter.java From Elasticsearch with Apache License 2.0
public static TranslogWriter create(Type type, ShardId shardId, String translogUUID, long fileGeneration,
    Path file, Callback<ChannelReference> onClose, int bufferSize, ChannelFactory channelFactory) throws IOException {
  final BytesRef ref = new BytesRef(translogUUID);
  final int headerLength = getHeaderLength(ref.length);
  final FileChannel channel = channelFactory.open(file);
  try {
    // This OutputStreamDataOutput is intentionally not closed because
    // closing it will close the FileChannel
    final OutputStreamDataOutput out = new OutputStreamDataOutput(java.nio.channels.Channels.newOutputStream(channel));
    CodecUtil.writeHeader(out, TRANSLOG_CODEC, VERSION);
    out.writeInt(ref.length);
    out.writeBytes(ref.bytes, ref.offset, ref.length);
    channel.force(true);
    writeCheckpoint(headerLength, 0, file.getParent(), fileGeneration, StandardOpenOption.WRITE);
    final TranslogWriter writer = type.create(shardId, fileGeneration,
        new ChannelReference(file, fileGeneration, channel, onClose), bufferSize);
    return writer;
  } catch (Throwable throwable) {
    // if we fail to bake the file-generation into the checkpoint we stick with the file and once
    // we recover and that file exists we remove it. We only apply this logic to the
    // checkpoint.generation+1; any other file with a higher generation is an error condition
    IOUtils.closeWhileHandlingException(channel);
    throw throwable;
  }
}
Example #9
Source File: Completion090PostingsFormat.java From Elasticsearch with Apache License 2.0
public CompletionFieldsConsumer(SegmentWriteState state) throws IOException {
  this.delegatesFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
  String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
  IndexOutput output = null;
  boolean success = false;
  try {
    output = state.directory.createOutput(suggestFSTFile, state.context);
    CodecUtil.writeHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT);
    /*
     * we write the delegate postings format name so we can load it
     * without getting an instance in the ctor
     */
    output.writeString(delegatePostingsFormat.getName());
    output.writeString(writeProvider.getName());
    this.suggestFieldsConsumer = writeProvider.consumer(output);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);
    }
  }
}
Example #10
Source File: Lucene84PostingsWriter.java From lucene-solr with Apache License 2.0
@Override
public void close() throws IOException {
  // TODO: add a finish() at least to PushBase? DV too...?
  boolean success = false;
  try {
    if (docOut != null) {
      CodecUtil.writeFooter(docOut);
    }
    if (posOut != null) {
      CodecUtil.writeFooter(posOut);
    }
    if (payOut != null) {
      CodecUtil.writeFooter(payOut);
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(docOut, posOut, payOut);
    } else {
      IOUtils.closeWhileHandlingException(docOut, posOut, payOut);
    }
    docOut = posOut = payOut = null;
  }
}
Example #11
Source File: ConnectionCostsWriter.java From lucene-solr with Apache License 2.0
public void write(Path baseDir) throws IOException {
  Files.createDirectories(baseDir);
  String fileName = ConnectionCosts.class.getName().replace('.', '/') + ConnectionCosts.FILENAME_SUFFIX;
  try (OutputStream os = Files.newOutputStream(baseDir.resolve(fileName));
       OutputStream bos = new BufferedOutputStream(os)) {
    final DataOutput out = new OutputStreamDataOutput(bos);
    CodecUtil.writeHeader(out, ConnectionCosts.HEADER, ConnectionCosts.VERSION);
    out.writeVInt(forwardSize);
    out.writeVInt(backwardSize);
    int last = 0;
    for (int i = 0; i < costs.limit() / 2; i++) {
      short cost = costs.getShort(i * 2);
      int delta = (int) cost - last;
      out.writeZInt(delta);
      last = cost;
    }
  }
}
Example #12
Source File: Lucene80DocValuesConsumer.java From lucene-solr with Apache License 2.0
@Override
public void close() throws IOException {
  boolean success = false;
  try {
    if (meta != null) {
      meta.writeInt(-1); // write EOF marker
      CodecUtil.writeFooter(meta); // write checksum
    }
    if (data != null) {
      CodecUtil.writeFooter(data); // write checksum
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(data, meta);
    } else {
      IOUtils.closeWhileHandlingException(data, meta);
    }
    meta = data = null;
  }
}
Example #13
Source File: Store.java From crate with Apache License 2.0
/**
 * Marks this store as corrupted. This method writes a {@code corrupted_${uuid}} file containing the given exception
 * message. If a store contains a {@code corrupted_${uuid}} file {@link #isMarkedCorrupted()} will return <code>true</code>.
 */
public void markStoreCorrupted(IOException exception) throws IOException {
  ensureOpen();
  if (!isMarkedCorrupted()) {
    String uuid = CORRUPTED + UUIDs.randomBase64UUID();
    try (IndexOutput output = this.directory().createOutput(uuid, IOContext.DEFAULT)) {
      CodecUtil.writeHeader(output, CODEC, VERSION);
      BytesStreamOutput out = new BytesStreamOutput();
      out.writeException(exception);
      BytesReference bytes = out.bytes();
      output.writeVInt(bytes.length());
      BytesRef ref = bytes.toBytesRef();
      output.writeBytes(ref.bytes, ref.offset, ref.length);
      CodecUtil.writeFooter(output);
    } catch (IOException ex) {
      logger.warn("Can't mark store as corrupted", ex);
    }
    directory().sync(Collections.singleton(uuid));
  }
}
Example #14
Source File: ChecksumBlobStoreFormat.java From Elasticsearch with Apache License 2.0
/**
 * Writes blob in atomic manner without resolving the blobName using the {@link #blobName} method.
 * <p>
 * The blob will be compressed and checksum will be written if required.
 *
 * @param obj           object to be serialized
 * @param blobContainer blob container
 * @param blobName      blob name
 */
protected void writeBlob(T obj, BlobContainer blobContainer, String blobName) throws IOException {
  BytesReference bytes = write(obj);
  try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) {
    final String resourceDesc = "ChecksumBlobStoreFormat.writeBlob(blob=\"" + blobName + "\")";
    try (OutputStreamIndexOutput indexOutput = new OutputStreamIndexOutput(resourceDesc, byteArrayOutputStream, BUFFER_SIZE)) {
      CodecUtil.writeHeader(indexOutput, codec, VERSION);
      try (OutputStream indexOutputOutputStream = new IndexOutputOutputStream(indexOutput) {
        @Override
        public void close() throws IOException {
          // this is important since some of the XContentBuilders write bytes on close.
          // in order to write the footer we need to prevent closing the actual index input.
        }
      }) {
        bytes.writeTo(indexOutputOutputStream);
      }
      CodecUtil.writeFooter(indexOutput);
    }
    blobContainer.writeBlob(blobName, new BytesArray(byteArrayOutputStream.toByteArray()));
  }
}
Example #15
Source File: FreeTextSuggester.java From lucene-solr with Apache License 2.0
@Override
public boolean load(DataInput input) throws IOException {
  CodecUtil.checkHeader(input, CODEC_NAME, VERSION_START, VERSION_START);
  count = input.readVLong();
  byte separatorOrig = input.readByte();
  if (separatorOrig != separator) {
    throw new IllegalStateException("separator=" + separator
        + " is incorrect: original model was built with separator=" + separatorOrig);
  }
  int gramsOrig = input.readVInt();
  if (gramsOrig != grams) {
    throw new IllegalStateException("grams=" + grams
        + " is incorrect: original model was built with grams=" + gramsOrig);
  }
  totTokens = input.readVLong();
  fst = new FST<>(input, input, PositiveIntOutputs.getSingleton());
  return true;
}
Example #16
Source File: TestOfflineSorter.java From lucene-solr with Apache License 2.0
public void testFixedLengthLiesLiesLies() throws Exception {
  // Make sure OfflineSorter catches me if I lie about the fixed value length:
  Directory dir = newDirectory();
  IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
  try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
    byte[] bytes = new byte[Integer.BYTES];
    random().nextBytes(bytes);
    w.write(bytes);
    CodecUtil.writeFooter(out);
  }

  OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR,
      BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Long.BYTES, null, 0);
  IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
    sorter.sort(out.getName());
  });
  assertEquals("value length is 4 but is supposed to always be 8", e.getMessage());
  dir.close();
}
Example #17
Source File: CompletionFieldsConsumer.java From lucene-solr with Apache License 2.0
CompletionFieldsConsumer(String codecName, PostingsFormat delegatePostingsFormat, SegmentWriteState state) throws IOException {
  this.codecName = codecName;
  this.delegatePostingsFormatName = delegatePostingsFormat.getName();
  this.state = state;
  String dictFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, DICT_EXTENSION);
  boolean success = false;
  try {
    this.delegateFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
    dictOut = state.directory.createOutput(dictFile, state.context);
    CodecUtil.writeIndexHeader(dictOut, codecName, COMPLETION_VERSION_CURRENT,
        state.segmentInfo.getId(), state.segmentSuffix);
    success = true;
  } finally {
    if (success == false) {
      IOUtils.closeWhileHandlingException(dictOut, delegateFieldsConsumer);
    }
  }
}
Example #18
Source File: Lucene80NormsConsumer.java From lucene-solr with Apache License 2.0
@Override
public void close() throws IOException {
  boolean success = false;
  try {
    if (meta != null) {
      meta.writeInt(-1); // write EOF marker
      CodecUtil.writeFooter(meta); // write checksum
    }
    if (data != null) {
      CodecUtil.writeFooter(data); // write checksum
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(data, meta);
    } else {
      IOUtils.closeWhileHandlingException(data, meta);
    }
    meta = data = null;
  }
}
Example #19
Source File: Lucene50LiveDocsFormat.java From lucene-solr with Apache License 2.0
@Override
public void writeLiveDocs(Bits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context) throws IOException {
  long gen = info.getNextDelGen();
  String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
  int delCount = 0;
  try (IndexOutput output = dir.createOutput(name, context)) {
    CodecUtil.writeIndexHeader(output, CODEC_NAME, VERSION_CURRENT, info.info.getId(),
        Long.toString(gen, Character.MAX_RADIX));
    final int longCount = FixedBitSet.bits2words(bits.length());
    for (int i = 0; i < longCount; ++i) {
      long currentBits = 0;
      for (int j = i << 6, end = Math.min(j + 63, bits.length() - 1); j <= end; ++j) {
        if (bits.get(j)) {
          currentBits |= 1L << j; // mod 64
        } else {
          delCount += 1;
        }
      }
      output.writeLong(currentBits);
    }
    CodecUtil.writeFooter(output);
  }
  if (delCount != info.getDelCount() + newDelCount) {
    throw new CorruptIndexException("bits.deleted=" + delCount
        + " info.delcount=" + info.getDelCount() + " newdelcount=" + newDelCount, name);
  }
}
Example #20
Source File: BlockTermsWriter.java From lucene-solr with Apache License 2.0
public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter, SegmentWriteState state,
    PostingsWriterBase postingsWriter) throws IOException {
  final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name,
      state.segmentSuffix, TERMS_EXTENSION);
  this.termsIndexWriter = termsIndexWriter;
  maxDoc = state.segmentInfo.maxDoc();
  out = state.directory.createOutput(termsFileName, state.context);
  boolean success = false;
  try {
    fieldInfos = state.fieldInfos;
    CodecUtil.writeIndexHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    currentField = null;
    this.postingsWriter = postingsWriter;
    // segment = state.segmentName;
    //System.out.println("BTW.init seg=" + state.segmentName);
    postingsWriter.init(out, state); // have consumer write its format/header
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
Example #21
Source File: BinaryDictionaryWriter.java From lucene-solr with Apache License 2.0
private void writePosDict(Path path) throws IOException {
  Files.createDirectories(path.getParent());
  try (OutputStream os = Files.newOutputStream(path);
       OutputStream bos = new BufferedOutputStream(os)) {
    final DataOutput out = new OutputStreamDataOutput(bos);
    CodecUtil.writeHeader(out, BinaryDictionary.POSDICT_HEADER, BinaryDictionary.VERSION);
    out.writeVInt(posDict.size());
    for (String s : posDict) {
      if (s == null) {
        out.writeByte((byte) 0);
        out.writeByte((byte) 0);
        out.writeByte((byte) 0);
      } else {
        String[] data = CSVUtil.parse(s);
        if (data.length != 3) {
          throw new IllegalArgumentException("Malformed pos/inflection: " + s + "; expected 3 characters");
        }
        out.writeString(data[0]);
        out.writeString(data[1]);
        out.writeString(data[2]);
      }
    }
  }
}
Example #22
Source File: CharacterDefinitionWriter.java From lucene-solr with Apache License 2.0
public void write(Path baseDir) throws IOException {
  Path path = baseDir.resolve(CharacterDefinition.class.getName().replace('.', '/')
      + CharacterDefinition.FILENAME_SUFFIX);
  Files.createDirectories(path.getParent());
  try (OutputStream os = new BufferedOutputStream(Files.newOutputStream(path))) {
    final DataOutput out = new OutputStreamDataOutput(os);
    CodecUtil.writeHeader(out, CharacterDefinition.HEADER, CharacterDefinition.VERSION);
    out.writeBytes(characterCategoryMap, 0, characterCategoryMap.length);
    for (int i = 0; i < CharacterDefinition.CLASS_COUNT; i++) {
      final byte b = (byte) ((invokeMap[i] ? 0x01 : 0x00) | (groupMap[i] ? 0x02 : 0x00));
      out.writeByte(b);
    }
  }
}
Example #23
Source File: Lucene50PostingsReader.java From lucene-solr with Apache License 2.0
@Override
public void checkIntegrity() throws IOException {
  if (docIn != null) {
    CodecUtil.checksumEntireFile(docIn);
  }
  if (posIn != null) {
    CodecUtil.checksumEntireFile(posIn);
  }
  if (payIn != null) {
    CodecUtil.checksumEntireFile(payIn);
  }
}
Example #24
Source File: OfflinePointWriter.java From lucene-solr with Apache License 2.0
@Override
public void close() throws IOException {
  if (closed == false) {
    try {
      CodecUtil.writeFooter(out);
    } finally {
      out.close();
      closed = true;
    }
  }
}
Example #25
Source File: CompletionFieldsProducer.java From lucene-solr with Apache License 2.0
CompletionFieldsProducer(String codecName, SegmentReadState state, FSTLoadMode fstLoadMode) throws IOException {
  String indexFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, INDEX_EXTENSION);
  delegateFieldsProducer = null;
  boolean success = false;
  try (ChecksumIndexInput index = state.directory.openChecksumInput(indexFile, state.context)) {
    // open up dict file containing all fsts
    String dictFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, DICT_EXTENSION);
    dictIn = state.directory.openInput(dictFile, state.context);
    CodecUtil.checkIndexHeader(dictIn, codecName, COMPLETION_CODEC_VERSION, COMPLETION_VERSION_CURRENT,
        state.segmentInfo.getId(), state.segmentSuffix);
    // just validate the footer for the dictIn
    CodecUtil.retrieveChecksum(dictIn);

    // open up index file (fieldNumber, offset)
    CodecUtil.checkIndexHeader(index, codecName, COMPLETION_CODEC_VERSION, COMPLETION_VERSION_CURRENT,
        state.segmentInfo.getId(), state.segmentSuffix);
    // load delegate PF
    PostingsFormat delegatePostingsFormat = PostingsFormat.forName(index.readString());
    delegateFieldsProducer = delegatePostingsFormat.fieldsProducer(state);

    // read suggest field numbers and their offsets in the terms file from index
    int numFields = index.readVInt();
    readers = new HashMap<>(numFields);
    for (int i = 0; i < numFields; i++) {
      int fieldNumber = index.readVInt();
      long offset = index.readVLong();
      long minWeight = index.readVLong();
      long maxWeight = index.readVLong();
      byte type = index.readByte();
      FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNumber);
      // we don't load the FST yet
      readers.put(fieldInfo.name, new CompletionsTermsReader(dictIn, offset, minWeight, maxWeight, type, fstLoadMode));
    }
    CodecUtil.checkFooter(index);
    success = true;
  } finally {
    if (success == false) {
      IOUtils.closeWhileHandlingException(delegateFieldsProducer, dictIn);
    }
  }
}
Example #26
Source File: BaseCompoundFormatTestCase.java From lucene-solr with Apache License 2.0
/**
 * Creates a file of the specified size with sequential data. The first
 * byte is written as the start byte provided. All subsequent bytes are
 * computed as start + offset where offset is the number of the byte.
 */
protected static void createSequenceFile(Directory dir, String name, byte start, int size,
    byte[] segID, String segSuffix) throws IOException {
  try (IndexOutput os = dir.createOutput(name, newIOContext(random()))) {
    CodecUtil.writeIndexHeader(os, "Foo", 0, segID, segSuffix);
    for (int i = 0; i < size; i++) {
      os.writeByte(start);
      start++;
    }
    CodecUtil.writeFooter(os);
  }
}
Example #27
Source File: Lucene50PostingsReader.java From lucene-solr with Apache License 2.0
@Override
public void init(IndexInput termsIn, SegmentReadState state) throws IOException {
  // Make sure we are talking to the matching postings writer
  CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT,
      state.segmentInfo.getId(), state.segmentSuffix);
  final int indexBlockSize = termsIn.readVInt();
  if (indexBlockSize != BLOCK_SIZE) {
    throw new IllegalStateException("index-time BLOCK_SIZE (" + indexBlockSize
        + ") != read-time BLOCK_SIZE (" + BLOCK_SIZE + ")");
  }
}
Example #28
Source File: Lucene50CompoundReader.java From lucene-solr with Apache License 2.0
/** Helper method that reads CFS entries from an input stream */
private Map<String, FileEntry> readEntries(byte[] segmentID, Directory dir, String entriesFileName) throws IOException {
  Map<String, FileEntry> mapping = null;
  try (ChecksumIndexInput entriesStream = dir.openChecksumInput(entriesFileName, IOContext.READONCE)) {
    Throwable priorE = null;
    try {
      version = CodecUtil.checkIndexHeader(entriesStream, Lucene50CompoundFormat.ENTRY_CODEC,
          Lucene50CompoundFormat.VERSION_START, Lucene50CompoundFormat.VERSION_CURRENT, segmentID, "");
      final int numEntries = entriesStream.readVInt();
      mapping = new HashMap<>(numEntries);
      for (int i = 0; i < numEntries; i++) {
        final FileEntry fileEntry = new FileEntry();
        final String id = entriesStream.readString();
        FileEntry previous = mapping.put(id, fileEntry);
        if (previous != null) {
          throw new CorruptIndexException("Duplicate cfs entry id=" + id + " in CFS ", entriesStream);
        }
        fileEntry.offset = entriesStream.readLong();
        fileEntry.length = entriesStream.readLong();
      }
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(entriesStream, priorE);
    }
  }
  return Collections.unmodifiableMap(mapping);
}
Example #29
Source File: FieldsIndexReader.java From lucene-solr with Apache License 2.0
FieldsIndexReader(Directory dir, String name, String suffix, String extensionPrefix,
    String codecName, byte[] id) throws IOException {
  try (ChecksumIndexInput metaIn = dir.openChecksumInput(
      IndexFileNames.segmentFileName(name, suffix, extensionPrefix + FIELDS_META_EXTENSION_SUFFIX),
      IOContext.READONCE)) {
    Throwable priorE = null;
    try {
      CodecUtil.checkIndexHeader(metaIn, codecName + "Meta", VERSION_START, VERSION_CURRENT, id, suffix);
      maxDoc = metaIn.readInt();
      blockShift = metaIn.readInt();
      numChunks = metaIn.readInt();
      docsStartPointer = metaIn.readLong();
      docsMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
      docsEndPointer = startPointersStartPointer = metaIn.readLong();
      startPointersMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
      startPointersEndPointer = metaIn.readLong();
      maxPointer = metaIn.readLong();
    } catch (Throwable t) {
      priorE = t;
    } finally {
      CodecUtil.checkFooter(metaIn, priorE);
    }
  }

  indexInput = dir.openInput(IndexFileNames.segmentFileName(name, suffix,
      extensionPrefix + FIELDS_INDEX_EXTENSION_SUFFIX), IOContext.READ);
  boolean success = false;
  try {
    CodecUtil.checkIndexHeader(indexInput, codecName + "Idx", VERSION_START, VERSION_CURRENT, id, suffix);
    CodecUtil.retrieveChecksum(indexInput);
    success = true;
  } finally {
    if (success == false) {
      indexInput.close();
    }
  }
  final RandomAccessInput docsSlice = indexInput.randomAccessSlice(docsStartPointer,
      docsEndPointer - docsStartPointer);
  final RandomAccessInput startPointersSlice = indexInput.randomAccessSlice(startPointersStartPointer,
      startPointersEndPointer - startPointersStartPointer);
  docs = DirectMonotonicReader.getInstance(docsMeta, docsSlice);
  startPointers = DirectMonotonicReader.getInstance(startPointersMeta, startPointersSlice);
}