Java Code Examples for org.apache.lucene.util.packed.PackedInts#COMPACT
The following examples show how to use
org.apache.lucene.util.packed.PackedInts#COMPACT.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: NodeHash.java From lucene-solr with Apache License 2.0 | 5 votes |
private void rehash() throws IOException { final PagedGrowableWriter oldTable = table; table = new PagedGrowableWriter(2*oldTable.size(), 1<<30, PackedInts.bitsRequired(count), PackedInts.COMPACT); mask = table.size()-1; for(long idx=0;idx<oldTable.size();idx++) { final long address = oldTable.get(idx); if (address != 0) { addNew(address); } } }
Example 2
Source File: NodeHash.java From lucene-solr with Apache License 2.0 | 4 votes |
public NodeHash(FST<T> fst, FST.BytesReader in) { table = new PagedGrowableWriter(16, 1<<27, 8, PackedInts.COMPACT); mask = 15; this.fst = fst; this.in = in; }
Example 3
Source File: Lucene50PostingsWriter.java From lucene-solr with Apache License 2.0 | 4 votes |
/**
 * Creates a postings writer for the given segment.
 *
 * Always opens the .doc output; opens the .pos output only when the segment
 * has positional data, and the .pay output only when it additionally has
 * payloads or offsets. Buffers for each stream are allocated (or left null)
 * to match. On any failure during setup, all opened outputs are closed.
 *
 * @param state segment write state supplying directory, segment info and field infos
 * @throws IOException if creating an output or writing a header fails
 */
public Lucene50PostingsWriter(SegmentWriteState state) throws IOException {
  // COMPACT trades decode speed for the smallest possible encoding.
  final float acceptableOverheadRatio = PackedInts.COMPACT;
  String docFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.DOC_EXTENSION);
  docOut = state.directory.createOutput(docFileName, state.context);
  IndexOutput posOut = null;
  IndexOutput payOut = null;
  boolean success = false; // tracks whether setup completed; used by finally for cleanup
  try {
    CodecUtil.writeIndexHeader(docOut, DOC_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    forUtil = new ForUtil(acceptableOverheadRatio, docOut);
    if (state.fieldInfos.hasProx()) {
      // Segment has positions: open the .pos stream and its delta buffer.
      posDeltaBuffer = new int[MAX_DATA_SIZE];
      String posFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.POS_EXTENSION);
      posOut = state.directory.createOutput(posFileName, state.context);
      CodecUtil.writeIndexHeader(posOut, POS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
      if (state.fieldInfos.hasPayloads()) {
        payloadBytes = new byte[128]; // grows later as needed — TODO confirm growth happens at write time
        payloadLengthBuffer = new int[MAX_DATA_SIZE];
      } else {
        payloadBytes = null;
        payloadLengthBuffer = null;
      }
      if (state.fieldInfos.hasOffsets()) {
        offsetStartDeltaBuffer = new int[MAX_DATA_SIZE];
        offsetLengthBuffer = new int[MAX_DATA_SIZE];
      } else {
        offsetStartDeltaBuffer = null;
        offsetLengthBuffer = null;
      }
      // Payloads and offsets share a single .pay stream.
      if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) {
        String payFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.PAY_EXTENSION);
        payOut = state.directory.createOutput(payFileName, state.context);
        CodecUtil.writeIndexHeader(payOut, PAY_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
      }
    } else {
      // No positional data anywhere in the segment: no pos/pay buffers needed.
      posDeltaBuffer = null;
      payloadLengthBuffer = null;
      offsetStartDeltaBuffer = null;
      offsetLengthBuffer = null;
      payloadBytes = null;
    }
    this.payOut = payOut;
    this.posOut = posOut;
    success = true;
  } finally {
    if (!success) {
      // Setup failed part-way: close whatever was opened, suppressing secondary exceptions.
      IOUtils.closeWhileHandlingException(docOut, posOut, payOut);
    }
  }
  docDeltaBuffer = new int[MAX_DATA_SIZE];
  freqBuffer = new int[MAX_DATA_SIZE];
  // TODO: should we try skipping every 2/4 blocks...?
  skipWriter = new Lucene50SkipWriter(MAX_SKIP_LEVELS, BLOCK_SIZE, state.segmentInfo.maxDoc(), docOut, posOut, payOut);
  encoded = new byte[MAX_ENCODED_SIZE];
}