Java Code Examples for org.apache.lucene.store.IOContext#READ
The following examples show how to use org.apache.lucene.store.IOContext#READ.
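As a quick orientation before the examples, here is a minimal, self-contained sketch of where IOContext.READ typically appears: it is the IOContext passed to Directory.openInput when opening an existing index file for ordinary reading. This sketch is not taken from any of the projects below; the index path ("/tmp/index") and file name ("segments_1") are hypothetical placeholders.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

public class IOContextReadSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical index directory and file name, for illustration only.
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/index"));
         IndexInput in = dir.openInput("segments_1", IOContext.READ)) {
      // IOContext.READ signals an ordinary read (as opposed to, e.g., a merge
      // or flush context), letting the Directory choose appropriate buffering.
      System.out.println("file length: " + in.length());
    }
  }
}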
Example 1
Source File: SegmentDocValues.java, from lucene-solr (Apache License 2.0)
private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, Directory dir,
    final Long gen, FieldInfos infos) throws IOException {
  Directory dvDir = dir;
  String segmentSuffix = "";
  if (gen.longValue() != -1) {
    dvDir = si.info.dir; // gen'd files are written outside CFS, so use SegInfo directory
    segmentSuffix = Long.toString(gen.longValue(), Character.MAX_RADIX);
  }

  // set SegmentReadState to list only the fields that are relevant to that gen
  SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, IOContext.READ, segmentSuffix);
  DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat();
  return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
    @SuppressWarnings("synthetic-access")
    @Override
    protected void release() throws IOException {
      object.close();
      synchronized (SegmentDocValues.this) {
        genDVProducers.remove(gen);
      }
    }
  };
}
Example 2
Source File: TestSegmentReader.java, from lucene-solr (Apache License 2.0)
@Override
public void setUp() throws Exception {
  super.setUp();
  dir = newDirectory();
  DocHelper.setupDoc(testDoc);
  // Write the test document as a single segment, then open that segment
  // for reading with IOContext.READ.
  SegmentCommitInfo info = DocHelper.writeDoc(random(), dir, testDoc);
  reader = new SegmentReader(info, Version.LATEST.major, IOContext.READ);
}
Example 3
Source File: GenericRecordReader.java, from incubator-retired-blur (Apache License 2.0)
public void initialize(BlurInputSplit blurInputSplit, Configuration configuration) throws IOException {
  // Guard against repeated initialization.
  if (_setup) {
    return;
  }
  _setup = true;
  _table = blurInputSplit.getTable();

  Path localCachePath = BlurInputFormat.getLocalCachePath(configuration);
  LOG.info("Local cache path [{0}]", localCachePath);
  _directory = BlurInputFormat.getDirectory(configuration, _table.toString(), blurInputSplit.getDir());

  SegmentInfoPerCommit commit = segmentInfosRead(_directory, blurInputSplit.getSegmentsName(),
      blurInputSplit.getSegmentInfoName());
  SegmentInfo segmentInfo = commit.info;

  // If a local cache path is configured, copy the segment's files locally and read from the copy.
  if (localCachePath != null) {
    _readingDirectory = copyFilesLocally(configuration, _directory, _table.toString(), blurInputSplit.getDir(),
        localCachePath, commit.files(), blurInputSplit.getSegmentInfoName());
  } else {
    _readingDirectory = _directory;
  }

  Blur024Codec blur024Codec = new Blur024Codec();
  IOContext iocontext = IOContext.READ;

  // Read segment metadata with IOContext.READ: field infos, live docs
  // (only when the segment has deletions), and the stored fields reader.
  String segmentName = segmentInfo.name;
  FieldInfos fieldInfos = blur024Codec.fieldInfosFormat().getFieldInfosReader()
      .read(_readingDirectory, segmentName, iocontext);
  if (commit.getDelCount() > 0) {
    _liveDocs = blur024Codec.liveDocsFormat().readLiveDocs(_readingDirectory, commit, iocontext);
  }
  _fieldsReader = blur024Codec.storedFieldsFormat().fieldsReader(_readingDirectory, segmentInfo, fieldInfos,
      iocontext);

  _maxDoc = commit.info.getDocCount();
}
Example 4
Source File: DefaultIndexingChain.java, from lucene-solr (Apache License 2.0)
@Override
public Sorter.DocMap flush(SegmentWriteState state) throws IOException {

  // NOTE: caller (DocumentsWriterPerThread) handles
  // aborting on any exception from this method

  Sorter.DocMap sortMap = maybeSortSegment(state);
  int maxDoc = state.segmentInfo.maxDoc();
  long t0 = System.nanoTime();
  writeNorms(state, sortMap);
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write norms");
  }
  SegmentReadState readState = new SegmentReadState(state.directory, state.segmentInfo,
      state.fieldInfos, IOContext.READ, state.segmentSuffix);

  t0 = System.nanoTime();
  writeDocValues(state, sortMap);
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write docValues");
  }

  t0 = System.nanoTime();
  writePoints(state, sortMap);
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write points");
  }

  // it's possible all docs hit non-aborting exceptions...
  t0 = System.nanoTime();
  storedFieldsConsumer.finish(maxDoc);
  storedFieldsConsumer.flush(state, sortMap);
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to finish stored fields");
  }

  t0 = System.nanoTime();
  Map<String,TermsHashPerField> fieldsToFlush = new HashMap<>();
  for (int i=0;i<fieldHash.length;i++) {
    PerField perField = fieldHash[i];
    while (perField != null) {
      if (perField.invertState != null) {
        fieldsToFlush.put(perField.fieldInfo.name, perField.termsHashPerField);
      }
      perField = perField.next;
    }
  }

  try (NormsProducer norms = readState.fieldInfos.hasNorms()
      ? state.segmentInfo.getCodec().normsFormat().normsProducer(readState)
      : null) {
    NormsProducer normsMergeInstance = null;
    if (norms != null) {
      // Use the merge instance in order to reuse the same IndexInput for all terms
      normsMergeInstance = norms.getMergeInstance();
    }
    termsHash.flush(fieldsToFlush, state, sortMap, normsMergeInstance);
  }
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write postings and finish vectors");
  }

  // Important to save after asking consumer to flush so
  // consumer can alter the FieldInfo* if necessary. EG,
  // FreqProxTermsWriter does this with
  // FieldInfo.storePayload.
  t0 = System.nanoTime();
  docWriter.codec.fieldInfosFormat().write(state.directory, state.segmentInfo, "", state.fieldInfos, IOContext.DEFAULT);
  if (infoStream.isEnabled("IW")) {
    infoStream.message("IW", ((System.nanoTime()-t0)/1000000) + " msec to write fieldInfos");
  }

  return sortMap;
}