Java Code Examples for org.apache.lucene.util.Version#LATEST
The following examples show how to use org.apache.lucene.util.Version#LATEST.
Each example is taken from an open-source project; the source file and license are noted above each snippet.
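Version.LATEST is a constant that always refers to the release of the Lucene jar on the classpath, so code compiled against it silently follows every upgrade. Before the examples, here is a minimal standalone sketch of the constant itself (the class name is illustrative, and the printed version depends on your Lucene dependency):

import org.apache.lucene.util.Version;

public class VersionLatestDemo {
  public static void main(String[] args) {
    // LATEST points at the version of the Lucene jar in use.
    Version latest = Version.LATEST;
    System.out.println("Latest Lucene version: " + latest); // e.g. 8.6.0
    System.out.println("Major component: " + latest.major); // e.g. 8
    // onOrAfter compares versions; LATEST is trivially on or after itself.
    System.out.println(latest.onOrAfter(Version.LATEST));   // true
  }
}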
Example 1
Source File: SolrConfig.java, from lucene-solr (Apache License 2.0)
public static final Version parseLuceneVersionString(final String matchVersion) {
  final Version version;
  try {
    version = Version.parseLeniently(matchVersion);
  } catch (ParseException pe) {
    throw new SolrException(ErrorCode.SERVER_ERROR,
        "Invalid luceneMatchVersion. Should be of the form 'V.V.V' (e.g. 4.8.0)", pe);
  }

  if (version == Version.LATEST && !versionWarningAlreadyLogged.getAndSet(true)) {
    log.warn("You should not use LATEST as luceneMatchVersion property: "
        + "if you use this setting, and then Solr upgrades to a newer release of Lucene, "
        + "sizable changes may happen. If precise back compatibility is important "
        + "then you should instead explicitly specify an actual Lucene version.");
  }

  return version;
}
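The helper above delegates to Version.parseLeniently, which also accepts the literal string "LATEST" and maps it to the Version.LATEST constant; that is exactly the case the warning guards against. A self-contained sketch of that parsing behavior (pure Lucene, no Solr required; the class name and printed values are illustrative):

import java.text.ParseException;
import org.apache.lucene.util.Version;

public class ParseLenientlyDemo {
  public static void main(String[] args) throws ParseException {
    // A concrete version string parses to that version.
    System.out.println(Version.parseLeniently("8.6.0"));
    // "LATEST" maps to the Version.LATEST constant, which triggers Solr's warning above.
    System.out.println(Version.parseLeniently("LATEST") == Version.LATEST); // true
    try {
      Version.parseLeniently("not.a.version");
    } catch (ParseException pe) {
      // This is the exception that SolrConfig wraps in a SolrException.
      System.out.println("rejected: " + pe.getMessage());
    }
  }
}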
Example 2
Source File: TestPendingDeletes.java, from lucene-solr (Apache License 2.0)
public void testIsFullyDeleted() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 3, false,
      Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1, StringHelper.randomId());
  FieldInfos fieldInfos = FieldInfos.EMPTY;
  si.getCodec().fieldInfosFormat().write(dir, si, "", fieldInfos, IOContext.DEFAULT);
  PendingDeletes deletes = newPendingDeletes(commitInfo);
  for (int i = 0; i < 3; i++) {
    assertTrue(deletes.delete(i));
    if (random().nextBoolean()) {
      assertTrue(deletes.writeLiveDocs(dir));
    }
    assertEquals(i == 2, deletes.isFullyDeleted(() -> null));
  }
}
Example 3
Source File: SlowCompositeReaderWrapper.java, from lucene-solr (Apache License 2.0)
SlowCompositeReaderWrapper(CompositeReader reader) throws IOException {
  in = reader;
  in.registerParentReader(this);
  if (reader.leaves().isEmpty()) {
    metaData = new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
  } else {
    Version minVersion = Version.LATEST;
    for (LeafReaderContext leafReaderContext : reader.leaves()) {
      Version leafVersion = leafReaderContext.reader().getMetaData().getMinVersion();
      if (leafVersion == null) {
        minVersion = null;
        break;
      } else if (minVersion.onOrAfter(leafVersion)) {
        minVersion = leafVersion;
      }
    }
    metaData = new LeafMetaData(reader.leaves().get(0).reader().getMetaData().getCreatedVersionMajor(),
        minVersion, null);
  }
  fieldInfos = FieldInfos.getMergedFieldInfos(in);
}
Example 4
Source File: AbstractAnalysisFactory.java, from lucene-solr (Apache License 2.0)
/** Initialize this factory via a set of key-value pairs. */
protected AbstractAnalysisFactory(Map<String,String> args) {
  originalArgs = Map.copyOf(args);
  String version = get(args, LUCENE_MATCH_VERSION_PARAM);
  if (version == null) {
    luceneMatchVersion = Version.LATEST;
  } else {
    try {
      luceneMatchVersion = Version.parseLeniently(version);
    } catch (ParseException pe) {
      throw new IllegalArgumentException(pe);
    }
  }
  args.remove(CLASS_NAME); // consume the class arg
  args.remove(SPI_NAME);   // consume the spi arg
}
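For illustration, here is how the luceneMatchVersion argument might flow into a concrete analysis factory such as LowerCaseFilterFactory. This is a sketch assuming lucene-analyzers-common 8.x on the classpath, where getLuceneMatchVersion() exposes the parsed value; the class name and version strings are illustrative:

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.util.Version;

public class FactoryVersionSketch {
  public static void main(String[] args) {
    // The constructor above consumes "luceneMatchVersion" from the args map.
    Map<String, String> params = new HashMap<>();
    params.put("luceneMatchVersion", "8.0.0");
    LowerCaseFilterFactory pinned = new LowerCaseFilterFactory(params);
    System.out.println(pinned.getLuceneMatchVersion()); // 8.0.0

    // With no luceneMatchVersion supplied, the factory falls back to Version.LATEST.
    LowerCaseFilterFactory latest = new LowerCaseFilterFactory(new HashMap<>());
    System.out.println(latest.getLuceneMatchVersion() == Version.LATEST); // true
  }
}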
Example 5
Source File: BaseMergePolicyTestCase.java, from lucene-solr (Apache License 2.0)
/**
 * Make a new {@link SegmentCommitInfo} with the given {@code maxDoc},
 * {@code numDeletedDocs} and {@code sizeInBytes}, which are usually the
 * numbers that merge policies care about.
 */
protected static SegmentCommitInfo makeSegmentCommitInfo(String name, int maxDoc, int numDeletedDocs, double sizeMB, String source) {
  if (name.startsWith("_") == false) {
    throw new IllegalArgumentException("name must start with an _, got " + name);
  }
  byte[] id = new byte[StringHelper.ID_LENGTH];
  random().nextBytes(id);
  SegmentInfo info = new SegmentInfo(FAKE_DIRECTORY, Version.LATEST, Version.LATEST, name, maxDoc, false,
      TestUtil.getDefaultCodec(), Collections.emptyMap(), id,
      Collections.singletonMap(IndexWriter.SOURCE, source), null);
  info.setFiles(Collections.singleton(name + "_size=" + Long.toString((long) (sizeMB * 1024 * 1024)) + ".fake"));
  return new SegmentCommitInfo(info, numDeletedDocs, 0, 0, 0, 0, StringHelper.randomId());
}
Example 6
Source File: TestPendingDeletes.java, from lucene-solr (Apache License 2.0)
public void testDeleteDoc() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 10, false,
      Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1, StringHelper.randomId());
  PendingDeletes deletes = newPendingDeletes(commitInfo);
  assertNull(deletes.getLiveDocs());
  int docToDelete = TestUtil.nextInt(random(), 0, 7);
  assertTrue(deletes.delete(docToDelete));
  assertNotNull(deletes.getLiveDocs());
  assertEquals(1, deletes.numPendingDeletes());

  Bits liveDocs = deletes.getLiveDocs();
  assertFalse(liveDocs.get(docToDelete));
  assertFalse(deletes.delete(docToDelete)); // delete again

  assertTrue(liveDocs.get(8));
  assertTrue(deletes.delete(8));
  assertTrue(liveDocs.get(8)); // we have a snapshot
  assertEquals(2, deletes.numPendingDeletes());

  assertTrue(liveDocs.get(9));
  assertTrue(deletes.delete(9));
  assertTrue(liveDocs.get(9));

  // now make sure new live docs see the deletions
  liveDocs = deletes.getLiveDocs();
  assertFalse(liveDocs.get(9));
  assertFalse(liveDocs.get(8));
  assertFalse(liveDocs.get(docToDelete));
  assertEquals(3, deletes.numPendingDeletes());
  dir.close();
}
Example 7
Source File: TestDoc.java, from lucene-solr (Apache License 2.0)
private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile) throws Exception {
  IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
  SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, context);
  SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, context);

  final Codec codec = Codec.getDefault();
  TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
  final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, null, merged, -1, false,
      codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

  SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(r1, r2),
      si, InfoStream.getDefault(), trackingDir,
      new FieldInfos.FieldNumbers(null), context);

  MergeState mergeState = merger.merge();
  r1.close();
  r2.close();
  si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));

  if (useCompoundFile) {
    Collection<String> filesToDelete = si.files();
    codec.compoundFormat().write(dir, si, context);
    si.setUseCompoundFile(true);
    for (String name : filesToDelete) {
      si1.info.dir.deleteFile(name);
    }
  }

  return new SegmentCommitInfo(si, 0, 0, -1L, -1L, -1L, StringHelper.randomId());
}
Example 8
Source File: BaseMergePolicyTestCase.java, from lucene-solr (Apache License 2.0)
public void testFindForcedDeletesMerges() throws IOException {
  MergePolicy mp = mergePolicy();
  if (mp instanceof FilterMergePolicy) {
    assumeFalse("test doesn't work with MockRandomMP",
        ((FilterMergePolicy) mp).in instanceof MockRandomMergePolicy);
  }
  SegmentInfos infos = new SegmentInfos(Version.LATEST.major);
  try (Directory directory = newDirectory()) {
    MergePolicy.MergeContext context = new MockMergeContext(s -> 0);
    int numSegs = random().nextInt(10);
    for (int i = 0; i < numSegs; i++) {
      SegmentInfo info = new SegmentInfo(
          directory, // dir
          Version.LATEST, // version
          Version.LATEST, // min version
          TestUtil.randomSimpleString(random()), // name
          random().nextInt(Integer.MAX_VALUE), // maxDoc
          random().nextBoolean(), // isCompoundFile
          null, // codec
          Collections.emptyMap(), // diagnostics
          TestUtil.randomSimpleString( // id
              random(), StringHelper.ID_LENGTH, StringHelper.ID_LENGTH).getBytes(StandardCharsets.US_ASCII),
          Collections.emptyMap(), // attributes
          null /* indexSort */);
      info.setFiles(Collections.emptyList());
      infos.add(new SegmentCommitInfo(info, random().nextInt(1), 0, -1, -1, -1, StringHelper.randomId()));
    }
    MergePolicy.MergeSpecification forcedDeletesMerges = mp.findForcedDeletesMerges(infos, context);
    if (forcedDeletesMerges != null) {
      assertEquals(0, forcedDeletesMerges.merges.size());
    }
  }
}
Example 9
Source File: LindenConfig.java, from linden (Apache License 2.0)
public IndexWriterConfig createIndexWriterConfig() throws IOException {
  IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Version.LATEST, getIndexAnalyzerInstance());
  indexWriterConfig.setRAMBufferSizeMB(48);
  MergePolicy mergePolicy = getPluginManager().getInstance(LindenConfigBuilder.MERGE_POLICY, MergePolicy.class);
  if (mergePolicy != null) {
    indexWriterConfig.setMergePolicy(mergePolicy);
  }
  LOGGER.info("Merge policy : {}", mergePolicy == null ? "Default" : mergePolicy);
  ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
  cms.setMaxMergesAndThreads(8, 1);
  indexWriterConfig.setMergeScheduler(cms);
  return indexWriterConfig;
}
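Note that the two-argument IndexWriterConfig constructor used here dates from Lucene 4.x; in later releases the Version parameter was removed, and a new writer implicitly targets the latest index format. A rough modern equivalent, as a sketch assuming Lucene 8.x on the classpath (the class name and analyzer choice are illustrative):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;

public class WriterConfigSketch {
  public static void main(String[] args) throws Exception {
    // No Version argument anymore; new segments are written with Version.LATEST.
    IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
    config.setRAMBufferSizeMB(48);
    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
    cms.setMaxMergesAndThreads(8, 1);
    config.setMergeScheduler(cms);
    try (IndexWriter writer = new IndexWriter(new ByteBuffersDirectory(), config)) {
      writer.commit();
    }
  }
}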
Example 10
Source File: Solr4QueryParserTest.java, from SearchServices (GNU Lesser General Public License v3.0)
@Before
public void setUp() throws Exception {
  SolrQueryRequest req = Mockito.mock(SolrQueryRequest.class);
  parser = new Solr4QueryParser(req, Version.LATEST, "TEXT", null, FTSQueryParser.RerankPhase.SINGLE_PASS);
  parser.setSearchParameters(searchParameters);
}
Example 11
Source File: TestSimpleTextSegmentInfoFormat.java, from lucene-solr (Apache License 2.0)
@Override
protected Version[] getVersions() {
  return new Version[] { Version.LATEST };
}
Example 12
Source File: TestSegmentMerger.java, from lucene-solr (Apache License 2.0)
public void testMerge() throws IOException {
  final Codec codec = Codec.getDefault();
  final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, null, mergedSegment, -1, false,
      codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

  SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(reader1, reader2),
      si, InfoStream.getDefault(), mergedDir,
      new FieldInfos.FieldNumbers(null),
      newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1))));
  MergeState mergeState = merger.merge();
  int docsMerged = mergeState.segmentInfo.maxDoc();
  assertTrue(docsMerged == 2);

  // Should be able to open a new SegmentReader against the new directory
  SegmentReader mergedReader = new SegmentReader(new SegmentCommitInfo(
      mergeState.segmentInfo, 0, 0, -1L, -1L, -1L, StringHelper.randomId()),
      Version.LATEST.major, newIOContext(random()));
  assertTrue(mergedReader != null);
  assertTrue(mergedReader.numDocs() == 2);

  Document newDoc1 = mergedReader.document(0);
  assertTrue(newDoc1 != null);
  // There are 2 unstored fields on the document
  assertTrue(DocHelper.numFields(newDoc1) == DocHelper.numFields(doc1) - DocHelper.unstored.size());
  Document newDoc2 = mergedReader.document(1);
  assertTrue(newDoc2 != null);
  assertTrue(DocHelper.numFields(newDoc2) == DocHelper.numFields(doc2) - DocHelper.unstored.size());

  PostingsEnum termDocs = TestUtil.docs(random(), mergedReader,
      DocHelper.TEXT_FIELD_2_KEY, new BytesRef("field"), null, 0);
  assertTrue(termDocs != null);
  assertTrue(termDocs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);

  int tvCount = 0;
  for (FieldInfo fieldInfo : mergedReader.getFieldInfos()) {
    if (fieldInfo.hasVectors()) {
      tvCount++;
    }
  }

  // System.out.println("stored size: " + stored.size());
  assertEquals("We do not have 3 fields that were indexed with term vector", 3, tvCount);

  Terms vector = mergedReader.getTermVectors(0).terms(DocHelper.TEXT_FIELD_2_KEY);
  assertNotNull(vector);
  assertEquals(3, vector.size());
  TermsEnum termsEnum = vector.iterator();
  int i = 0;
  while (termsEnum.next() != null) {
    String term = termsEnum.term().utf8ToString();
    int freq = (int) termsEnum.totalTermFreq();
    // System.out.println("Term: " + term + " Freq: " + freq);
    assertTrue(DocHelper.FIELD_2_TEXT.indexOf(term) != -1);
    assertTrue(DocHelper.FIELD_2_FREQS[i] == freq);
    i++;
  }

  TestSegmentReader.checkNorms(mergedReader);
  mergedReader.close();
}
Example 13
Source File: IntermediateForm.java, from linden (Apache License 2.0)
private void createWriter() throws IOException {
  IndexWriterConfig config = new IndexWriterConfig(Version.LATEST, null);
  config.setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
  writer = new IndexWriter(dir, config);
  taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
}
Example 14
Source File: BaseCompoundFormatTestCase.java, from lucene-solr (Apache License 2.0)
/** Returns a new fake segment */
protected static SegmentInfo newSegmentInfo(Directory dir, String name) {
  Version minVersion = random().nextBoolean() ? null : Version.LATEST;
  return new SegmentInfo(dir, Version.LATEST, minVersion, name, 10000, false,
      Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), Collections.emptyMap(), null);
}
Example 15
Source File: TestLucene86SegmentInfoFormat.java, from lucene-solr (Apache License 2.0)
@Override
protected Version[] getVersions() {
  return new Version[] { Version.LATEST };
}
Example 16
Source File: TestSegmentInfos.java, from lucene-solr (Apache License 2.0)
/** Test toString method */
public void testToString() throws Throwable {
  SegmentInfo si;
  final Directory dir = newDirectory();
  Codec codec = Codec.getDefault();

  // diagnostics map
  Map<String, String> diagnostics = Map.of("key1", "value1", "key2", "value2");
  // attributes map
  Map<String, String> attributes = Map.of("akey1", "value1", "akey2", "value2");

  // diagnostics X, attributes X
  si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "TEST", 10000, false, codec,
      Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), Sort.INDEXORDER);
  assertEquals("TEST(" + Version.LATEST.toString() + ")" + ":C10000" + ":[indexSort=<doc>]",
      si.toString());

  // diagnostics O, attributes X
  si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "TEST", 10000, false, codec,
      diagnostics, StringHelper.randomId(), new HashMap<>(), Sort.INDEXORDER);
  assertEquals("TEST(" + Version.LATEST.toString() + ")" + ":C10000" + ":[indexSort=<doc>]"
      + ":[diagnostics=" + diagnostics + "]", si.toString());

  // diagnostics X, attributes O
  si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "TEST", 10000, false, codec,
      Collections.emptyMap(), StringHelper.randomId(), attributes, Sort.INDEXORDER);
  assertEquals("TEST(" + Version.LATEST.toString() + ")" + ":C10000" + ":[indexSort=<doc>]"
      + ":[attributes=" + attributes + "]", si.toString());

  // diagnostics O, attributes O
  si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "TEST", 10000, false, codec,
      diagnostics, StringHelper.randomId(), attributes, Sort.INDEXORDER);
  assertEquals("TEST(" + Version.LATEST.toString() + ")" + ":C10000" + ":[indexSort=<doc>]"
      + ":[diagnostics=" + diagnostics + "]" + ":[attributes=" + attributes + "]", si.toString());

  dir.close();
}
Example 17
Source File: TestSegmentInfos.java, from lucene-solr (Apache License 2.0)
public void testBitFlippedTriggersCorruptIndexException() throws IOException {
  BaseDirectoryWrapper dir = newDirectory();
  dir.setCheckIndexOnClose(false);
  byte id[] = StringHelper.randomId();
  Codec codec = Codec.getDefault();

  SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
  SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false,
      Codec.getDefault(), Collections.<String,String>emptyMap(), id,
      Collections.<String,String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
  sis.add(commitInfo);

  info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false,
      Codec.getDefault(), Collections.<String,String>emptyMap(), id,
      Collections.<String,String>emptyMap(), null);
  info.setFiles(Collections.<String>emptySet());
  codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
  commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
  sis.add(commitInfo);

  sis.commit(dir);

  BaseDirectoryWrapper corruptDir = newDirectory();
  corruptDir.setCheckIndexOnClose(false);
  boolean corrupt = false;
  for (String file : dir.listAll()) {
    if (file.startsWith(IndexFileNames.SEGMENTS)) {
      try (IndexInput in = dir.openInput(file, IOContext.DEFAULT);
           IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) {
        final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1);
        out.copyBytes(in, corruptIndex);
        final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff);
        out.writeByte((byte) b);
        out.copyBytes(in, in.length() - in.getFilePointer());
      }
      try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) {
        CodecUtil.checksumEntireFile(in);
        if (VERBOSE) {
          System.out.println("TEST: Altering the file did not update the checksum, aborting...");
        }
        return;
      } catch (CorruptIndexException e) {
        // ok
      }
      corrupt = true;
    } else if (ExtrasFS.isExtra(file) == false) {
      corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT);
    }
  }
  assertTrue("No segments file found", corrupt);

  expectThrowsAnyOf(
      Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class),
      () -> SegmentInfos.readLatestCommit(corruptDir));
  dir.close();
  corruptDir.close();
}
Example 18
Source File: ConcatenateTokenFilterFactory.java, from elasticsearch-concatenate-token-filter (Apache License 2.0)
@Override
public TokenStream create(TokenStream tokenStream) {
  return new ConcatenateFilter(Version.LATEST, tokenStream, tokenSeparator, incrementGap);
}
Example 19
Source File: TestCodecs.java, from lucene-solr (Apache License 2.0)
public void testFixedPostings() throws Throwable {
  final int NUM_TERMS = 100;
  final TermData[] terms = new TermData[NUM_TERMS];
  for (int i = 0; i < NUM_TERMS; i++) {
    final int[] docs = new int[] {i};
    final String text = Integer.toString(i, Character.MAX_RADIX);
    terms[i] = new TermData(text, docs, null);
  }

  final FieldInfos.Builder builder = new FieldInfos.Builder(new FieldInfos.FieldNumbers(null));

  final FieldData field = new FieldData("field", builder, terms, true, false);
  final FieldData[] fields = new FieldData[] {field};
  final FieldInfos fieldInfos = builder.finish();
  final Directory dir = newDirectory();
  Codec codec = Codec.getDefault();
  final SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, SEGMENT, 10000, false,
      codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

  this.write(si, fieldInfos, dir, fields);

  final FieldsProducer reader = codec.postingsFormat().fieldsProducer(
      new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));

  final Iterator<String> fieldsEnum = reader.iterator();
  String fieldName = fieldsEnum.next();
  assertNotNull(fieldName);
  final Terms terms2 = reader.terms(fieldName);
  assertNotNull(terms2);

  final TermsEnum termsEnum = terms2.iterator();

  PostingsEnum postingsEnum = null;
  for (int i = 0; i < NUM_TERMS; i++) {
    final BytesRef term = termsEnum.next();
    assertNotNull(term);
    assertEquals(terms[i].text2, term.utf8ToString());

    // do this twice to stress test the codec's reuse, ie,
    // make sure it properly fully resets (rewinds) its
    // internal state:
    for (int iter = 0; iter < 2; iter++) {
      postingsEnum = TestUtil.docs(random(), termsEnum, postingsEnum, PostingsEnum.NONE);
      assertEquals(terms[i].docs[0], postingsEnum.nextDoc());
      assertEquals(DocIdSetIterator.NO_MORE_DOCS, postingsEnum.nextDoc());
    }
  }
  assertNull(termsEnum.next());

  for (int i = 0; i < NUM_TERMS; i++) {
    assertEquals(termsEnum.seekCeil(new BytesRef(terms[i].text2)), TermsEnum.SeekStatus.FOUND);
  }

  assertFalse(fieldsEnum.hasNext());
  reader.close();
  dir.close();
}
Example 20
Source File: TestPendingDeletes.java, from lucene-solr (Apache License 2.0)
public void testWriteLiveDocs() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 6, false,
      Codec.getDefault(), Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1, StringHelper.randomId());
  PendingDeletes deletes = newPendingDeletes(commitInfo);
  assertFalse(deletes.writeLiveDocs(dir));
  assertEquals(0, dir.listAll().length);
  boolean secondDocDeletes = random().nextBoolean();
  deletes.delete(5);
  if (secondDocDeletes) {
    deletes.getLiveDocs();
    deletes.delete(2);
  }
  assertEquals(-1, commitInfo.getDelGen());
  assertEquals(0, commitInfo.getDelCount());

  assertEquals(secondDocDeletes ? 2 : 1, deletes.numPendingDeletes());
  assertTrue(deletes.writeLiveDocs(dir));
  assertEquals(1, dir.listAll().length);
  Bits liveDocs = Codec.getDefault().liveDocsFormat().readLiveDocs(dir, commitInfo, IOContext.DEFAULT);
  assertFalse(liveDocs.get(5));
  if (secondDocDeletes) {
    assertFalse(liveDocs.get(2));
  } else {
    assertTrue(liveDocs.get(2));
  }
  assertTrue(liveDocs.get(0));
  assertTrue(liveDocs.get(1));
  assertTrue(liveDocs.get(3));
  assertTrue(liveDocs.get(4));

  assertEquals(0, deletes.numPendingDeletes());
  assertEquals(secondDocDeletes ? 2 : 1, commitInfo.getDelCount());
  assertEquals(1, commitInfo.getDelGen());

  deletes.delete(0);
  assertTrue(deletes.writeLiveDocs(dir));
  assertEquals(2, dir.listAll().length);
  liveDocs = Codec.getDefault().liveDocsFormat().readLiveDocs(dir, commitInfo, IOContext.DEFAULT);
  assertFalse(liveDocs.get(5));
  if (secondDocDeletes) {
    assertFalse(liveDocs.get(2));
  } else {
    assertTrue(liveDocs.get(2));
  }
  assertFalse(liveDocs.get(0));
  assertTrue(liveDocs.get(1));
  assertTrue(liveDocs.get(3));
  assertTrue(liveDocs.get(4));

  assertEquals(0, deletes.numPendingDeletes());
  assertEquals(secondDocDeletes ? 3 : 2, commitInfo.getDelCount());
  assertEquals(2, commitInfo.getDelGen());
  dir.close();
}