org.apache.lucene.store.Directory Java Examples
The following examples show how to use org.apache.lucene.store.Directory, drawn from several open-source projects.
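As a primer before the project examples, here is a minimal sketch of the typical Directory lifecycle: open an implementation, index through an IndexWriter, then search through a DirectoryReader. The path and field names here are illustrative, not taken from any project below.

import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class DirectoryLifecycleDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/demo-index"))) {
      // Write one document.
      try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
        Document doc = new Document();
        doc.add(new TextField("body", "hello directory", Field.Store.YES));
        writer.addDocument(doc);
      }
      // Read it back.
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        long hits = searcher.count(new TermQuery(new Term("body", "hello")));
        System.out.println("hits: " + hits); // expect 1
      }
    }
  }
}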
Example #1
Source File: SolrCoreCheckLockOnStartupTest.java From lucene-solr with Apache License 2.0
@Test
public void testSimpleLockErrorOnStartup() throws Exception {

  Directory directory = newFSDirectory(new File(initAndGetDataDir(), "index").toPath(), SimpleFSLockFactory.INSTANCE);
  // creates a new IndexWriter without releasing the lock yet
  IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null));

  ignoreException("locked");
  try {
    System.setProperty("solr.tests.lockType", DirectoryFactory.LOCK_TYPE_SIMPLE);
    // opening a new core on the same index
    initCore("solrconfig-basic.xml", "schema.xml");
    if (checkForCoreInitException(LockObtainFailedException.class))
      return;
    fail("Expected " + LockObtainFailedException.class.getSimpleName());
  } finally {
    System.clearProperty("solr.tests.lockType");
    unIgnoreException("locked");
    indexWriter.close();
    directory.close();
    deleteCore();
  }
}
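The test above exercises Solr's startup locking; the same failure can be reproduced directly at the Lucene level, since a Directory allows only one live IndexWriter at a time. A minimal sketch (the index path is hypothetical):

import java.nio.file.Paths;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.LockObtainFailedException;

public class WriteLockDemo {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/lock-demo"))) {
      // The first writer acquires write.lock.
      try (IndexWriter first = new IndexWriter(dir, new IndexWriterConfig())) {
        try {
          // A second writer on the same Directory must fail while the lock is held.
          new IndexWriter(dir, new IndexWriterConfig());
        } catch (LockObtainFailedException expected) {
          System.out.println("second writer rejected: " + expected.getMessage());
        }
      }
    }
  }
}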
Example #2
Source File: TestQueryParser.java From lucene-solr with Apache License 2.0
private boolean isAHit(Query q, String content, Analyzer analyzer) throws IOException {
  Directory ramDir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), ramDir, analyzer);
  Document doc = new Document();
  FieldType fieldType = new FieldType();
  fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
  fieldType.setTokenized(true);
  fieldType.setStored(true);
  Field field = new Field(FIELD, content, fieldType);
  doc.add(field);
  writer.addDocument(doc);
  writer.close();
  DirectoryReader ir = DirectoryReader.open(ramDir);
  IndexSearcher is = new IndexSearcher(ir);
  long hits = is.count(q);
  ir.close();
  ramDir.close();
  return hits == 1;
}
Example #3
Source File: BaseGeoPointTestCase.java From lucene-solr with Apache License 2.0
/** test we can search for a polygon */
public void testPolygonBasics() throws Exception {
  assumeTrue("Impl does not support polygons", supportsPolygons());
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  // add a doc with a point
  Document document = new Document();
  addPointToDoc("field", document, 18.313694, -65.227444);
  writer.addDocument(document);

  // search and verify we found our doc
  IndexReader reader = writer.getReader();
  IndexSearcher searcher = newSearcher(reader);
  assertEquals(1, searcher.count(newPolygonQuery("field", new Polygon(
      new double[] { 18, 18, 19, 19, 18 },
      new double[] { -66, -65, -65, -66, -66 }))));

  reader.close();
  writer.close();
  dir.close();
}
Example #4
Source File: IntermediateForm.java From hadoop-gpu with Apache License 2.0
/**
 * This method is used by the index update combiner and processes an
 * intermediate form into the current intermediate form. More specifically,
 * the input intermediate forms are a single-document ram index and/or a
 * single delete term.
 * @param form the input intermediate form
 * @throws IOException
 */
public void process(IntermediateForm form) throws IOException {
  if (form.deleteList.size() > 0) {
    deleteList.addAll(form.deleteList);
  }

  if (form.dir.sizeInBytes() > 0) {
    if (writer == null) {
      writer = createWriter();
    }

    writer.addIndexesNoOptimize(new Directory[] { form.dir });
    numDocs++;
  }
}
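Note that this example comes from Hadoop's old contrib indexer and targets a pre-4.0 Lucene API: addIndexesNoOptimize(Directory[]) and the RAMDirectory sizeInBytes() call were removed in later Lucene releases. Under current Lucene, the bulk-merge step would look roughly like the sketch below (the method name and surrounding plumbing are illustrative, not part of the original class):

import java.io.IOException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;

// Assuming 'writer' is an open IndexWriter and 'formDir' holds the
// single-document in-memory index, the bulk merge is now simply:
void mergeForm(IndexWriter writer, Directory formDir) throws IOException {
  writer.addIndexes(formDir); // modern replacement for addIndexesNoOptimize(Directory[])
}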
Example #5
Source File: TestIndexWriterMaxDocs.java From lucene-solr with Apache License 2.0
public void testUpdateDocument() throws Exception {
  setIndexWriterMaxDocs(10);
  try {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    for (int i = 0; i < 10; i++) {
      w.addDocument(new Document());
    }

    // 11th document should fail:
    expectThrows(IllegalArgumentException.class, () -> {
      w.updateDocument(new Term("field", "foo"), new Document());
    });

    w.close();
    dir.close();
  } finally {
    restoreIndexWriterMaxDocs();
  }
}
Example #6
Source File: TestIndexWriter.java From lucene-solr with Apache License 2.0
public void testHasUncommittedChangesAfterException() throws IOException {
  Analyzer analyzer = new MockAnalyzer(random());

  Directory directory = newDirectory();
  // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!
  IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
  iwc.setMergePolicy(newLogMergePolicy());
  IndexWriter iwriter = new IndexWriter(directory, iwc);
  Document doc = new Document();
  doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
  doc.add(new SortedDocValuesField("dv", new BytesRef("bar!")));
  expectThrows(IllegalArgumentException.class, () -> {
    iwriter.addDocument(doc);
  });
  iwriter.commit();
  assertFalse(iwriter.hasUncommittedChanges());
  iwriter.close();
  directory.close();
}
Example #7
Source File: HdfsDirectoryTest.java From lucene-solr with Apache License 2.0
public void testCreateTempFiles() throws IOException {
  String file1;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
       IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    file1 = out.getName();
  }
  assertTrue(file1.startsWith("foo_bar"));
  assertTrue(file1.endsWith(".tmp"));

  // Create the directory again to force the counter to be reset
  String file2;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
       IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    file2 = out.getName();
  }
  assertTrue(file2.startsWith("foo_bar"));
  assertTrue(file2.endsWith(".tmp"));
  assertNotEquals(file1, file2);
}
Example #8
Source File: TestReaderPool.java From lucene-solr with Apache License 2.0
public void testDrop() throws IOException {
  Directory directory = newDirectory();
  FieldInfos.FieldNumbers fieldNumbers = buildIndex(directory);
  StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
  SegmentInfos segmentInfos = reader.segmentInfos.clone();

  ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0L,
      null, null, null);
  SegmentCommitInfo commitInfo = RandomPicks.randomFrom(random(), segmentInfos.asList());
  ReadersAndUpdates readersAndUpdates = pool.get(commitInfo, true);
  assertSame(readersAndUpdates, pool.get(commitInfo, false));
  assertTrue(pool.drop(commitInfo));
  if (random().nextBoolean()) {
    assertFalse(pool.drop(commitInfo));
  }
  assertNull(pool.get(commitInfo, false));
  pool.release(readersAndUpdates, random().nextBoolean());
  IOUtils.close(pool, reader, directory);
}
Example #9
Source File: BaseCompoundFormatTestCase.java From lucene-solr with Apache License 2.0
public void testRenameFileDisabled() throws IOException {
  final String testfile = "_123.test";

  Directory dir = newDirectory();
  IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
  out.writeInt(3);
  out.close();

  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptyList());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);

  expectThrows(UnsupportedOperationException.class, () -> {
    cfs.rename(testfile, "bogus");
  });

  cfs.close();
  dir.close();
}
Example #10
Source File: TestDirectoryReader.java From lucene-solr with Apache License 2.0
public void testIsCurrent() throws Exception {
  Directory d = newDirectory();
  IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
  addDocumentWithFields(writer);
  writer.close();

  // set up reader:
  DirectoryReader reader = DirectoryReader.open(d);
  assertTrue(reader.isCurrent());

  // modify index by adding another document:
  writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
      .setOpenMode(OpenMode.APPEND));
  addDocumentWithFields(writer);
  writer.close();
  assertFalse(reader.isCurrent());

  // re-create index:
  writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
      .setOpenMode(OpenMode.CREATE));
  addDocumentWithFields(writer);
  writer.close();
  assertFalse(reader.isCurrent());

  reader.close();
  d.close();
}
Example #11
Source File: TestIndexWriterOnOldIndex.java From lucene-solr with Apache License 2.0
public void testOpenModeAndCreatedVersion() throws IOException {
  assumeTrue("Reenable when 8.0 is released", false);
  InputStream resource = getClass().getResourceAsStream("index.single-empty-doc.8.0.0.zip");
  assertNotNull(resource);
  Path path = createTempDir();
  TestUtil.unzip(resource, path);
  Directory dir = newFSDirectory(path);
  for (OpenMode openMode : OpenMode.values()) {
    Directory tmpDir = newDirectory(dir);
    assertEquals(7 /* 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
    IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode));
    w.commit();
    w.close();
    switch (openMode) {
      case CREATE:
        assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
        break;
      default:
        assertEquals(7 /* 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
    }
    tmpDir.close();
  }
  dir.close();
}
Example #12
Source File: TestDirectoryTaxonomyWriter.java From lucene-solr with Apache License 2.0
@Test
public void testBackwardsCompatibility() throws Exception {
  // tests that if the taxonomy index doesn't have the INDEX_EPOCH
  // property (supports pre-3.6 indexes), all still works.
  Directory dir = newDirectory();

  // create an empty index first, so that DirTaxoWriter initializes indexEpoch to 1.
  new IndexWriter(dir, new IndexWriterConfig(null)).close();

  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE);
  taxoWriter.close();

  DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(dir);
  assertEquals(1, Integer.parseInt(taxoReader.getCommitUserData().get(DirectoryTaxonomyWriter.INDEX_EPOCH)));
  assertNull(TaxonomyReader.openIfChanged(taxoReader));
  taxoReader.close();

  dir.close();
}
Example #13
Source File: TestControlledRealTimeReopenThread.java From lucene-solr with Apache License 2.0
public void testEvilSearcherFactory() throws Exception {
  final Directory dir = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.commit();

  final IndexReader other = DirectoryReader.open(dir);

  final SearcherFactory theEvilOne = new SearcherFactory() {
    @Override
    public IndexSearcher newSearcher(IndexReader ignored, IndexReader previous) {
      return LuceneTestCase.newSearcher(other);
    }
  };

  expectThrows(IllegalStateException.class, () -> {
    new SearcherManager(w.w, false, false, theEvilOne);
  });

  w.close();
  other.close();
  dir.close();
}
Example #14
Source File: EngineTestCase.java From crate with Apache License 2.0
protected InternalEngine createEngine(@Nullable IndexWriterFactory indexWriterFactory,
                                      @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
                                      @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
                                      EngineConfig config) throws IOException {
  final Store store = config.getStore();
  final Directory directory = store.directory();
  if (Lucene.indexExists(directory) == false) {
    store.createEmpty();
    final String translogUuid = Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
        SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
    store.associateIndexWithNewTranslog(translogUuid);
  }
  InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
  internalEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
  return internalEngine;
}
Example #15
Source File: InternalEngine.java From crate with Apache License 2.0
@Override
protected void handleMergeException(final Directory dir, final Throwable exc) {
  engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {

    @Override
    public void onFailure(Exception e) {
      logger.debug("merge failure action rejected", e);
    }

    @Override
    protected void doRun() throws Exception {
      /*
       * We do this on another thread rather than the merge thread that we are initially called on so that we have complete
       * confidence that the call stack does not contain catch statements that would cause the error that might be thrown
       * here from being caught and never reaching the uncaught exception handler.
       */
      failEngine("merge failed", new MergePolicy.MergeException(exc, dir));
    }
  });
}
Example #16
Source File: TestAllFilesHaveCodecHeader.java From lucene-solr with Apache License 2.0
public void test() throws Exception {
  Directory dir = newDirectory();

  IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
  conf.setCodec(TestUtil.getDefaultCodec());
  RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
  // Use LineFileDocs so we (hopefully) get most Lucene features
  // tested, e.g. IntPoint was recently added to it:
  LineFileDocs docs = new LineFileDocs(random());
  for (int i = 0; i < 100; i++) {
    riw.addDocument(docs.nextDoc());
    if (random().nextInt(7) == 0) {
      riw.commit();
    }
    if (random().nextInt(20) == 0) {
      riw.deleteDocuments(new Term("docid", Integer.toString(i)));
    }
    if (random().nextInt(15) == 0) {
      riw.updateNumericDocValue(new Term("docid", Integer.toString(i)), "docid_intDV", Long.valueOf(i));
    }
  }
  riw.close();
  checkHeaders(dir, new HashMap<String,String>());
  dir.close();
}
Example #17
Source File: LuceneSearch.java From mysiteforme with Apache License 2.0
/**
 * Updates the indexed content for the article with the given ID.
 * @param blogArticle
 * @throws IOException
 */
public static void updateIndexById(BlogArticle blogArticle) throws IOException {
  Directory directory = FSDirectory.open(Paths.get(dir)); // open the index directory on disk
  Analyzer analyzer = new IKAnalyzer();
  IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
  // create the index writer
  IndexWriter writer = new IndexWriter(directory, indexWriterConfig);
  Document doc = new Document();
  doc.add(new LongPoint("id", blogArticle.getId()));
  doc.add(new TextField("title", blogArticle.getTitle(), Field.Store.YES));
  doc.add(new TextField("marks", blogArticle.getMarks() == null ? "" : blogArticle.getMarks(), Field.Store.YES));
  doc.add(new TextField("text", blogArticle.getText() == null ? "" : blogArticle.getText(), Field.Store.YES));
  doc.add(new StoredField("href", blogArticle.getBlogChannel().getHref()));
  doc.add(new StoredField("show_pic", blogArticle.getShowPic() == null ? "" : blogArticle.getShowPic()));
  writer.updateDocument(new Term("id", blogArticle.getId().toString()), doc);
  writer.commit(); // commit
  writer.close();  // close
}
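One caveat worth noting: updateDocument deletes by a string Term, but this document indexes its id only as a LongPoint, which is point-indexed rather than term-indexed, so the delete side of the update cannot match a previously indexed document. A minimal sketch of the usual fix is below; the added StringField is an assumption for illustration, not part of the original project:

// Fragment extending the example above: index the id both as a point
// (for range queries) and as an exact string term, so that the
// Term("id", ...) used by updateDocument/deleteDocuments can match.
doc.add(new LongPoint("id", blogArticle.getId()));
doc.add(new StringField("id", blogArticle.getId().toString(), Field.Store.YES));
writer.updateDocument(new Term("id", blogArticle.getId().toString()), doc);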
Example #18
Source File: NodeEnvironment.java From crate with Apache License 2.0
/**
 * Acquires, then releases, all {@code write.lock} files in the given
 * shard paths. The "write.lock" file is assumed to be under the shard
 * path's "index" directory as used by Elasticsearch.
 *
 * @throws LockObtainFailedException if any of the locks could not be acquired
 */
public static void acquireFSLockForPaths(IndexSettings indexSettings, Path... shardPaths) throws IOException {
  Lock[] locks = new Lock[shardPaths.length];
  Directory[] dirs = new Directory[shardPaths.length];
  try {
    for (int i = 0; i < shardPaths.length; i++) {
      // resolve the directory the shard actually lives in
      Path p = shardPaths[i].resolve("index");
      // open a directory (will be immediately closed) on the shard's location
      dirs[i] = new SimpleFSDirectory(p, indexSettings.getValue(FsDirectoryService.INDEX_LOCK_FACTOR_SETTING));
      // create a lock for the "write.lock" file
      try {
        locks[i] = dirs[i].obtainLock(IndexWriter.WRITE_LOCK_NAME);
      } catch (IOException ex) {
        throw new LockObtainFailedException("unable to acquire " + IndexWriter.WRITE_LOCK_NAME + " for " + p, ex);
      }
    }
  } finally {
    IOUtils.closeWhileHandlingException(locks);
    IOUtils.closeWhileHandlingException(dirs);
  }
}
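The underlying primitive here is Directory.obtainLock, which either returns a held Lock or throws LockObtainFailedException. Stripped of the Elasticsearch settings plumbing, the probe looks roughly like this sketch (the shard path is hypothetical):

import java.io.IOException;
import java.nio.file.Path;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.SimpleFSDirectory;

static void probeWriteLock(Path shardIndexDir) throws IOException {
  try (Directory dir = new SimpleFSDirectory(shardIndexDir);
       Lock lock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
    // Lock is held here; try-with-resources releases write.lock on exit.
    // If another process holds it, obtainLock throws LockObtainFailedException.
  }
}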
Example #19
Source File: TestAllFilesDetectTruncation.java From lucene-solr with Apache License 2.0
private void checkTruncation(Directory dir) throws IOException {
  for (String name : dir.listAll()) {
    if (name.equals(IndexWriter.WRITE_LOCK_NAME) == false) {
      truncateOneFile(dir, name);
    }
  }
}
Example #20
Source File: Lucene50CompoundReader.java From lucene-solr with Apache License 2.0
/**
 * Create a new CompoundFileDirectory.
 */
// TODO: we should just pre-strip "entries" and append segment name up-front like simpletext?
// this need not be a "general purpose" directory anymore (it only writes index files)
public Lucene50CompoundReader(Directory directory, SegmentInfo si, IOContext context) throws IOException {
  this.directory = directory;
  this.segmentName = si.name;
  String dataFileName = IndexFileNames.segmentFileName(segmentName, "", Lucene50CompoundFormat.DATA_EXTENSION);
  String entriesFileName = IndexFileNames.segmentFileName(segmentName, "", Lucene50CompoundFormat.ENTRIES_EXTENSION);
  this.entries = readEntries(si.getId(), directory, entriesFileName);
  boolean success = false;

  long expectedLength = CodecUtil.indexHeaderLength(Lucene50CompoundFormat.DATA_CODEC, "");
  for (Map.Entry<String,FileEntry> ent : entries.entrySet()) {
    expectedLength += ent.getValue().length;
  }
  expectedLength += CodecUtil.footerLength();

  handle = directory.openInput(dataFileName, context);
  try {
    CodecUtil.checkIndexHeader(handle, Lucene50CompoundFormat.DATA_CODEC, version, version, si.getId(), "");

    // NOTE: data file is too costly to verify checksum against all the bytes on open,
    // but for now we at least verify proper structure of the checksum footer: which looks
    // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
    // such as file truncation.
    CodecUtil.retrieveChecksum(handle);

    // We also validate length, because e.g. if you strip 16 bytes off the .cfs we otherwise
    // would not detect it:
    if (handle.length() != expectedLength) {
      throw new CorruptIndexException("length should be " + expectedLength + " bytes, but is " + handle.length() + " instead", handle);
    }

    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(handle);
    }
  }
}
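As the comments above note, verifying the checksum against every byte is too costly at open time. Where a full check is affordable (for example, offline verification), CodecUtil can do it in one call. A small sketch, with a hypothetical helper name:

import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

static long verifyWholeFile(Directory dir, String fileName) throws IOException {
  try (IndexInput in = dir.openInput(fileName, IOContext.READONCE)) {
    // Reads the entire file and compares the computed checksum
    // against the value stored in the codec footer; throws
    // CorruptIndexException on mismatch.
    return CodecUtil.checksumEntireFile(in);
  }
}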
Example #21
Source File: HdfsDirectoryTest.java From lucene-solr with Apache License 2.0
@Test
public void testEOF() throws IOException {
  Directory fsDir = new ByteBuffersDirectory();
  String name = "test.eof";
  createFile(name, fsDir, directory);
  long fsLength = fsDir.fileLength(name);
  long hdfsLength = directory.fileLength(name);
  assertEquals(fsLength, hdfsLength);
  testEof(name, fsDir, fsLength);
  testEof(name, directory, hdfsLength);
}
Example #22
Source File: LuceneFactory.java From Stargraph with MIT License
private Directory getLuceneDir(Stargraph stargraph, KBId kbId) {
  return luceneDirs.computeIfAbsent(kbId, (id) -> {
    try {
      String rootPath = stargraph.getDataRootDir();
      Path idxPath = Paths.get(rootPath, id.getId(), id.getModel(), "idx");
      return new MMapDirectory(idxPath);
    } catch (IOException e) {
      throw new StarGraphException(e);
    }
  });
}
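The factory above pins MMapDirectory explicitly. For comparison, FSDirectory.open lets Lucene choose a suitable implementation for the platform (typically MMapDirectory on 64-bit JVMs), which is often the safer default when there is no specific reason to force memory mapping:

Directory dir = FSDirectory.open(idxPath); // Lucene picks the implementation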
Example #23
Source File: TestIndexWriter.java From lucene-solr with Apache License 2.0
public void testMergeZeroDocsMergeIsClosedOnce() throws IOException {
  LogDocMergePolicy keepAllSegments = new LogDocMergePolicy() {
    @Override
    public boolean keepFullyDeletedSegment(IOSupplier<CodecReader> readerIOSupplier) {
      return true;
    }
  };
  try (Directory dir = newDirectory()) {
    try (IndexWriter writer = new IndexWriter(dir,
        new IndexWriterConfig().setMergePolicy(new OneMergeWrappingMergePolicy(keepAllSegments, merge -> {
          SetOnce<Boolean> onlyFinishOnce = new SetOnce<>();
          return new MergePolicy.OneMerge(merge.segments) {
            @Override
            public void mergeFinished(boolean success, boolean segmentDropped) throws IOException {
              super.mergeFinished(success, segmentDropped);
              onlyFinishOnce.set(true);
            }
          };
        })))) {
      Document doc = new Document();
      doc.add(new StringField("id", "1", Field.Store.NO));
      writer.addDocument(doc);
      writer.flush();
      writer.addDocument(doc);
      writer.flush();
      writer.deleteDocuments(new Term("id", "1"));
      writer.flush();
      assertEquals(2, writer.getSegmentCount());
      assertEquals(0, writer.getDocStats().numDocs);
      assertEquals(2, writer.getDocStats().maxDoc);
      writer.forceMerge(1);
    }
  }
}
Example #24
Source File: TestIndexWriterDelete.java From lucene-solr with Apache License 2.0
public void testDeleteAllNRT() throws IOException {
  Directory dir = newDirectory();
  IndexWriter modifier = new IndexWriter(dir,
      newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
          .setMaxBufferedDocs(2));

  int id = 0;
  int value = 100;

  for (int i = 0; i < 7; i++) {
    addDoc(modifier, ++id, value);
  }
  modifier.commit();

  IndexReader reader = modifier.getReader();
  assertEquals(7, reader.numDocs());
  reader.close();

  addDoc(modifier, ++id, value);
  addDoc(modifier, ++id, value);

  // Delete all
  modifier.deleteAll();

  reader = modifier.getReader();
  assertEquals(0, reader.numDocs());
  reader.close();

  // Roll it back
  modifier.rollback();

  // Validate that the docs are still there
  reader = DirectoryReader.open(dir);
  assertEquals(7, reader.numDocs());
  reader.close();

  dir.close();
}
Example #25
Source File: LuceneSearch.java From mysiteforme with Apache License 2.0
/**
 * Deletes the indexed document with the given ID.
 * @param id
 * @throws IOException
 */
public static void deleteIndexById(String id) throws IOException {
  Directory directory = FSDirectory.open(Paths.get(dir)); // open the index directory on disk
  Analyzer analyzer = new IKAnalyzer();
  IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
  // create the index writer
  IndexWriter writer = new IndexWriter(directory, indexWriterConfig);
  IndexReader reader = DirectoryReader.open(directory); // open a reader on the directory
  Query q = new TermQuery(new Term("id", id));
  writer.deleteDocuments(q); // delete the Document(s) matching the given ID
  writer.commit(); // commit
  writer.close();  // close
  reader.close();  // close
}
Example #26
Source File: TestHighFrequencyDictionary.java From lucene-solr with Apache License 2.0
public void testEmpty() throws Exception {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  writer.commit();
  writer.close();

  IndexReader ir = DirectoryReader.open(dir);
  Dictionary dictionary = new HighFrequencyDictionary(ir, "bogus", 0.1f);
  BytesRefIterator tf = dictionary.getEntryIterator();
  assertNull(tf.next());
  dir.close();
}
Example #27
Source File: TestOfflineSorter.java From lucene-solr with Apache License 2.0
public void testSingleLine() throws Exception {
  try (Directory dir = newDirectory()) {
    checkSort(dir, new OfflineSorter(dir, "foo"), new byte[][] {
        "Single line only.".getBytes(StandardCharsets.UTF_8)
    });
  }
}
Example #28
Source File: TestAllFilesDetectTruncation.java From lucene-solr with Apache License 2.0
private void truncateOneFile(Directory dir, String victim) throws IOException {
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);
    long victimLength = dir.fileLength(victim);
    int lostBytes = TestUtil.nextInt(random(), 1, (int) Math.min(100, victimLength));
    assert victimLength > 0;

    if (VERBOSE) {
      System.out.println("TEST: now truncate file " + victim + " by removing " + lostBytes + " of " + victimLength + " bytes");
    }

    for (String name : dir.listAll()) {
      if (name.equals(victim) == false) {
        dirCopy.copyFrom(dir, name, name, IOContext.DEFAULT);
      } else {
        try (IndexOutput out = dirCopy.createOutput(name, IOContext.DEFAULT);
             IndexInput in = dir.openInput(name, IOContext.DEFAULT)) {
          out.copyBytes(in, victimLength - lostBytes);
        }
      }
      dirCopy.sync(Collections.singleton(name));
    }

    // NOTE: we .close so that if the test fails (truncation not detected) we don't
    // also get all these confusing errors about open files:
    expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, EOFException.class),
        () -> DirectoryReader.open(dirCopy).close());

    // CheckIndex should also fail:
    expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, EOFException.class),
        () -> TestUtil.checkIndex(dirCopy, true, true, null));
  }
}
Example #29
Source File: TestParallelCompositeReader.java From lucene-solr with Apache License 2.0
public void testCloseInnerReader() throws Exception {
  Directory dir1 = getDir1(random());
  CompositeReader ir1 = DirectoryReader.open(dir1);
  assertEquals(1, ir1.getSequentialSubReaders().get(0).getRefCount());

  // with overlapping
  ParallelCompositeReader pr = new ParallelCompositeReader(true,
      new CompositeReader[] {ir1},
      new CompositeReader[] {ir1});

  IndexReader psub = pr.getSequentialSubReaders().get(0);
  assertEquals(1, psub.getRefCount());

  ir1.close();

  assertEquals("refCount of synthetic subreader should be unchanged", 1, psub.getRefCount());
  expectThrows(AlreadyClosedException.class, () -> {
    psub.document(0);
  });

  expectThrows(AlreadyClosedException.class, () -> {
    pr.document(0);
  });

  // noop:
  pr.close();
  assertEquals(0, psub.getRefCount());
  dir1.close();
}
Example #30
Source File: TestDirectPacked.java From lucene-solr with Apache License 2.0
public void testRandom() throws Exception {
  Directory dir = newDirectory();
  for (int bpv = 1; bpv <= 64; bpv++) {
    doTestBpv(dir, bpv, 0);
  }
  dir.close();
}