Java Code Examples for org.apache.lucene.index.IndexWriter#deleteAll()
The following examples show how to use org.apache.lucene.index.IndexWriter#deleteAll().
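Before the project examples, here is a minimal, self-contained sketch of the typical deleteAll() pattern: the call marks every document in the index as deleted, and the change only becomes visible to readers after a following commit() (until then it can still be discarded with rollback()). The index path used below is a placeholder chosen for illustration, and the snippet assumes a current Lucene release where IndexWriterConfig takes just an analyzer.

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class DeleteAllSketch {
    public static void main(String[] args) throws Exception {
        // "/tmp/example-index" is a placeholder path for this sketch
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            // Mark every document in the index as deleted ...
            writer.deleteAll();
            // ... and make the deletion visible to newly opened readers
            writer.commit();
        }
    }
}

Several of the examples below follow the same shape: clear an existing index with deleteAll() and then either re-add documents or close the writer.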
Example 1
Source File: MtasDocumentIndex.java From inception with Apache License 2.0
@Override
public synchronized void clear() throws IOException
{
    // Disable the scheduler temporarily to avoid new commits getting scheduled
    if (schedulerService != null) {
        schedulerService.shutdown();
    }

    // Remove all data from the index
    IndexWriter indexWriter = getIndexWriter();
    indexWriter.deleteAll();

    // Close the index temporarily because we want the IndexWriter to be re-initialized on the
    // next access in order to pick up the current layer configuration of the project.
    close();
}
Example 2
Source File: AbstractIndexManager.java From webdsl with Apache License 2.0
protected static boolean clearIndex(File path) {
    try {
        if (path == null || !path.exists())
            return true; // if path doesn't exist, then there is nothing to clear
        FSDirectory indexDir = new FSDirectoryProvider().getDirectory();
        IndexWriter writer = new IndexWriter(indexDir.open(path),
                new IndexWriterConfig(Version.LUCENE_CURRENT,
                        new WhitespaceAnalyzer(Version.LUCENE_CURRENT)));
        writer.deleteAll();
        writer.close();
        return true;
    } catch (Exception ex) {
        org.webdsl.logging.Logger.error(
                "Error while clearing index on location: " + path, ex);
        return false;
    }
}
Example 3
Source File: TestControlledRealTimeReopenThread.java From lucene-solr with Apache License 2.0
public void testDeleteAll() throws Exception {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    SearcherManager mgr = new SearcherManager(w, new SearcherFactory());
    nrtDeletesThread = new ControlledRealTimeReopenThread<>(w, mgr, 0.1, 0.01);
    nrtDeletesThread.setName("NRTDeletes Reopen Thread");
    nrtDeletesThread.setDaemon(true);
    nrtDeletesThread.start();

    long gen1 = w.addDocument(new Document());
    long gen2 = w.deleteAll();
    nrtDeletesThread.waitForGeneration(gen2);
    IOUtils.close(nrtDeletesThread, nrtDeletes, w, dir);
}
Example 4
Source File: AutoCompleter.java From webdsl with Apache License 2.0
/**
 * Indexes the data from the given reader.
 * @param reader Source index reader, from which autocomplete words are obtained for the defined field
 * @param field the field of the source index reader to index for autocompletion
 * @param mergeFactor mergeFactor to use when indexing
 * @param ramMB the max amount of memory in MB to use
 * @param optimize whether or not the autocomplete index should be optimized
 * @throws AlreadyClosedException if the Autocompleter is already closed
 * @throws IOException
 */
public final void indexDictionary(IndexReader reader, String field, int mergeFactor, int ramMB, boolean optimize) throws IOException {
    synchronized (modifyCurrentIndexLock) {
        ensureOpen();
        final Directory dir = this.autoCompleteIndex;
        final Dictionary dict = new LuceneDictionary(reader, field);
        final IndexWriter writer = new IndexWriter(dir,
                new IndexWriterConfig(Version.LUCENE_CURRENT,
                        new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).setRAMBufferSizeMB(ramMB));
        IndexSearcher indexSearcher = obtainSearcher();
        final List<IndexReader> readers = new ArrayList<IndexReader>();

        if (searcher.maxDoc() > 0) {
            ReaderUtil.gatherSubReaders(readers, searcher.getIndexReader());
        }

        // clear the index
        writer.deleteAll();

        try {
            Iterator<String> iter = dict.getWordsIterator();
            while (iter.hasNext()) {
                String word = iter.next();
                // ok index the word
                Document doc = createDocument(word, reader.docFreq(new Term(field, word)));
                writer.addDocument(doc);
            }
        } finally {
            releaseSearcher(indexSearcher);
        }

        // close writer
        if (optimize)
            writer.optimize();
        writer.close();

        // also re-open the autocomplete index to see our own changes when the next suggestion
        // is fetched:
        swapSearcher(dir);
    }
}
Example 5
Source File: IndexWriterWorker.java From olat with Apache License 2.0
/**
 * @param id
 *            Unique index ID. Used to generate a unique directory name.
 * @param tempIndexDir
 *            Absolute directory path where the temporary index can be generated.
 * @param fullIndexer
 *            Reference to full-index
 */
public IndexWriterWorker(final int id, final File tempIndexDir, final OlatFullIndexer fullIndexer) {
    this.id = id;
    this.fullIndexer = fullIndexer;
    try {
        final File indexPartFile = new File(tempIndexDir, "part" + id);
        final Directory indexPartDirectory = FSDirectory.open(indexPartFile);
        indexWriter = new IndexWriter(indexPartDirectory, new StandardAnalyzer(Version.LUCENE_CURRENT), true,
                IndexWriter.MaxFieldLength.UNLIMITED);
        indexWriter.deleteAll();
    } catch (final IOException e) {
        log.warn("Can not create IndexWriter");
    }
}
Example 6
Source File: Catalog.java From cxf with Apache License 2.0
@DELETE
public Response delete() throws IOException {
    final IndexWriter writer = getIndexWriter();

    try {
        storage.deleteAll();
        writer.deleteAll();
        writer.commit();
    } finally {
        writer.close();
    }

    return Response.ok().build();
}
Example 7
Source File: MtasSearchTestConsistency.java From mtas with Apache License 2.0
/**
 * Creates the index.
 *
 * @param configFile the config file
 * @param files the files
 * @throws IOException Signals that an I/O exception has occurred.
 */
private static void createIndex(String configFile, HashMap<String, String> files) throws IOException {
    // analyzer
    Map<String, String> paramsCharFilterMtas = new HashMap<>();
    paramsCharFilterMtas.put("type", "file");
    Map<String, String> paramsTokenizer = new HashMap<>();
    paramsTokenizer.put("configFile", configFile);
    Analyzer mtasAnalyzer = CustomAnalyzer
        .builder(Paths.get("docker").toAbsolutePath())
        .addCharFilter("mtas", paramsCharFilterMtas)
        .withTokenizer("mtas", paramsTokenizer).build();
    Map<String, Analyzer> analyzerPerField = new HashMap<>();
    analyzerPerField.put(FIELD_CONTENT, mtasAnalyzer);
    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(
        new StandardAnalyzer(), analyzerPerField);
    // indexwriter
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    config.setUseCompoundFile(false);
    config.setCodec(Codec.forName("MtasCodec"));
    IndexWriter w = new IndexWriter(directory, config);
    // delete
    w.deleteAll();
    // add
    int counter = 0;
    for (Entry<String, String> entry : files.entrySet()) {
      addDoc(w, counter, entry.getKey(), entry.getValue());
      if (counter == 0) {
        w.commit();
      } else {
        addDoc(w, counter, entry.getKey(), entry.getValue());
        addDoc(w, counter, "deletable", entry.getValue());
        w.commit();
        w.deleteDocuments(new Term(FIELD_ID, Integer.toString(counter)));
        w.deleteDocuments(new Term(FIELD_TITLE, "deletable"));
        addDoc(w, counter, entry.getKey(), entry.getValue());
      }
      counter++;
    }
    w.commit();
    // finish
    w.close();
}
Example 8
Source File: OlatFullIndexer.java From olat with Apache License 2.0
/**
 * Create index-writer object. In multi-threaded mode creates an array of index-workers. Start indexing with main-index as root object. Index all elements recursively.
 * At the end optimize and close the new index. The new index is stored in [temporary-index-path]/main
 *
 * @throws InterruptedException
 */
private void doIndex() throws InterruptedException {
    try {
        final File tempIndexDir = new File(tempIndexPath);
        final Directory indexPath = FSDirectory.open(new File(tempIndexDir, "main"));
        final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
        indexWriter = new IndexWriter(indexPath, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
        indexWriter.deleteAll();
        indexWriter.setMergeFactor(INDEX_MERGE_FACTOR); // for better performance
        indexWriter.setRAMBufferSizeMB(ramBufferSizeMB); // for better performance set to 48MB (see lucene docu 'how to make indexing faster')
        log.info("IndexWriter config RAMBufferSizeMB=" + indexWriter.getRAMBufferSizeMB());
        indexWriter.setUseCompoundFile(useCompoundFile); // for better performance (see lucene docu 'how to make indexing faster')
        log.info("IndexWriter config UseCompoundFile=" + indexWriter.getUseCompoundFile());
        // Create IndexWriterWorker
        log.info("Running with " + numberIndexWriter + " IndexerWriterWorker");
        indexWriterWorkers = new IndexWriterWorker[numberIndexWriter];
        final Directory[] partIndexDirs = new Directory[numberIndexWriter];
        for (int i = 0; i < numberIndexWriter; i++) {
            final IndexWriterWorker indexWriterWorker = new IndexWriterWorker(i, tempIndexDir, this);
            indexWriterWorkers[i] = indexWriterWorker;
            indexWriterWorkers[i].start();
            partIndexDirs[i] = indexWriterWorkers[i].getIndexDir();
        }

        final SearchResourceContext searchResourceContext = new SearchResourceContext();
        log.info("doIndex start. OlatFullIndexer with Debug output");
        mainIndexer.doIndex(searchResourceContext, null /* no parent */, this);

        log.info("Wait until every folder indexer is finished");
        DBFactory.getInstance().commitAndCloseSession();
        // check if every folder indexer is finished; max waiting-time 10Min (=waitingCount-limit = 60)
        int waitingCount = 0;
        final int MAX_WAITING_COUNT = 60; // = 10Min
        while (FolderIndexerWorkerPool.getInstance().isIndexerRunning() && (waitingCount++ < MAX_WAITING_COUNT)) {
            Thread.sleep(10000);
        }
        if (waitingCount >= MAX_WAITING_COUNT) {
            log.info("Finished with max waiting time!");
        }
        log.info("Set Finish-flag for each indexWriterWorkers");
        // Set Finish-flag
        for (int i = 0; i < numberIndexWriter; i++) {
            indexWriterWorkers[i].finishIndexing();
        }
        log.info("Wait until every indexworker is finished");
        // check if every indexworker is finished; max waiting-time 10Min (=waitingCount-limit = 60)
        waitingCount = 0;
        while (!areIndexingDone() && (waitingCount++ < MAX_WAITING_COUNT)) {
            Thread.sleep(10000);
        }
        if (waitingCount >= MAX_WAITING_COUNT) {
            log.info("Finished with max waiting time!");
        }
        // Merge all partIndex
        DBFactory.getInstance().commitAndCloseSession();
        if (partIndexDirs.length > 0) {
            log.info("Start merging part Indexes");
            indexWriter.addIndexesNoOptimize(partIndexDirs);
            log.info("Added all part Indexes");
        }
        fullIndexerStatus.setIndexSize(indexWriter.maxDoc());
        indexWriter.optimize();
        indexWriter.close();
    } catch (final IOException e) {
        e.printStackTrace();
        log.warn("Can not create IndexWriter, indexname=" + tempIndexPath, e);
    } finally {
        DBFactory.getInstance().commitAndCloseSession();
        log.debug("doIndex: commit & close session");
    }
}