Java Code Examples for org.apache.lucene.facet.FacetsConfig#setMultiValued()
The following examples show how to use org.apache.lucene.facet.FacetsConfig#setMultiValued(). Each example is taken from an open-source project; the originating project and source file are noted above the code.
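Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: declare a dimension as multi-valued on a FacetsConfig, add several FacetField values for that dimension to one document, index it through FacetsConfig#build(), and aggregate counts at search time. This is a sketch written against the Lucene 8.x facet API, not code taken from any project below; the class name, the in-memory directories, and the "tags" dimension are illustrative assumptions.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

// Illustrative sketch only (class name and "tags" dimension are assumptions).
public class SetMultiValuedSketch {
  public static void main(String[] args) throws Exception {
    Directory indexDir = new ByteBuffersDirectory();
    Directory taxoDir = new ByteBuffersDirectory();

    // Mark the "tags" dimension as multi-valued so one document may carry
    // several FacetField values for it:
    FacetsConfig config = new FacetsConfig();
    config.setMultiValued("tags", true);

    IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(new StandardAnalyzer()));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

    // Two values for the same dimension on a single document:
    Document doc = new Document();
    doc.add(new FacetField("tags", "lucene"));
    doc.add(new FacetField("tags", "facets"));
    writer.addDocument(config.build(taxoWriter, doc));
    writer.close();
    taxoWriter.close();

    // Count facets over all documents:
    DirectoryReader reader = DirectoryReader.open(indexDir);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    FacetsCollector fc = new FacetsCollector();
    new IndexSearcher(reader).search(new MatchAllDocsQuery(), fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    System.out.println(facets.getTopChildren(10, "tags"));

    reader.close();
    taxoReader.close();
  }
}

Without the setMultiValued("tags", true) call, FacetsConfig#build() rejects the second "tags" value, because dimensions default to single-valued.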
Example 1
Source File: RandomFacetSource.java From lucene-solr with Apache License 2.0 | 5 votes |
@Override
public void configure(FacetsConfig config) {
  for(int i=0;i<maxDims;i++) {
    config.setHierarchical(Integer.toString(i), true);
    config.setMultiValued(Integer.toString(i), true);
  }
}
Example 2
Source File: AssociationsFacetsExample.java From lucene-solr with Apache License 2.0 | 5 votes |
/** Empty constructor */
public AssociationsFacetsExample() {
  config = new FacetsConfig();
  config.setMultiValued("tags", true);
  config.setIndexFieldName("tags", "$tags");
  config.setMultiValued("genre", true);
  config.setIndexFieldName("genre", "$genre");
}
Example 3
Source File: TestTaxonomyFacetAssociations.java From lucene-solr with Apache License 2.0 | 5 votes |
@BeforeClass
public static void beforeClass() throws Exception {
  dir = newDirectory();
  taxoDir = newDirectory();
  // preparations - index, taxonomy, content
  TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

  // Cannot mix ints & floats in the same indexed field:
  config = new FacetsConfig();
  config.setIndexFieldName("int", "$facets.int");
  config.setMultiValued("int", true);
  config.setIndexFieldName("float", "$facets.float");
  config.setMultiValued("float", true);

  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  // index documents, 50% have only 'b' and all have 'a'
  for (int i = 0; i < 110; i++) {
    Document doc = new Document();
    // every 11th document is added empty, this used to cause the association
    // aggregators to go into an infinite loop
    if (i % 11 != 0) {
      doc.add(new IntAssociationFacetField(2, "int", "a"));
      doc.add(new FloatAssociationFacetField(0.5f, "float", "a"));
      if (i % 2 == 0) { // 50
        doc.add(new IntAssociationFacetField(3, "int", "b"));
        doc.add(new FloatAssociationFacetField(0.2f, "float", "b"));
      }
    }
    writer.addDocument(config.build(taxoWriter, doc));
  }

  taxoWriter.close();
  reader = writer.getReader();
  writer.close();
  taxoReader = new DirectoryTaxonomyReader(taxoDir);
}
Example 4
Source File: TestTaxonomyFacetCounts.java From lucene-solr with Apache License 2.0 | 5 votes |
public void testMultiValuedHierarchy() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
  FacetsConfig config = new FacetsConfig();
  config.setHierarchical("a", true);
  config.setMultiValued("a", true);
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("a", "path", "x"));
  doc.add(new FacetField("a", "path", "y"));
  writer.addDocument(config.build(taxoWriter, doc));

  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());

  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);

  expectThrows(IllegalArgumentException.class, () -> {
    facets.getSpecificValue("a");
  });

  FacetResult result = facets.getTopChildren(10, "a");
  assertEquals(1, result.labelValues.length);
  assertEquals(1, result.labelValues[0].value.intValue());

  writer.close();
  IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
Example 5
Source File: TestTaxonomyFacetCounts.java From lucene-solr with Apache License 2.0 | 5 votes |
public void testLabelWithDelimiter() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);

  FacetsConfig config = new FacetsConfig();
  config.setMultiValued("dim", true);

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("dim", "test\u001Fone"));
  doc.add(new FacetField("dim", "test\u001Etwo"));
  writer.addDocument(config.build(taxoWriter, doc));

  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());

  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);

  assertEquals(1, facets.getSpecificValue("dim", "test\u001Fone"));
  assertEquals(1, facets.getSpecificValue("dim", "test\u001Etwo"));

  // no hierarchy
  assertFalse(((TaxonomyFacets) facets).siblingsLoaded());
  assertFalse(((TaxonomyFacets) facets).childrenLoaded());

  FacetResult result = facets.getTopChildren(10, "dim");
  assertEquals("dim=dim path=[] value=-1 childCount=2\n test\u001Fone (1)\n test\u001Etwo (1)\n", result.toString());

  writer.close();
  IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
Example 6
Source File: TestTaxonomyFacetCounts.java From lucene-solr with Apache License 2.0 | 5 votes |
public void testManyFacetsInOneDocument() throws Exception {
  assumeTrue("default Codec doesn't support huge BinaryDocValues", TestUtil.fieldSupportsHugeBinaryDocValues(FacetsConfig.DEFAULT_INDEX_FIELD_NAME));
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);

  FacetsConfig config = new FacetsConfig();
  config.setMultiValued("dim", true);

  int numLabels = TEST_NIGHTLY ? TestUtil.nextInt(random(), 40000, 100000) : TestUtil.nextInt(random(), 4000, 10000);

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  for (int i = 0; i < numLabels; i++) {
    doc.add(new FacetField("dim", "" + i));
  }
  writer.addDocument(config.build(taxoWriter, doc));

  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());

  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);

  FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
  assertEquals(numLabels, result.labelValues.length);
  Set<String> allLabels = new HashSet<>();
  for (LabelAndValue labelValue : result.labelValues) {
    allLabels.add(labelValue.label);
    assertEquals(1, labelValue.value.intValue());
  }
  assertEquals(numLabels, allLabels.size());

  writer.close();
  IOUtils.close(searcher.getIndexReader(), taxoWriter, taxoReader, dir, taxoDir);
}
Example 7
Source File: TestTaxonomyFacetCounts2.java From lucene-solr with Apache License 2.0 | 5 votes |
private static FacetsConfig getConfig() {
  FacetsConfig config = new FacetsConfig();
  config.setMultiValued("A", true);
  config.setMultiValued("B", true);
  config.setRequireDimCount("B", true);
  config.setHierarchical("D", true);
  return config;
}
Example 8
Source File: IndexBuilderLuceneImpl.java From yes-cart with Apache License 2.0 | 5 votes |
/**
 * Process single entity update in the FT index.
 *
 * @param iw         index writer
 * @param indexName  index name
 * @param documents  documents to index and PK
 * @param remove     remove only
 * @param indexTime  time of this index (added as field to added documents)
 * @param counts     counts[3] = { added, removed, failed }
 *
 * @throws IOException error
 */
protected void fullTextSearchReindexSingleEntity(final IndexWriter iw,
                                                 final String indexName,
                                                 final Pair<PK, Document[]> documents,
                                                 final boolean remove,
                                                 final long indexTime,
                                                 final long[] counts) throws IOException {

  final PK primaryKey = documents.getFirst();

  // Remove all documents with primary key (could be multiple)
  iw.deleteDocuments(new Term(AdapterUtils.FIELD_PK, String.valueOf(primaryKey)));
  counts[1]++;
  LOGFTQ.trace("Removing {} document _PK:{}", indexName, primaryKey);

  if (!remove) {
    // Add documents
    final FacetsConfig facetsConfig = new FacetsConfig();
    for (final Document document : documents.getSecond()) {
      try {
        LuceneDocumentAdapterUtils.addNumericField(document, AdapterUtils.FIELD_INDEXTIME, indexTime, false);
        for (final IndexableField ixf : document) {
          if (ixf.fieldType() == SortedSetDocValuesFacetField.TYPE) {
            SortedSetDocValuesFacetField facetField = (SortedSetDocValuesFacetField) ixf;
            facetsConfig.setIndexFieldName(facetField.dim, facetField.dim);
            facetsConfig.setMultiValued(facetField.dim, true); // TODO: revisit this but for now all fields assumed to have multivalue
          }
        }
        iw.addDocument(facetsConfig.build(document));
        counts[0]++;
      } catch (Exception sde) {
        LOGFTQ.error("Updating {} document _PK:{} failed ... cause: {}", indexName, documents.getFirst(), sde.getMessage());
        counts[2]++;
      }
    }
    LOGFTQ.trace("Updating {} document _PK:{}", indexName, primaryKey);
  }
}
Example 9
Source File: TestTaxonomyFacetCounts.java From lucene-solr with Apache License 2.0 | 4 votes |
public void testRequireDimCount() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);

  FacetsConfig config = new FacetsConfig();
  config.setRequireDimCount("dim", true);

  config.setMultiValued("dim2", true);
  config.setRequireDimCount("dim2", true);

  config.setMultiValued("dim3", true);
  config.setHierarchical("dim3", true);
  config.setRequireDimCount("dim3", true);

  Document doc = new Document();
  doc.add(newTextField("field", "text", Field.Store.NO));
  doc.add(new FacetField("dim", "a"));
  doc.add(new FacetField("dim2", "a"));
  doc.add(new FacetField("dim2", "b"));
  doc.add(new FacetField("dim3", "a", "b"));
  doc.add(new FacetField("dim3", "a", "c"));
  writer.addDocument(config.build(taxoWriter, doc));

  // NRT open
  IndexSearcher searcher = newSearcher(writer.getReader());

  // NRT open
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

  Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, searcher, taxoReader, config);

  assertEquals(1, facets.getTopChildren(10, "dim").value);
  assertEquals(1, facets.getTopChildren(10, "dim2").value);
  assertEquals(1, facets.getTopChildren(10, "dim3").value);

  expectThrows(IllegalArgumentException.class, () -> {
    facets.getSpecificValue("dim");
  });

  assertEquals(1, facets.getSpecificValue("dim2"));
  assertEquals(1, facets.getSpecificValue("dim3"));

  writer.close();
  IOUtils.close(taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
Example 10
Source File: TestSearcherTaxonomyManager.java From lucene-solr with Apache License 2.0 | 4 votes |
public void testNRT() throws Exception {
  Directory dir = newDirectory();
  Directory taxoDir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
  // Don't allow tiny maxBufferedDocs; it can make this
  // test too slow:
  iwc.setMaxBufferedDocs(Math.max(500, iwc.getMaxBufferedDocs()));

  // MockRandom/AlcololicMergePolicy are too slow:
  TieredMergePolicy tmp = new TieredMergePolicy();
  tmp.setFloorSegmentMB(.001);
  iwc.setMergePolicy(tmp);
  final IndexWriter w = new IndexWriter(dir, iwc);
  final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
  final FacetsConfig config = new FacetsConfig();
  config.setMultiValued("field", true);
  final AtomicBoolean stop = new AtomicBoolean();

  // How many unique facets to index before stopping:
  final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

  Thread indexer = new IndexerThread(w, config, tw, null, ordLimit, stop);

  final SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);

  Thread reopener = new Thread() {
      @Override
      public void run() {
        while(!stop.get()) {
          try {
            // Sleep for up to 20 msec:
            Thread.sleep(random().nextInt(20));

            if (VERBOSE) {
              System.out.println("TEST: reopen");
            }

            mgr.maybeRefresh();

            if (VERBOSE) {
              System.out.println("TEST: reopen done");
            }
          } catch (Exception ioe) {
            throw new RuntimeException(ioe);
          }
        }
      }
    };

  reopener.setName("reopener");
  reopener.start();

  indexer.setName("indexer");
  indexer.start();

  try {
    while (!stop.get()) {
      SearcherAndTaxonomy pair = mgr.acquire();
      try {
        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
        FacetsCollector sfc = new FacetsCollector();
        pair.searcher.search(new MatchAllDocsQuery(), sfc);
        Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
        FacetResult result = facets.getTopChildren(10, "field");
        if (pair.searcher.getIndexReader().numDocs() > 0) {
          //System.out.println(pair.taxonomyReader.getSize());
          assertTrue(result.childCount > 0);
          assertTrue(result.labelValues.length > 0);
        }

        //if (VERBOSE) {
        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
        //}
      } finally {
        mgr.release(pair);
      }
    }
  } finally {
    indexer.join();
    reopener.join();
  }

  if (VERBOSE) {
    System.out.println("TEST: now stop");
  }

  w.close();
  IOUtils.close(mgr, tw, taxoDir, dir);
}
Example 11
Source File: TestSearcherTaxonomyManager.java From lucene-solr with Apache License 2.0 | 4 votes |
public void testDirectory() throws Exception {
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();
  final IndexWriter w = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
  final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
  // first empty commit
  w.commit();
  tw.commit();
  final SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);
  final FacetsConfig config = new FacetsConfig();
  config.setMultiValued("field", true);
  final AtomicBoolean stop = new AtomicBoolean();

  // How many unique facets to index before stopping:
  final int ordLimit = TEST_NIGHTLY ? 100000 : 600;

  Thread indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
  indexer.start();

  try {
    while (!stop.get()) {
      SearcherAndTaxonomy pair = mgr.acquire();
      try {
        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
        FacetsCollector sfc = new FacetsCollector();
        pair.searcher.search(new MatchAllDocsQuery(), sfc);
        Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
        FacetResult result = facets.getTopChildren(10, "field");
        if (pair.searcher.getIndexReader().numDocs() > 0) {
          //System.out.println(pair.taxonomyReader.getSize());
          assertTrue(result.childCount > 0);
          assertTrue(result.labelValues.length > 0);
        }

        //if (VERBOSE) {
        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
        //}
      } finally {
        mgr.release(pair);
      }
    }
  } finally {
    indexer.join();
  }

  if (VERBOSE) {
    System.out.println("TEST: now stop");
  }

  w.close();
  IOUtils.close(mgr, tw, taxoDir, indexDir);
}