Java Code Examples for org.apache.solr.common.util.NamedList#get()
The following examples show how to use org.apache.solr.common.util.NamedList#get(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
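Before the project examples, here is a minimal standalone sketch of the basic lookup pattern; the class name, keys, and default values are illustrative and not taken from any of the projects below. NamedList#get(String) returns the value of the first entry with the given name as an Object (or null if the name is absent), so callers typically cast the result and supply a fallback for missing keys, as the examples on this page do.

import org.apache.solr.common.util.NamedList;

public class NamedListGetSketch {
  public static void main(String[] args) {
    // Build a small NamedList, roughly the shape of handler init args or a response section.
    NamedList<Object> config = new NamedList<>();
    config.add("df", "text");
    config.add("rows", 10);

    // get(String) returns the first value stored under that name, or null if the name is absent,
    // so the caller casts the result to the expected type.
    String defaultField = (String) config.get("df");

    // Missing or untyped keys come back as Object/null, so guard the cast and supply a default.
    Object rowsArg = config.get("rows");
    int rows = (rowsArg instanceof Number) ? ((Number) rowsArg).intValue() : 20;
    Object missing = config.get("no-such-key"); // null

    System.out.println(defaultField + " " + rows + " " + missing);
  }
}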
Example 1
Source File: TestXJoinSearchComponent.java From BioSolr with Apache License 2.0 | 6 votes |
@Test @SuppressWarnings("rawtypes") public void testGroupedSimple() { ModifiableSolrParams params = new ModifiableSolrParams(); params.add("group", "true"); params.add("group.field", "colour"); params.add("group.format", "simple"); NamedList results = test(params, "xjoin"); testXJoinResults(results, "xjoin"); NamedList grouped = (NamedList)results.get("grouped"); NamedList colours = (NamedList)grouped.get("colour"); assertEquals(2, colours.get("matches")); DocList docs = (DocList)colours.get("doclist"); assertEquals(docs.size(), 2); DocIterator it = docs.iterator(); assertTrue(it.hasNext()); assertEquals(1, it.nextDoc()); assertTrue(it.hasNext()); assertEquals(3, it.nextDoc()); assertFalse(it.hasNext()); }
Example 2
Source File: HeatmapJsonFacet.java From lucene-solr with Apache License 2.0 | 6 votes |
@SuppressWarnings({"unchecked"}) public HeatmapJsonFacet(NamedList<Object> heatmapNL) { gridLevel = (int) heatmapNL.get("gridLevel"); columns = (int) heatmapNL.get("columns"); rows = (int) heatmapNL.get("rows"); minX = (double) heatmapNL.get("minX"); maxX = (double) heatmapNL.get("maxX"); minY = (double) heatmapNL.get("minY"); maxY = (double) heatmapNL.get("maxY"); log.debug("Rows is: {}", rows); log.debug("Cols is {}", columns); log.debug("Whole deal is: {}", heatmapNL); if (heatmapNL.get("counts_ints2D") == null) { countEncodedAsBase64PNG = (String) heatmapNL.get("counts_png"); } else { countGrid = (List<List<Integer>>) heatmapNL.get("counts_ints2D"); } }
Example 3
Source File: SpatialClusteringComponent.java From solr-spatial-clustering with Apache License 2.0 | 6 votes |
private static int getIntArgument(NamedList<?> values, String name, int minValue, int defaultValue) {
  Object value = values.get(name);
  if (value == null) {
    return defaultValue;
  }
  if (!(value instanceof Number)) {
    throw new IllegalArgumentException("Value for parameter '" + name + "' must be a number.");
  }
  int result = ((Number) value).intValue();
  if (result < minValue) {
    throw new IllegalArgumentException(
        "Value for parameter '" + name + "' must be at least " + minValue + ", but it was " + result + ".");
  }
  return result;
}
Example 4
Source File: TestUtilizeNode.java From lucene-solr with Apache License 2.0 | 6 votes |
@BeforeClass
public static void setupCluster() throws Exception {
  configureCluster(3)
      .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf"))
      .configure();
  NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
  JettySolrRunner overseerJetty = null;
  String overseerLeader = (String) overSeerStatus.get("leader");
  for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
    JettySolrRunner jetty = cluster.getJettySolrRunner(i);
    if (jetty.getNodeName().equals(overseerLeader)) {
      overseerJetty = jetty;
      break;
    }
  }
  if (overseerJetty == null) {
    fail("no overseer leader!");
  }
}
Example 5
Source File: TestInitParams.java From lucene-solr with Apache License 2.0 | 5 votes |
public void testElevateExample() {
  SolrRequestHandler handler = h.getCore().getRequestHandler("/elevate");
  SolrQueryResponse rsp = new SolrQueryResponse();
  handler.handleRequest(req("initArgs", "true"), rsp);
  @SuppressWarnings({"rawtypes"})
  NamedList nl = (NamedList) rsp.getValues().get("initArgs");
  @SuppressWarnings({"rawtypes"})
  NamedList def = (NamedList) nl.get(PluginInfo.DEFAULTS);
  assertEquals("text", def.get("df"));
}
Example 6
Source File: IndexBasedSpellChecker.java From lucene-solr with Apache License 2.0 | 5 votes |
@Override
public String init(@SuppressWarnings({"rawtypes"}) NamedList config, SolrCore core) {
  super.init(config, core);
  threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null
      ? 0.0f
      : (Float) config.get(THRESHOLD_TOKEN_FREQUENCY);
  initSourceReader();
  return name;
}
Example 7
Source File: SchemaResponse.java From lucene-solr with Apache License 2.0 | 5 votes |
@SuppressWarnings("unchecked") private static AnalyzerDefinition createAnalyzerDefinition(NamedList<Object> analyzerNamedList) { AnalyzerDefinition analyzerDefinition = new AnalyzerDefinition(); Map<String, Object> analyzerAttributes = extractAttributeMap(analyzerNamedList); analyzerDefinition.setAttributes(analyzerAttributes); List<NamedList<Object>> charFiltersList = (List<NamedList<Object>>) analyzerNamedList.get("charFilters"); if (charFiltersList != null) { List<Map<String, Object>> charFiltersAttributesList = new LinkedList<>(); for (NamedList<Object> charFilterNamedList : charFiltersList) { Map<String, Object> charFilterAttributes = extractAttributeMap(charFilterNamedList); charFiltersAttributesList.add(charFilterAttributes); } analyzerDefinition.setCharFilters(charFiltersAttributesList); } NamedList<Object> tokenizerNamedList = (NamedList<Object>) analyzerNamedList.get("tokenizer"); if (tokenizerNamedList != null) { Map<String, Object> tokenizerAttributes = extractAttributeMap(tokenizerNamedList); analyzerDefinition.setTokenizer(tokenizerAttributes); } List<NamedList<Object>> filtersList = (List<NamedList<Object>>) analyzerNamedList.get("filters"); List<Map<String, Object>> filtersAttributesList = new LinkedList<>(); if (filtersList != null) { for (NamedList<Object> filterNamedList : filtersList) { Map<String, Object> filterAttributes = extractAttributeMap(filterNamedList); filtersAttributesList.add(filterAttributes); } analyzerDefinition.setFilters(filtersAttributesList); } return analyzerDefinition; }
Example 8
Source File: QuerySegmenterConfig.java From query-segmenter with Apache License 2.0 | 5 votes |
@SuppressWarnings("rawtypes") public QuerySegmenterConfig(NamedList args) { segmenter = new QuerySegmenterDefaultImpl(); NamedList segments = (NamedList) args.get(INIT_ATTR_SEGMENTS); for (int i = 0; i < segments.size(); i++) { String name = segments.getName(i); NamedList values = (NamedList) segments.getVal(i); initSegmentType(name, values); } }
Example 9
Source File: AsyncBuildSuggestComponent.java From SearchServices with GNU Lesser General Public License v3.0 | 5 votes |
/**
 * Used in Distributed Search, merges the suggestion results from every shard
 */
@Override
public void finishStage(ResponseBuilder rb) {
  SolrParams params = rb.req.getParams();
  LOG.debug("SuggestComponent finishStage with : " + params);
  if (!params.getBool(COMPONENT_NAME, false) || rb.stage != ResponseBuilder.STAGE_GET_FIELDS)
    return;
  int count = params.getInt(SUGGEST_COUNT, 1);
  List<SuggesterResult> suggesterResults = new ArrayList<>();

  // Collect Shard responses
  for (ShardRequest sreq : rb.finished) {
    for (ShardResponse srsp : sreq.responses) {
      NamedList<Object> resp;
      if ((resp = srsp.getSolrResponse().getResponse()) != null) {
        @SuppressWarnings("unchecked")
        Map<String, SimpleOrderedMap<NamedList<Object>>> namedList =
            (Map<String, SimpleOrderedMap<NamedList<Object>>>) resp.get(SuggesterResultLabels.SUGGEST);
        LOG.debug(srsp.getShard() + " : " + namedList);
        suggesterResults.add(toSuggesterResult(namedList));
      }
    }
  }

  // Merge Shard responses
  SuggesterResult suggesterResult = merge(suggesterResults, count);
  Map<String, SimpleOrderedMap<NamedList<Object>>> namedListResults = new HashMap<>();
  toNamedList(suggesterResult, namedListResults);
  rb.rsp.add(SuggesterResultLabels.SUGGEST, namedListResults);
}
Example 10
Source File: TopGroupsResultTransformer.java From lucene-solr with Apache License 2.0 | 5 votes |
protected ScoreDoc[] transformToNativeShardDoc(List<NamedList<Object>> documents, Sort groupSort,
                                               String shard, IndexSchema schema) {
  ScoreDoc[] scoreDocs = new ScoreDoc[documents.size()];
  int j = 0;
  for (NamedList<Object> document : documents) {
    Object docId = document.get(ID);
    if (docId != null) {
      docId = docId.toString();
    } else {
      log.error("doc {} has null 'id'", document);
    }
    Float score = (Float) document.get("score");
    if (score == null) {
      score = Float.NaN;
    }
    Object[] sortValues = null;
    Object sortValuesVal = document.get("sortValues");
    if (sortValuesVal != null) {
      sortValues = ((List) sortValuesVal).toArray();
      for (int k = 0; k < sortValues.length; k++) {
        SchemaField field = groupSort.getSort()[k].getField() != null
            ? schema.getFieldOrNull(groupSort.getSort()[k].getField())
            : null;
        sortValues[k] = ShardResultTransformerUtils.unmarshalSortValue(sortValues[k], field);
      }
    } else {
      log.debug("doc {} has null 'sortValues'", document);
    }
    scoreDocs[j++] = new ShardDoc(score, sortValues, docId, shard);
  }
  return scoreDocs;
}
Example 11
Source File: AlfrescoSpellCheckComponent.java From SearchServices with GNU Lesser General Public License v3.0 | 4 votes |
/**
 * <b>Disclaimer</b>: The code is copied from the super class
 * ({@link org.apache.solr.handler.component.SpellCheckComponent}); only the
 * collator is replaced with the AlfrescoSpellCheckCollator.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
protected void addCollationsToResponse(SolrParams params, SpellingResult spellingResult, ResponseBuilder rb,
                                       String q, NamedList response, boolean suggestionsMayOverlap) {
  int maxCollations = params.getInt(SPELLCHECK_MAX_COLLATIONS, 1);
  int maxCollationTries = params.getInt(SPELLCHECK_MAX_COLLATION_TRIES, 0);
  int maxCollationEvaluations = params.getInt(SPELLCHECK_MAX_COLLATION_EVALUATIONS, 10000);
  boolean collationExtendedResults = params.getBool(SPELLCHECK_COLLATE_EXTENDED_RESULTS, false);
  int maxCollationCollectDocs = params.getInt(SPELLCHECK_COLLATE_MAX_COLLECT_DOCS, 0);
  // If not reporting hits counts, don't bother collecting more than 1 document per try.
  if (!collationExtendedResults) {
    maxCollationCollectDocs = 1;
  }
  boolean shard = params.getBool(ShardParams.IS_SHARD, false);
  AlfrescoSpellCheckCollator collator = new AlfrescoSpellCheckCollator();
  collator.setMaxCollations(maxCollations);
  collator.setMaxCollationTries(maxCollationTries);
  collator.setMaxCollationEvaluations(maxCollationEvaluations);
  collator.setSuggestionsMayOverlap(suggestionsMayOverlap);
  collator.setDocCollectionLimit(maxCollationCollectDocs);
  List<AlfrescoSpellCheckCollation> collations = collator.collate(spellingResult, q, rb);
  // by sorting here we guarantee a non-distributed request returns all
  // results in the same order as a distributed request would,
  // even in cases when the internal rank is the same.
  Collections.sort(collations);
  NamedList collationList = new NamedList();
  for (AlfrescoSpellCheckCollation collation : collations) {
    if (collationExtendedResults) {
      NamedList extendedResult = new SimpleOrderedMap();
      extendedResult.add("collationQuery", collation.getCollationQuery());
      extendedResult.add("hits", collation.getHits());
      extendedResult.add("misspellingsAndCorrections", collation.getMisspellingsAndCorrections());
      if (maxCollationTries > 0 && shard) {
        extendedResult.add("collationInternalRank", collation.getInternalRank());
      }
      extendedResult.add("collationQueryString", collation.getCollationQueryString());
      collationList.add("collation", extendedResult);
    } else {
      collationList.add("collation", collation.getCollationQuery());
      if (maxCollationTries > 0 && shard) {
        collationList.add("collationInternalRank", collation.getInternalRank());
      }
    }
  }
  // Support Solr 4 output format
  NamedList suggestions = (NamedList) response.get("suggestions");
  suggestions.addAll(collationList);
  // Support Solr distributed merge format.
  response.add("collations", collationList);
}
Example 12
Source File: CarrotClusteringEngineTest.java From lucene-solr with Apache License 2.0 | 4 votes |
@SuppressWarnings("unchecked") private List<String> getLabels(NamedList<Object> cluster) { return (List<String>) cluster.get("labels"); }
Example 13
Source File: JaegerTracerConfigurator.java From lucene-solr with Apache License 2.0 | 4 votes |
@Override
public void init(@SuppressWarnings({"rawtypes"}) NamedList args) {
  Object host = args.get(AGENT_HOST);
  if (!(host instanceof String)) {
    throw new IllegalArgumentException("Expected a required string for param '" + AGENT_HOST + "'");
  }
  Object portArg = args.get(AGENT_PORT);
  if (!(portArg instanceof Integer)) {
    throw new IllegalArgumentException("Expected a required int for param '" + AGENT_PORT + "'");
  }
  int port = (Integer) portArg;
  Boolean logSpans = args.getBooleanArg(LOG_SPANS);
  if (logSpans == null) logSpans = true;
  Object flushIntervalArg = args.get(FLUSH_INTERVAL);
  if (flushIntervalArg != null && !(flushIntervalArg instanceof Integer)) {
    throw new IllegalArgumentException("Expected a required int for param '" + FLUSH_INTERVAL + "'");
  }
  int flushInterval = flushIntervalArg == null ? 1000 : (Integer) flushIntervalArg;
  Object maxQueueArgs = args.get(MAX_QUEUE_SIZE);
  if (maxQueueArgs != null && !(maxQueueArgs instanceof Integer)) {
    throw new IllegalArgumentException("Expected a required int for param '" + MAX_QUEUE_SIZE + "'");
  }
  int maxQueue = maxQueueArgs == null ? 10000 : (Integer) maxQueueArgs;

  Configuration.SamplerConfiguration samplerConfig = new Configuration.SamplerConfiguration()
      .withType(ConstSampler.TYPE)
      .withParam(1);

  Configuration.ReporterConfiguration reporterConfig = Configuration.ReporterConfiguration.fromEnv();
  Configuration.SenderConfiguration senderConfig = reporterConfig.getSenderConfiguration()
      .withAgentHost(host.toString())
      .withAgentPort(port);
  reporterConfig.withLogSpans(logSpans)
      .withFlushInterval(flushInterval)
      .withMaxQueueSize(maxQueue)
      .withSender(senderConfig);

  tracer = new Configuration("solr")
      .withSampler(samplerConfig)
      .withReporter(reporterConfig)
      .getTracer();
}
Example 14
Source File: CdcrRequestHandlerTest.java From lucene-solr with Apache License 2.0 | 4 votes |
@Test
@ShardsFixed(num = 2)
public void testCheckpointActions() throws Exception {
  // initial request on an empty index, must return -1
  @SuppressWarnings({"rawtypes"})
  NamedList rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  assertEquals(-1l, rsp.get(CdcrParams.CHECKPOINT));

  index(SOURCE_COLLECTION, getDoc(id, "a", "test_i_dvo", 10)); // shard 2

  // only one document indexed in shard 2, the checkpoint must be still -1
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  assertEquals(-1l, rsp.get(CdcrParams.CHECKPOINT));

  index(SOURCE_COLLECTION, getDoc(id, "b")); // shard 1

  // a second document indexed in shard 1, the checkpoint must come from shard 2
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  long checkpoint1 = (Long) rsp.get(CdcrParams.CHECKPOINT);
  long expected = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertEquals(expected, checkpoint1);

  index(SOURCE_COLLECTION, getDoc(id, "c")); // shard 1

  // a third document indexed in shard 1, the checkpoint must still come from shard 2
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  assertEquals(checkpoint1, rsp.get(CdcrParams.CHECKPOINT));

  index(SOURCE_COLLECTION, getDoc(id, "d")); // shard 2

  // a fourth document indexed in shard 2, the checkpoint must come from shard 1
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  long checkpoint2 = (Long) rsp.get(CdcrParams.CHECKPOINT);
  expected = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertEquals(expected, checkpoint2);

  // send a delete by id
  long pre_op = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  deleteById(SOURCE_COLLECTION, Arrays.asList(new String[]{"c"})); // shard 1

  // document deleted in shard 1, checkpoint should come from shard 2
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  long checkpoint3 = (Long) rsp.get(CdcrParams.CHECKPOINT);
  expected = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertEquals(pre_op, expected);
  assertEquals(expected, checkpoint3);

  // send an in-place update
  SolrInputDocument in_place_doc = new SolrInputDocument();
  in_place_doc.setField(id, "a");
  in_place_doc.setField("test_i_dvo", ImmutableMap.of("inc", 10)); // shard 2
  index(SOURCE_COLLECTION, in_place_doc);

  // document updated in shard 2, checkpoint should come from shard 1
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  long checkpoint4 = (Long) rsp.get(CdcrParams.CHECKPOINT);
  expected = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertEquals(expected, checkpoint4);

  // send a delete by query
  deleteByQuery(SOURCE_COLLECTION, "*:*");

  // all the checkpoints must come from the DBQ
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.COLLECTIONCHECKPOINT);
  long checkpoint5 = (Long) rsp.get(CdcrParams.CHECKPOINT);
  assertTrue(checkpoint5 > 0); // ensure that checkpoints from deletes are in absolute form
  checkpoint5 = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertTrue(checkpoint5 > 0); // ensure that checkpoints from deletes are in absolute form
  checkpoint5 = (Long) invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.SHARDCHECKPOINT).get(CdcrParams.CHECKPOINT);
  assertTrue(checkpoint5 > 0); // ensure that checkpoints from deletes are in absolute form

  // replication never started, lastProcessedVersion should be -1 for both shards
  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD1), CdcrParams.CdcrAction.LASTPROCESSEDVERSION);
  long lastVersion = (Long) rsp.get(CdcrParams.LAST_PROCESSED_VERSION);
  assertEquals(-1l, lastVersion);

  rsp = invokeCdcrAction(shardToLeaderJetty.get(SOURCE_COLLECTION).get(SHARD2), CdcrParams.CdcrAction.LASTPROCESSEDVERSION);
  lastVersion = (Long) rsp.get(CdcrParams.LAST_PROCESSED_VERSION);
  assertEquals(-1l, lastVersion);
}
Example 15
Source File: MtasSolrTestDistributedSearchConsistency.java From mtas with Apache License 2.0 | 4 votes |
/**
 * Creates the collection assertions.
 *
 * @param create the create
 * @param collection the collection
 * @param id the id
 * @param version the version
 * @param size the size
 * @param shards the shards
 */
private static void createCollectionAssertions(NamedList<Object> create, String collection, String id,
    String version, Number size, int shards) {
  assertFalse(collection + ": create - not found", create == null);
  assertTrue(collection + ": create - no valid id",
      create.get("id") != null && create.get("id") instanceof String);
  assertTrue(collection + ": create - id incorrect, '" + id + "' not equal to '" + create.get("id") + "'",
      ((String) create.get("id")).equals(id));
  assertTrue(collection + ": create - no valid version",
      create.get("version") != null && create.get("version") instanceof String);
  if (version != null) {
    assertTrue(
        collection + ": create - version incorrect, '" + version + "' not equal to '" + create.get("version") + "'",
        ((String) create.get("version")).equals(version));
  }
  assertTrue(collection + ": create - no valid size",
      create.get("size") != null && create.get("size") instanceof Number);
  Number createSize = (Number) create.get("size");
  assertEquals(collection + ": number of values", size.longValue(), createSize.longValue());
  if (shards > 0) {
    assertTrue("no (valid) shards",
        create.get("shards") != null && create.get("shards") instanceof List
            && ((List) create.get("shards")).size() == shards);
    for (Object shardItem : (List<Object>) create.get("shards")) {
      assertTrue(collection + ": invalid shardItem", shardItem instanceof NamedList);
      Object sizeRaw = ((NamedList<Object>) shardItem).get("size");
      assertTrue(collection + ": incorrect size",
          sizeRaw != null && sizeRaw instanceof Number
              && ((Number) sizeRaw).longValue() == createSize.longValue());
    }
  } else {
    assertFalse(collection + ": shards found : " + create.get("shards"),
        create.get("shards") != null && create.get("shards") instanceof List
            && !((List) create.get("shards")).isEmpty());
  }
}
Example 16
Source File: TestReplicationHandler.java From lucene-solr with Apache License 2.0 | 4 votes |
private NamedList<Object> getDetails(SolrClient s) throws Exception {
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set("command", "details");
  params.set("_trace", "getDetails");
  params.set("qt", ReplicationHandler.PATH);
  QueryRequest req = new QueryRequest(params);

  NamedList<Object> res = s.request(req);
  assertReplicationResponseSucceeded(res);

  @SuppressWarnings("unchecked")
  NamedList<Object> details = (NamedList<Object>) res.get("details");

  assertNotNull("null details", details);

  return details;
}
Example 17
Source File: CurrencyRangeFacetCloudTest.java From lucene-solr with Apache License 2.0 | 4 votes |
@Test
public void testJsonRangeFacetAsSubFacet() throws Exception {

  // limit=1, overrequest=1, with refinement enabled
  // filter out the first 5 docs (by id), which should ensure that 'x2' is the top bucket overall...
  // ...except in some rare sharding cases where it doesn't make it into the top 2 terms.
  //
  // So the filter also explicitly accepts all 'x2' docs -- ensuring we have enough matches containing that
  // term for it to be enough of a candidate in phase#1, but for many shard arrangements it won't be returned
  // by all shards, resulting in refinement being necessary to get the x_s:x2 sub-shard ranges from shard(s)
  // where x_s:x2 is only tied for the (shard local) top term count and would lose the (index order) tie
  // breaker with x_s:x0 or x_s:x1
  final String filter = "id_i1:[" + VALUES.size() + " TO *] OR x_s:x2";

  // the *facet* results should be the same regardless of whether we filter via fq or use a domain filter
  // on the top facet
  for (boolean use_domain : Arrays.asList(true, false)) {
    final String domain = use_domain ? "domain: { filter:'" + filter + "'}," : "";
    final SolrQuery solrQuery = new SolrQuery(
        "q", (use_domain ? "*:*" : filter),
        "rows", "0",
        "json.facet",
        "{ foo:{ type:terms, field:x_s, refine:true, limit:1, overrequest:1, " + domain +
            " facet: { bar:{ type:range, field:" + FIELD + ", other:all, " +
            " start:'8,EUR', gap:'2,EUR', end:'22,EUR' }} } }");
    final QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
    try {
      // this top level result count is a sanity check that should vary based on how we are filtering our facets...
      assertEquals(use_domain ? 15 : 11, rsp.getResults().getNumFound());

      @SuppressWarnings({"unchecked"})
      final NamedList<Object> foo = ((NamedList<NamedList<Object>>) rsp.getResponse().get("facets")).get("foo");

      // sanity check...
      // because of the facet limit, foo should only have 1 bucket
      // because of the fq, the val should be "x2" and the count=5
      @SuppressWarnings({"unchecked"})
      final List<NamedList<Object>> foo_buckets = (List<NamedList<Object>>) foo.get("buckets");
      assertEquals(1, foo_buckets.size());
      assertEquals("x2", foo_buckets.get(0).get("val"));
      assertEquals("foo bucket count", 5L, foo_buckets.get(0).get("count"));

      @SuppressWarnings({"unchecked"})
      final NamedList<Object> bar = (NamedList<Object>) foo_buckets.get(0).get("bar");

      // these are the 'x2' specific counts, based on our fq...
      assertEquals("before", 2L, ((NamedList) bar.get("before")).get("count"));
      assertEquals("after", 1L, ((NamedList) bar.get("after")).get("count"));
      assertEquals("between", 2L, ((NamedList) bar.get("between")).get("count"));

      @SuppressWarnings({"unchecked", "rawtypes"})
      final List<NamedList> buckets = (List<NamedList>) bar.get("buckets");
      assertEquals(7, buckets.size());
      for (int i = 0; i < 7; i++) {
        @SuppressWarnings({"rawtypes"})
        NamedList bucket = buckets.get(i);
        assertEquals((8 + (i * 2)) + ".00,EUR", bucket.get("val"));
        // 12,EUR & 15,EUR are the 2 values that align with x2 docs
        assertEquals("bucket #" + i, (i == 2 || i == 3) ? 1L : 0L, bucket.get("count"));
      }
    } catch (AssertionError | RuntimeException ae) {
      throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
    }
  }
}
Example 18
Source File: FileBasedSpellChecker.java From lucene-solr with Apache License 2.0 | 4 votes |
@Override
public String init(@SuppressWarnings({"rawtypes"}) NamedList config, SolrCore core) {
  super.init(config, core);
  characterEncoding = (String) config.get(SOURCE_FILE_CHAR_ENCODING);
  return name;
}
Example 19
Source File: TestReplicationHandler.java From lucene-solr with Apache License 2.0 | 4 votes |
@Test
public void doTestIndexAndConfigReplication() throws Exception {
  TestInjection.delayBeforeSlaveCommitRefresh = random().nextInt(10);

  clearIndexWithReplication();

  nDocs--;
  for (int i = 0; i < nDocs; i++)
    index(masterClient, "id", i, "name", "name = " + i);

  masterClient.commit();

  @SuppressWarnings({"rawtypes"})
  NamedList masterQueryRsp = rQuery(nDocs, "*:*", masterClient);
  SolrDocumentList masterQueryResult = (SolrDocumentList) masterQueryRsp.get("response");
  assertEquals(nDocs, numFound(masterQueryRsp));

  // get docs from slave and check if number is equal to master
  @SuppressWarnings({"rawtypes"})
  NamedList slaveQueryRsp = rQuery(nDocs, "*:*", slaveClient);
  SolrDocumentList slaveQueryResult = (SolrDocumentList) slaveQueryRsp.get("response");
  assertEquals(nDocs, numFound(slaveQueryRsp));

  // compare results
  String cmp = BaseDistributedSearchTestCase.compare(masterQueryResult, slaveQueryResult, 0, null);
  assertEquals(null, cmp);

  assertVersions(masterClient, slaveClient);

  // start config files replication test
  masterClient.deleteByQuery("*:*");
  masterClient.commit();

  // change the schema on master
  master.copyConfigFile(CONF_DIR + "schema-replication2.xml", "schema.xml");

  masterJetty.stop();

  masterJetty = createAndStartJetty(master);
  masterClient.close();
  masterClient = createNewSolrClient(masterJetty.getLocalPort());

  slave.setTestPort(masterJetty.getLocalPort());
  slave.copyConfigFile(slave.getSolrConfigFile(), "solrconfig.xml");

  slaveJetty.stop();

  // setup an xslt dir to force subdir file replication
  File masterXsltDir = new File(master.getConfDir() + File.separator + "xslt");
  File masterXsl = new File(masterXsltDir, "dummy.xsl");
  assertTrue("could not make dir " + masterXsltDir, masterXsltDir.mkdirs());
  assertTrue(masterXsl.createNewFile());

  File slaveXsltDir = new File(slave.getConfDir() + File.separator + "xslt");
  File slaveXsl = new File(slaveXsltDir, "dummy.xsl");
  assertFalse(slaveXsltDir.exists());

  slaveJetty = createAndStartJetty(slave);
  slaveClient.close();
  slaveClient = createNewSolrClient(slaveJetty.getLocalPort());

  // add a doc with new field and commit on master to trigger index fetch from slave.
  index(masterClient, "id", "2000", "name", "name = " + 2000, "newname", "newname = " + 2000);
  masterClient.commit();

  assertEquals(1, numFound(rQuery(1, "*:*", masterClient)));

  slaveQueryRsp = rQuery(1, "*:*", slaveClient);
  assertVersions(masterClient, slaveClient);
  SolrDocument d = ((SolrDocumentList) slaveQueryRsp.get("response")).get(0);
  assertEquals("newname = 2000", (String) d.getFieldValue("newname"));

  assertTrue(slaveXsltDir.isDirectory());
  assertTrue(slaveXsl.exists());

  checkForSingleIndex(masterJetty);
  checkForSingleIndex(slaveJetty, true);
}
Example 20
Source File: ClusteringEngine.java From lucene-solr with Apache License 2.0 | 4 votes |
public String init(NamedList<?> config, SolrCore core) {
  name = (String) config.get(ENGINE_NAME);
  return name;
}