Java Code Examples for org.apache.solr.client.solrj.SolrQuery#setQuery()
The following examples show how to use org.apache.solr.client.solrj.SolrQuery#setQuery().
Each example is taken from a real open-source project; the source file, project, and license are noted above the code.
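Before the project examples, here is a minimal, self-contained sketch of the method in use. The Solr URL, collection name, and field names are illustrative assumptions, not taken from the examples below.

import java.io.IOException;

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;

public class SetQueryDemo {
  public static void main(String[] args) throws SolrServerException, IOException {
    // Hypothetical Solr endpoint and collection, used only for illustration.
    try (HttpSolrClient client =
        new HttpSolrClient.Builder("http://localhost:8983/solr/techproducts").build()) {
      SolrQuery query = new SolrQuery();
      query.setQuery("name:solr");   // sets the 'q' parameter
      query.setRows(10);             // Solr returns only 10 documents unless rows is set
      QueryResponse response = client.query(query);
      for (SolrDocument doc : response.getResults()) {
        System.out.println(doc.getFieldValue("id")); // 'id' field assumed to exist
      }
    }
  }
}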
Example 1
Source File: TestFeatureExtractionFromMultipleSegments.java From lucene-solr with Apache License 2.0
@Test
public void testFeatureExtractionFromMultipleSegments() throws Exception {
  final SolrQuery query = new SolrQuery();
  query.setQuery("{!edismax qf='description^1' boost='sum(product(pow(normHits, 0.7), 1600), .1)' v='apple'}");
  // Request 100 rows; if any rows are fetched from the second or subsequent segments,
  // the test should only succeed if LTRRescorer::extractFeaturesInfo() advances the doc iterator properly.
  int numRows = 100;
  query.add("rows", Integer.toString(numRows));
  query.add("wt", "json");
  query.add("fq", "popularity:201");
  query.add("fl", "*, score,id,normHits,description,fv:[features store='feature-store-6' format='dense' efi.user_text='apple']");

  String res = restTestHarness.query("/query" + query.toQueryString());

  @SuppressWarnings({"unchecked"})
  Map<String,Object> resultJson = (Map<String,Object>) Utils.fromJSONString(res);
  @SuppressWarnings({"unchecked"})
  List<Map<String,Object>> docs =
      (List<Map<String,Object>>) ((Map<String,Object>) resultJson.get("response")).get("docs");

  int passCount = 0;
  for (final Map<String,Object> doc : docs) {
    String features = (String) doc.get("fv");
    assert(features.length() > 0);
    ++passCount;
  }
  assert(passCount == numRows);
}
Example 2
Source File: SearchService.java From DataHubSystem with GNU Affero General Public License v3.0
/**
 * Search.
 * <p>
 * Set `start` and `rows` values in the SolrQuery parameter to paginate the results.<br>
 * <strong>If no `rows` have been set, Solr will only return 10 documents, no more.</strong>
 * <p>
 * To get the total number of documents matching the given query, use {@code res.getNumFound()}.
 *
 * @param query a SolrQuery with at least a 'q' parameter set.
 * @return A list of Solr documents matching the given query.
 */
@PreAuthorize("hasRole('ROLE_SEARCH')")
public SolrDocumentList search(SolrQuery query) {
  Objects.requireNonNull(query);
  query.setQuery(solrDao.updateQuery(query.getQuery()));
  try {
    return solrDao.search(query).getResults();
  } catch (SolrServerException | IOException ex) {
    LOGGER.error(ex);
    throw new DHusSearchException("An exception occurred while searching", ex);
  }
}
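The Javadoc above stresses that callers must set start and rows themselves and use getNumFound() for the total count. A possible caller paging through results might look like the sketch below; the searchService instance, the query string, the uuid field, and the page size of 50 are assumptions for illustration.

// Hypothetical pagination over the search() method shown above.
SolrQuery query = new SolrQuery();
query.setQuery("platformname:Sentinel-1");  // illustrative 'q' parameter
int pageSize = 50;                          // assumed page size
for (int start = 0; ; start += pageSize) {
  query.setStart(start);     // offset of the first document to return
  query.setRows(pageSize);   // without this, Solr returns only 10 documents
  SolrDocumentList page = searchService.search(query);
  page.forEach(doc -> System.out.println(doc.getFieldValue("uuid"))); // 'uuid' is assumed
  if (start + pageSize >= page.getNumFound()) {
    break;                   // getNumFound() gives the total number of matches
  }
}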
Example 3
Source File: TestUserTermScorereQDF.java From lucene-solr with Apache License 2.0
@Test
public void testUserTermScorerQWithDF() throws Exception {
  // before();
  loadFeature("matchedTitleDF", SolrFeature.class.getName(),
      "{\"q\":\"w5\",\"df\":\"title\"}");
  loadModel("Term-matchedTitleDF", LinearModel.class.getName(),
      new String[] {"matchedTitleDF"},
      "{\"weights\":{\"matchedTitleDF\":1.0}}");

  final SolrQuery query = new SolrQuery();
  query.setQuery("title:w1");
  query.add("fl", "*, score");
  query.add("rows", "2");
  query.add("rq", "{!ltr model=Term-matchedTitleDF reRankDocs=4}");
  query.set("debugQuery", "on");

  assertJQ("/query" + query.toQueryString(), "/response/numFound/==4");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='7'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
}
Example 4
Source File: MetadataRequestQueryConverter.java From ambari-logsearch with Apache License 2.0
@Override
public SolrQuery convert(MetadataRequest metadataRequest) {
  SolrQuery metadataQuery = new SolrQuery();
  metadataQuery.setQuery("*:*");
  metadataQuery.addFilterQuery(String.format("%s:%s", TYPE, metadataRequest.getType()));
  if (StringUtils.isNotBlank(metadataRequest.getName())) {
    metadataQuery.addFilterQuery(String.format("%s:%s", NAME, metadataRequest.getName()));
  }
  SolrQuery.SortClause sortOrder = SolrQuery.SortClause.create(NAME, SolrQuery.ORDER.asc);
  List<SolrQuery.SortClause> sort = new ArrayList<>();
  sort.add(sortOrder);
  metadataQuery.setRows(10000);
  metadataQuery.setSorts(sort);
  SolrUtil.addListFilterToSolrQuery(metadataQuery, CLUSTER + "_string", metadataRequest.getClusters());
  return metadataQuery;
}
Example 5
Source File: MCRRestAPIClassifications.java From mycore with GNU General Public License v3.0
private void filterNonEmpty(String classId, Element e) {
  SolrClient solrClient = MCRSolrClientFactory.getMainSolrClient();
  Element[] categories = e.getChildren("category").toArray(Element[]::new);
  for (Element cat : categories) {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery("category:\""
        + MCRSolrUtils.escapeSearchValue(classId + ":" + cat.getAttributeValue("ID")) + "\"");
    solrQuery.setRows(0);
    try {
      QueryResponse response = solrClient.query(solrQuery);
      SolrDocumentList solrResults = response.getResults();
      if (solrResults.getNumFound() == 0) {
        cat.detach();
      } else {
        filterNonEmpty(classId, cat);
      }
    } catch (SolrServerException | IOException exc) {
      LOGGER.error(exc);
    }
  }
}
Example 6
Source File: ItemSearchServiceLiveTest.java From tutorials with MIT License
@Test
public void whenSearchingWithHitHighlighting_thenKeywordsShouldBeHighlighted() throws Exception {
  itemSearchService.index("hm0001", "Brand1 Washing Machine", "Home Appliances", 100f);
  itemSearchService.index("hm0002", "Brand1 Refrigerator", "Home Appliances", 300f);
  itemSearchService.index("hm0003", "Brand2 Ceiling Fan", "Home Appliances", 200f);
  itemSearchService.index("hm0004", "Brand2 Dishwasher", "Washing equipments", 250f);

  SolrQuery query = new SolrQuery();
  query.setQuery("Appliances");
  query.setHighlight(true);
  query.addHighlightField("category");
  query.setHighlightSimplePre("<strong>");
  query.setHighlightSimplePost("</strong>");
  QueryResponse response = solrClient.query(query);

  Map<String, Map<String, List<String>>> hitHighlightedMap = response.getHighlighting();
  Map<String, List<String>> highlightedFieldMap = hitHighlightedMap.get("hm0001");
  List<String> highlightedList = highlightedFieldMap.get("category");
  String highLightedText = highlightedList.get(0);

  assertEquals("Home <strong>Appliances</strong>", highLightedText);
}
Example 7
Source File: TestMultipleAdditiveTreesModel.java From lucene-solr with Apache License 2.0
private void doTestMultipleAdditiveTreesExplain() throws Exception {
  final SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.add("fl", "*,score,[fv]");
  query.add("rows", "3");
  query.add("rq", "{!ltr reRankDocs=3 model=multipleadditivetreesmodel efi.user_query=w3}");

  // test out the explain feature, make sure it returns something
  query.setParam("debugQuery", "on");
  String qryResult = JQ("/query" + query.toQueryString());

  qryResult = qryResult.replaceAll("\n", " ");
  assertThat(qryResult, containsString("\"debug\":{"));
  qryResult = qryResult.substring(qryResult.indexOf("debug"));

  assertThat(qryResult, containsString("\"explain\":{"));
  qryResult = qryResult.substring(qryResult.indexOf("explain"));

  assertThat(qryResult, containsString("multipleadditivetreesmodel"));
  assertThat(qryResult, containsString(MultipleAdditiveTreesModel.class.getSimpleName()));

  assertThat(qryResult, containsString("-100.0 = tree 0"));
  assertThat(qryResult, containsString("50.0 = tree 0"));
  assertThat(qryResult, containsString("-20.0 = tree 1"));
  assertThat(qryResult, containsString("'matchedTitle':1.0 > 0.5"));
  assertThat(qryResult, containsString("'matchedTitle':0.0 <= 0.5"));
  assertThat(qryResult, containsString(" Go Right "));
  assertThat(qryResult, containsString(" Go Left "));
}
Example 8
Source File: IndexSchemaRuntimeFieldTest.java From lucene-solr with Apache License 2.0
@Test
public void testRuntimeFieldCreation() {
  // Any field manipulation needs to happen when you know the core will not
  // be accepting any requests. Typically this is done within the inform()
  // method. Since this is a single-threaded test, we can change the fields
  // willy-nilly.
  SolrCore core = h.getCore();
  IndexSchema schema = core.getLatestSchema();
  final String fieldName = "runtimefield";
  SchemaField sf = new SchemaField(fieldName, schema.getFieldTypes().get("string"));
  schema.getFields().put(fieldName, sf);

  // also register a new copy field (from our new field)
  schema.registerCopyField(fieldName, "dynamic_runtime");
  schema.refreshAnalyzers();

  assertU(adoc("id", "10", "title", "test", fieldName, "aaa"));
  assertU(commit());

  SolrQuery query = new SolrQuery(fieldName + ":aaa");
  query.set("indent", "true");
  SolrQueryRequest req = new LocalSolrQueryRequest(core, query);
  assertQ("Make sure they got in", req,
      "//*[@numFound='1']",
      "//result/doc[1]/str[@name='id'][.='10']");

  // Check to see if our copy field made it out safely
  query.setQuery("dynamic_runtime:aaa");
  assertQ("Make sure they got in", req,
      "//*[@numFound='1']",
      "//result/doc[1]/str[@name='id'][.='10']");

  clearIndex();
}
Example 9
Source File: SolrSteps.java From ambari-logsearch with Apache License 2.0
@Then("the number of <component> docs is: <docSize>") public void numberOfDocsForComponent(@Named("component") String component, @Named("docSize") int docSize) throws IOException, SolrServerException, InterruptedException { SolrClient solrClient = StoryDataRegistry.INSTANCE.getSolrClient(); SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery(String.format("type:%s", component)); solrQuery.setStart(0); solrQuery.setRows(20); QueryResponse queryResponse = solrClient.query(solrQuery); SolrDocumentList list = queryResponse.getResults(); Assert.assertEquals(docSize, list.size()); }
Example 10
Source File: ItemSearchServiceLiveTest.java From tutorials with MIT License
@Test
public void whenSearchingWithFacetQuery_thenAllMatchingFacetsShouldBeAvailable() throws Exception {
  itemSearchService.index("hm0001", "Brand1 Washing Machine", "CategoryA", 100f);
  itemSearchService.index("hm0002", "Brand1 Refrigerator", "CategoryA", 300f);
  itemSearchService.index("hm0003", "Brand2 Ceiling Fan", "CategoryB", 200f);
  itemSearchService.index("hm0004", "Brand2 Dishwasher", "CategoryB", 250f);

  SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.addFacetQuery("Washing OR Refrigerator");
  query.addFacetQuery("Brand2");
  QueryResponse response = solrClient.query(query);

  Map<String, Integer> facetQueryMap = response.getFacetQuery();
  assertEquals(2, facetQueryMap.size());
  for (Map.Entry<String, Integer> entry : facetQueryMap.entrySet()) {
    String facetQuery = entry.getKey();
    if ("Washing OR Refrigerator".equals(facetQuery)) {
      assertEquals(Integer.valueOf(2), entry.getValue());
    } else if ("Brand2".equals(facetQuery)) {
      assertEquals(Integer.valueOf(2), entry.getValue());
    } else {
      fail("unexpected query");
    }
  }
}
Example 11
Source File: ServiceLogsManager.java From ambari-logsearch with Apache License 2.0
private <T extends LogData> GroupListResponse getFields(String field, String clusters, Class<T> clazz) {
  SolrQuery solrQuery = new SolrQuery();
  solrQuery.setQuery("*:*");
  SolrUtil.addListFilterToSolrQuery(solrQuery, CLUSTER, clusters);
  GroupListResponse collection = new GroupListResponse();
  SolrUtil.setFacetField(solrQuery, field);
  SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
  QueryResponse response = serviceLogsSolrDao.process(solrQuery);
  if (response == null) {
    return collection;
  }
  FacetField facetField = response.getFacetField(field);
  if (facetField == null) {
    return collection;
  }
  List<Count> fieldList = facetField.getValues();
  if (fieldList == null) {
    return collection;
  }
  SolrDocumentList docList = response.getResults();
  if (docList == null) {
    return collection;
  }
  List<LogData> groupList = new ArrayList<>(getLogDataListByFieldType(clazz, response, fieldList));
  collection.setGroupList(groupList);
  if (!docList.isEmpty()) {
    collection.setStartIndex((int) docList.getStart());
    collection.setTotalCount(docList.getNumFound());
  }
  return collection;
}
Example 12
Source File: ServiceLogsManager.java From ambari-logsearch with Apache License 2.0
public GraphDataListResponse getAggregatedInfo(ServiceLogAggregatedInfoRequest request) {
  SimpleQuery solrDataQuery = new BaseServiceLogRequestQueryConverter().convert(request);
  SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(solrDataQuery);
  String hierarchy = String.format("%s,%s,%s", HOST, COMPONENT, LEVEL);
  solrQuery.setQuery("*:*");
  SolrUtil.setFacetPivot(solrQuery, 1, hierarchy);
  QueryResponse response = serviceLogsSolrDao.process(solrQuery);
  return responseDataGenerator.generateSimpleGraphResponse(response, hierarchy);
}
Example 13
Source File: TestLTRQParserExplain.java From lucene-solr with Apache License 2.0
@Test
public void multipleAdditiveTreesScoreExplainMissingEfiFeatureShouldReturnDefaultScore() throws Exception {
  loadFeatures("external_features_for_sparse_processing.json");
  loadModels("multipleadditivetreesmodel_external_binary_features.json");

  SolrQuery query = new SolrQuery();
  query.setQuery("title:bloomberg");
  query.setParam("debugQuery", "on");
  query.add("rows", "4");
  query.add("rq", "{!ltr reRankDocs=4 model=external_model_binary_feature efi.user_device_tablet=1}");
  query.add("fl", "*,score");

  final String tree1 = "(weight=1.0,root=(feature=user_device_smartphone,threshold=0.5,left=0.0,right=50.0))";
  final String tree2 = "(weight=1.0,root=(feature=user_device_tablet,threshold=0.5,left=0.0,right=65.0))";
  final String trees = "[" + tree1 + "," + tree2 + "]";

  query.add("wt", "json");
  assertJQ(
      "/query" + query.toQueryString(),
      "/debug/explain/7=='\n"
          + "65.0 = MultipleAdditiveTreesModel(name=external_model_binary_feature,trees=" + trees + ") model applied to features, sum of:\n"
          + " 0.0 = tree 0 | \\'user_device_smartphone\\':0.0 <= 0.500001, Go Left | val: 0.0\n"
          + " 65.0 = tree 1 | \\'user_device_tablet\\':1.0 > 0.500001, Go Right | val: 65.0\n'}");
  assertJQ(
      "/query" + query.toQueryString(),
      "/debug/explain/9=='\n"
          + "65.0 = MultipleAdditiveTreesModel(name=external_model_binary_feature,trees=" + trees + ") model applied to features, sum of:\n"
          + " 0.0 = tree 0 | \\'user_device_smartphone\\':0.0 <= 0.500001, Go Left | val: 0.0\n"
          + " 65.0 = tree 1 | \\'user_device_tablet\\':1.0 > 0.500001, Go Right | val: 65.0\n'}");
}
Example 14
Source File: TestExternalFeatures.java From lucene-solr with Apache License 2.0
@Test
public void featureExtraction_valueFeatureRequiredInFq_shouldThrowException() throws Exception {
  final String userTitlePhrase1 = "userTitlePhrase1";
  final String userTitlePhrase2 = "userTitlePhrase2";
  final String userTitlePhrasePresent = (random().nextBoolean() ? userTitlePhrase1 : userTitlePhrase2);

  final SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.add("rows", "1");
  query.add("fl", "score,features:[fv efi.user_query=uq " + userTitlePhrasePresent + "=utpp]");
  assertJQ("/query" + query.toQueryString(),
      "/error/msg=='Exception from createWeight for "
          + "SolrFeature [name=titlePhrasesMatch, params={fq=[{!field f=title}${" + userTitlePhrase1
          + "}, {!field f=title}${" + userTitlePhrase2 + "}]}] "
          + "SolrFeatureWeight requires efi parameter that was not passed in request.'");
}
Example 15
Source File: TestFeatureLogging.java From lucene-solr with Apache License 2.0
@Test
public void testSparseDenseFeatures() throws Exception {
  loadFeature("match", SolrFeature.class.getName(), "test4",
      "{\"q\":\"{!terms f=title}different\"}");
  loadFeature("c4", ValueFeature.class.getName(), "test4", "{\"value\":1.0}");

  loadModel("sum4", LinearModel.class.getName(), new String[] {"match"}, "test4",
      "{\"weights\":{\"match\":1.0}}");

  final String docs0fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector("match", "1.0", "c4", "1.0");
  final String docs1fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector("c4", "1.0");

  final String docs0fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector("match", "1.0", "c4", "1.0");
  final String docs1fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector("match", "0.0", "c4", "1.0");

  final String docs0fv_default_csv = chooseDefaultFeatureVector(docs0fv_dense_csv, docs0fv_sparse_csv);
  final String docs1fv_default_csv = chooseDefaultFeatureVector(docs1fv_dense_csv, docs1fv_sparse_csv);

  final SolrQuery query = new SolrQuery();
  query.setQuery("title:bloomberg");
  query.add("rows", "10");
  query.add("rq", "{!ltr reRankDocs=10 model=sum4}");

  // csv - no feature format specified i.e. use default
  query.remove("fl");
  query.add("fl", "*,score,fv:[fv store=test4]");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv/=='" + docs0fv_default_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv/=='" + docs1fv_default_csv + "'");

  // csv - sparse feature format check
  query.remove("fl");
  query.add("fl", "*,score,fv:[fv store=test4 format=sparse]");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv/=='" + docs0fv_sparse_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv/=='" + docs1fv_sparse_csv + "'");

  // csv - dense feature format check
  query.remove("fl");
  query.add("fl", "*,score,fv:[fv store=test4 format=dense]");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv/=='" + docs0fv_dense_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv/=='" + docs1fv_dense_csv + "'");
}
Example 16
Source File: TestSelectiveWeightCreation.java From lucene-solr with Apache License 2.0
@Test
public void testSelectiveWeightsRequestFeaturesFromDifferentStore() throws Exception {

  // final String docs0fv_sparse = FeatureLoggerTestUtils.toFeatureVector(
  //     "matchedTitle","1.0", "titlePhraseMatch","0.6103343");
  // final String docs0fv_dense = FeatureLoggerTestUtils.toFeatureVector(
  //     "matchedTitle","1.0", "titlePhraseMatch","0.6103343", "titlePhrasesMatch","0.0");
  // final String docs0fv_fstore4 = FeatureLoggerTestUtils.toFeatureVector(
  //     "popularity","3.0", "originalScore","1.0");
  //
  // final String docs0fv = chooseDefaultFeatureVector(docs0fv_dense, docs0fv_sparse);

  // extract all features in externalmodel's store (default store)
  // rerank using externalmodel (default store)
  final SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.add("fl", "*,score,fv:[fv]");
  query.add("rows", "5");
  query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3 efi.userTitlePhrase1=w2 efi.userTitlePhrase2=w1}");

  // SOLR-10710, feature based on query with term w3 now scores higher on doc 4, updated
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='4'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='3'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='1'");
  // FIXME design better way to test this, we can't rely on absolute scores
  // assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='"+docs0fv+"'");

  // extract all features from fstore4
  // rerank using externalmodel (default store)
  query.remove("fl");
  query.remove("rq");
  query.add("fl", "*,score,fv:[fv store=fstore4 efi.myPop=3]");
  query.add("rq", "{!ltr reRankDocs=10 model=externalmodel efi.user_query=w3}");

  // SOLR-10710, feature based on query with term w3 now scores higher on doc 4, updated
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='4'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='3'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='1'");
  // FIXME design better way to test this, we can't rely on absolute scores
  // assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='"+docs0fv_fstore4+"'");

  // extract all features from fstore4
  // rerank using externalmodel2 (fstore2)
  query.remove("fl");
  query.remove("rq");
  query.add("fl", "*,score,fv:[fv store=fstore4 efi.myPop=3]");
  query.add("rq", "{!ltr reRankDocs=10 model=externalmodel2 efi.user_query=w3}");

  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/id=='5'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/id=='4'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/id=='3'");
  // FIXME design better way to test this, we can't rely on absolute scores
  // assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='"+docs0fv_fstore4+"'");
}
Example 17
Source File: TestSolrDocLevelOperations.java From incubator-sentry with Apache License 2.0
@Test
public void testDocLevelOperations() throws Exception {
  setupCollectionWithDocSecurity(TEST_COLLECTION_NAME1);
  createDocument(TEST_COLLECTION_NAME1);

  CloudSolrServer server = getCloudSolrServer(TEST_COLLECTION_NAME1);
  try {
    // queries
    SolrQuery query = new SolrQuery();
    query.setQuery("*:*");

    // as admin
    setAuthenticationUser(ADMIN_USER);
    QueryResponse rsp = server.query(query);
    SolrDocumentList docList = rsp.getResults();
    assertEquals(NUM_DOCS, docList.getNumFound());

    // as user0
    setAuthenticationUser("user0");
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role0", SearchConstants.QUERY);
    rsp = server.query(query);
    docList = rsp.getResults();
    assertEquals(NUM_DOCS/4, rsp.getResults().getNumFound());

    // as user1
    setAuthenticationUser("user1");
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role1", SearchConstants.QUERY);
    rsp = server.query(query);
    docList = rsp.getResults();
    assertEquals(NUM_DOCS/4, rsp.getResults().getNumFound());

    // as user2
    setAuthenticationUser("user2");
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role2", SearchConstants.QUERY);
    rsp = server.query(query);
    docList = rsp.getResults();
    assertEquals(NUM_DOCS/4, rsp.getResults().getNumFound());

    // as user3
    setAuthenticationUser("user3");
    grantCollectionPrivilege(TEST_COLLECTION_NAME1, ADMIN_USER, "role3", SearchConstants.QUERY);
    rsp = server.query(query);
    docList = rsp.getResults();
    assertEquals(NUM_DOCS/4, rsp.getResults().getNumFound());
  } finally {
    server.shutdown();
  }
  deleteCollection(TEST_COLLECTION_NAME1);
}
Example 18
Source File: TestNoMatchSolrFeature.java From lucene-solr with Apache License 2.0
@Test
public void test1NoMatchFeatureReturnsFvWith1MatchingFeatureFromStoreAndDocWith0Score() throws Exception {
  // Tests model with all non-matching features, but 1 feature store feature matching for extraction
  final SolrQuery query = new SolrQuery();
  query.setQuery("*:*");
  query.add("fl", "*, score,fv:[fv]");
  query.add("rows", "4");
  query.add("rq", "{!ltr model=nomatchmodel2 reRankDocs=4}");

  final SolrQuery yesMatchFeatureQuery = new SolrQuery();
  yesMatchFeatureQuery.setQuery("title:w1");
  yesMatchFeatureQuery.add("fl", "score");
  yesMatchFeatureQuery.add("rows", "4");
  String res = restTestHarness.query("/query" + yesMatchFeatureQuery.toQueryString());

  @SuppressWarnings({"unchecked"})
  final Map<String,Object> jsonParse = (Map<String,Object>) Utils.fromJSONString(res);
  @SuppressWarnings({"unchecked"})
  final Double doc0Score = (Double) ((Map<String,Object>) ((ArrayList<Object>) ((Map<String,Object>) jsonParse
      .get("response")).get("docs")).get(0)).get("score");

  final String docs0fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
      "nomatchfeature", "0.0",
      "yesmatchfeature", doc0Score.toString(),
      "nomatchfeature2", "0.0",
      "nomatchfeature3", "0.0");
  final String docs1fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
      "nomatchfeature", "0.0",
      "yesmatchfeature", "0.0",
      "nomatchfeature2", "0.0",
      "nomatchfeature3", "0.0");
  final String docs2fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
      "nomatchfeature", "0.0",
      "yesmatchfeature", "0.0",
      "nomatchfeature2", "0.0",
      "nomatchfeature3", "0.0");
  final String docs3fv_dense_csv = FeatureLoggerTestUtils.toFeatureVector(
      "nomatchfeature", "0.0",
      "yesmatchfeature", "0.0",
      "nomatchfeature2", "0.0",
      "nomatchfeature3", "0.0");

  final String docs0fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector("yesmatchfeature", doc0Score.toString());
  final String docs1fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector();
  final String docs2fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector();
  final String docs3fv_sparse_csv = FeatureLoggerTestUtils.toFeatureVector();

  final String docs0fv_default_csv = chooseDefaultFeatureVector(docs0fv_dense_csv, docs0fv_sparse_csv);
  final String docs1fv_default_csv = chooseDefaultFeatureVector(docs1fv_dense_csv, docs1fv_sparse_csv);
  final String docs2fv_default_csv = chooseDefaultFeatureVector(docs2fv_dense_csv, docs2fv_sparse_csv);
  final String docs3fv_default_csv = chooseDefaultFeatureVector(docs3fv_dense_csv, docs3fv_sparse_csv);

  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/score==0.0");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[0]/fv=='" + docs0fv_default_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/score==0.0");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[1]/fv=='" + docs1fv_default_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/score==0.0");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[2]/fv=='" + docs2fv_default_csv + "'");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/score==0.0");
  assertJQ("/query" + query.toQueryString(), "/response/docs/[3]/fv=='" + docs3fv_default_csv + "'");
}
Example 19
Source File: AmbariInfraWithStormLogSearch.java From streamline with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public LogSearchResult search(LogSearchCriteria logSearchCriteria) {
  SolrQuery query = new SolrQuery();
  query.setQuery(buildColumnAndValue(COLUMN_NAME_LOG_MESSAGE, buildValue(logSearchCriteria.getSearchString())));
  query.addFilterQuery(buildColumnAndValue(COLUMN_NAME_TYPE, COLUMN_VALUE_TYPE_WORKER_LOG));
  query.addFilterQuery(buildColumnAndValue(COLUMN_NAME_STREAMLINE_TOPOLOGY_ID, buildValue(logSearchCriteria.getAppId())));
  query.addFilterQuery(buildColumnAndValue(COLUMN_NAME_LOG_TIME, buildDateRangeValue(logSearchCriteria.getFrom(), logSearchCriteria.getTo())));

  List<String> componentNames = logSearchCriteria.getComponentNames();
  if (componentNames != null && !componentNames.isEmpty()) {
    query.addFilterQuery(buildColumnAndValue(COLUMN_NAME_STREAMLINE_COMPONENT_NAME, buildORValues(componentNames)));
  }

  List<String> logLevels = logSearchCriteria.getLogLevels();
  if (logLevels == null || logLevels.isEmpty()) {
    logLevels = DEFAULT_LOG_LEVELS;
  }
  query.addFilterQuery(buildColumnAndValue(COLUMN_NAME_LOG_LEVEL, buildORValues(logLevels)));

  if (logSearchCriteria.getAscending() == null || logSearchCriteria.getAscending()) {
    query.addSort(COLUMN_NAME_LOG_TIME, SolrQuery.ORDER.asc);
  } else {
    query.addSort(COLUMN_NAME_LOG_TIME, SolrQuery.ORDER.desc);
  }

  if (logSearchCriteria.getStart() != null) {
    query.setStart(logSearchCriteria.getStart());
  }
  if (logSearchCriteria.getLimit() != null) {
    query.setRows(logSearchCriteria.getLimit());
  }

  LOG.debug("Querying to Solr: query => {}", query);

  long numFound;
  List<LogSearchResult.LogDocument> results = new ArrayList<>();
  try {
    QueryResponse response = solr.query(query);
    SolrDocumentList docList = response.getResults();
    numFound = docList.getNumFound();

    for (SolrDocument document : docList) {
      String appId = (String) document.getFieldValue(COLUMN_NAME_STREAMLINE_TOPOLOGY_ID);
      String componentName = (String) document.getFieldValue(COLUMN_NAME_STREAMLINE_COMPONENT_NAME);
      String logLevel = (String) document.getFieldValue(COLUMN_NAME_LOG_LEVEL);
      String logMessage = (String) document.getFieldValue(COLUMN_NAME_LOG_MESSAGE);
      String host = (String) document.getFieldValue(COLUMN_NAME_HOST);
      String port = (String) document.getFieldValue(COLUMN_NAME_STORM_WORKER_PORT);
      Date logDate = (Date) document.getFieldValue(COLUMN_NAME_LOG_TIME);
      long timestamp = logDate.toInstant().toEpochMilli();

      LogSearchResult.LogDocument logDocument = new LogSearchResult.LogDocument(appId, componentName,
          logLevel, logMessage, host, port != null ? Integer.parseInt(port) : null, timestamp);
      results.add(logDocument);
    }
  } catch (SolrServerException | IOException e) {
    // TODO: any fine-grained control needed?
    throw new RuntimeException(e);
  }

  return new LogSearchResult(numFound, results);
}
Example 20
Source File: SolrController.java From Spring-Boot-Book with Apache License 2.0
@RequestMapping("/queryAll") public Object queryAll() throws IOException, SolrServerException { //第二种方式 SolrQuery solrQuery = new SolrQuery(); // 设置默认搜索域 solrQuery.setQuery("*:*"); // solrQuery.addField("*"); solrQuery.set("q", "知然"); solrQuery.add("q", "name:然"); // 设置返回结果的排序规则 solrQuery.setSort("id", SolrQuery.ORDER.asc); //设置查询的条数 solrQuery.setRows(50); //设置查询的开始 solrQuery.setStart(0); // 设置分页参数 solrQuery.setStart(0); solrQuery.setRows(20); //设置高亮 solrQuery.setHighlight(true); //设置高亮的字段 solrQuery.addHighlightField("name"); //设置高亮的样式 solrQuery.setHighlightSimplePre("<font color='red'>"); solrQuery.setHighlightSimplePost("</font>"); System.out.println(solrQuery); QueryResponse response = solrClient.query(solrQuery); //返回高亮显示结果 Map<String, Map<String, List<String>>> highlighting = response.getHighlighting(); //response.getResults();查询返回的结果 SolrDocumentList documentList = response.getResults(); long numFound = documentList.getNumFound(); System.out.println("总共查询到的文档数量: " + numFound); for (SolrDocument solrDocument : documentList) { System.out.println("solrDocument==============" + solrDocument); System.out.println("solrDocument==============" + solrDocument.get("name")); } return highlighting; }