Java Code Examples for org.elasticsearch.search.SearchHit#getSourceAsMap()
The following examples show how to use
org.elasticsearch.search.SearchHit#getSourceAsMap() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: ComperableHitResult.java From elasticsearch-sql with Apache License 2.0 | 7 votes |
/**
 * Builds a comparable view of a search hit by extracting the requested
 * fields (in order) from the hit source and joining them with the separator.
 * Tracks whether every requested field was missing ({@code isAllNull}).
 *
 * @param hit the search hit to wrap
 * @param fieldsOrder field paths to extract, in comparison order
 * @param seperator string used to join the extracted values
 */
public ComperableHitResult(SearchHit hit , String[] fieldsOrder ,String seperator) {
    this.hit = hit;
    Map<String, Object> hitAsMap = hit.getSourceAsMap();
    this.flattenMap = new HashMap<>();
    this.isAllNull = true;

    List<String> parts = new ArrayList<>();
    for (String field : fieldsOrder) {
        Object value = Util.deepSearchInMap(hitAsMap, field);
        if (value == null) {
            // Missing fields still occupy a slot so positions stay aligned.
            parts.add("");
        } else {
            this.isAllNull = false;
            parts.add(value.toString());
            this.flattenMap.put(field, value);
        }
    }
    this.comperator = Joiner.on(seperator).join(parts);
}
Example 2
Source File: QueryTest.java From elasticsearch-sql with Apache License 2.0 | 6 votes |
/** Verifies that a nested boolean WHERE condition only returns matching documents. */
@Test
public void complexConditionQuery() throws IOException, SqlParseException, SQLFeatureNotSupportedException{
    String errorMessage = "Result does not exist to the condition (gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8)";
    SearchHits response = query(String.format(
            "SELECT * FROM %s/account WHERE (gender='m' AND (age> 25 OR account_number>5)) OR (gender='f' AND (age>30 OR account_number < 8))",
            TEST_INDEX_ACCOUNT));
    for (SearchHit hit : response.getHits()) {
        Map<String, Object> source = hit.getSourceAsMap();
        String gender = ((String) source.get("gender")).toLowerCase();
        int age = (int) source.get("age");
        int accountNumber = (int) source.get("account_number");
        // Each hit must satisfy one of the two disjunct branches of the query.
        boolean matchesMaleBranch = gender.equals("m") && (age > 25 || accountNumber > 5);
        boolean matchesFemaleBranch = gender.equals("f") && (age > 30 || accountNumber < 8);
        Assert.assertTrue(errorMessage, matchesMaleBranch || matchesFemaleBranch);
    }
}
Example 3
Source File: QueryTest.java From elasticsearch-sql with Apache License 2.0 | 6 votes |
/** Verifies that ODBC-style date literals ({ts '...'}) work in a date comparison. */
@Test
public void dateSearchBraces() throws IOException, SqlParseException, SQLFeatureNotSupportedException, ParseException {
    DateTimeFormatter formatter = DateTimeFormat.forPattern(TS_DATE_FORMAT);
    DateTime upperBound = new DateTime(2015, 3, 15, 0, 0, 0);
    SearchHits response = query(String.format(
            "SELECT odbc_time FROM %s/odbc WHERE odbc_time < {ts '2015-03-15 00:00:00.000'}",
            TEST_INDEX_ODBC));
    for (SearchHit hit : response.getHits()) {
        Map<String, Object> source = hit.getSourceAsMap();
        // Stored values are wrapped in ODBC braces, e.g. {ts '2015-01-01 ...'}; strip them.
        String rawTime = ((String) source.get("odbc_time"))
                .replace("{ts '", "")
                .replace("'}", "");
        DateTime insertTime = formatter.parseDateTime(rawTime);
        Assert.assertTrue(
                String.format("insert_time must be smaller then 2015-03-15. found: %s", insertTime),
                insertTime.isBefore(upperBound));
    }
}
Example 4
Source File: ScrollSpout.java From storm-crawler with Apache License 2.0 | 6 votes |
@Override public void onResponse(SearchResponse response) { SearchHits hits = response.getHits(); LOG.info("{} ES query returned {} hits in {} msec", logIdprefix, hits.getHits().length, response.getTook().getMillis()); hasFinished = hits.getHits().length == 0; synchronized (this.queue) { // Unlike standard spouts, the scroll queries should never return // the same // document twice -> no need to look in the buffer or cache for (SearchHit hit : hits) { Map<String, Object> keyValues = hit.getSourceAsMap(); String url = (String) keyValues.get("url"); String status = (String) keyValues.get("status"); String nextFetchDate = (String) keyValues.get("nextFetchDate"); Metadata metadata = fromKeyValues(keyValues); metadata.setValue( AbstractStatusUpdaterBolt.AS_IS_NEXTFETCHDATE_METADATA, nextFetchDate); this.queue.add(new Values(url, metadata, Status.valueOf(status))); } } scrollId = response.getScrollId(); // remove lock markQueryReceivedNow(); }
Example 5
Source File: ThreadAnalysisQueryHandler.java From uavstack with Apache License 2.0 | 6 votes |
/**
 * Collects the source map of every hit in the response, skipping hits
 * that carry no source.
 *
 * @param sr the search response to read
 * @return the non-null source maps, in hit order
 */
private List<Map<String, Object>> getRecords(SearchResponse sr) {
    List<Map<String, Object>> records = new ArrayList<Map<String, Object>>();
    for (SearchHit hit : sr.getHits()) {
        Map<String, Object> source = hit.getSourceAsMap();
        if (source != null) {
            records.add(source);
        }
    }
    return records;
}
Example 6
Source File: Generator.java From elasticsearch-report-engine with GNU General Public License v3.0 | 6 votes |
/**
 * Extracts the source map of every hit in the search response.
 *
 * @param response the search response to read
 * @return one source map per hit
 * @throws ReportGenerationException if any shard reported a failure
 * @throws NoDataFoundException if there are no hits or extraction fails
 */
@SuppressWarnings("unchecked")
public List<Map> extractData(SearchResponse response) throws NoDataFoundException, ReportGenerationException {
    List<Map> data = new LinkedList<>();
    SearchHits hits = response.getHits();
    if (response.getShardFailures().length > 0) {
        throw new ReportGenerationException("Report failed to get data. Kindly try again.");
    }
    if (hits.getTotalHits() == 0) {
        throw new NoDataFoundException("No data found");
    }
    try {
        for (SearchHit hit : hits) {
            data.add(hit.getSourceAsMap());
        }
    } catch (Exception e) {
        // NOTE(review): the original cause is dropped here; only its message survives.
        throw new NoDataFoundException("Error extracting data : " + e.getMessage());
    }
    return data;
}
Example 7
Source File: TestSchema.java From kafka-connect-elasticsearch-source with Apache License 2.0 | 6 votes |
public void testSearch() throws Exception { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(QueryBuilders.matchAllQuery()); searchRequest.source(searchSourceBuilder); searchRequest.indices("metricbeat-6.2.4-2018.05.20"); SearchResponse searchResponse = es.getClient().search(searchRequest); SearchHits hits = searchResponse.getHits(); SearchHit[] searchHits = hits.getHits(); for (SearchHit hit : searchHits) { // do something with the SearchHit Map<String, Object> sourceAsMap = hit.getSourceAsMap(); System.out.println(sourceAsMap); Schema schema = SchemaConverter.convertElasticMapping2AvroSchema(sourceAsMap, "test"); schema.toString(); Struct struct = StructConverter.convertElasticDocument2AvroStruct(sourceAsMap,schema); struct.toString(); } }
Example 8
Source File: MetadataQueryEsDAO.java From skywalking with Apache License 2.0 | 6 votes |
/**
 * Maps each hit of the response to a {@link Service} via the ServiceTraffic
 * source-map conversion.
 *
 * @param response search response over service-traffic documents
 * @return the services described by the hits, in hit order
 */
private List<Service> buildServices(SearchResponse response) {
    List<Service> services = new ArrayList<>();
    for (SearchHit hit : response.getHits()) {
        final ServiceTraffic traffic =
                new ServiceTraffic.Builder().map2Data(hit.getSourceAsMap());
        Service service = new Service();
        service.setId(traffic.id());
        service.setName(traffic.getName());
        services.add(service);
    }
    return services;
}
Example 9
Source File: ScanQueryPageSource.java From presto with Apache License 2.0 | 5 votes |
/**
 * Builds the next page of blocks from the scroll iterator, decoding each hit
 * into the per-column block builders until the buffered data reaches the
 * default page-size budget or the iterator is exhausted.
 */
@Override
public Page getNextPage() {
    long size = 0;
    while (size < PageBuilderStatus.DEFAULT_MAX_PAGE_SIZE_IN_BYTES && iterator.hasNext()) {
        SearchHit hit = iterator.next();
        Map<String, Object> document = hit.getSourceAsMap();
        for (int i = 0; i < decoders.size(); i++) {
            String field = columns.get(i).getName();
            // The supplier lambda captures this hit's document map lazily.
            decoders.get(i).decode(hit, () -> getField(document, field), columnBuilders[i]);
        }
        if (hit.getSourceRef() != null) {
            totalBytes += hit.getSourceRef().length();
        }
        // Recompute the total in-memory size of everything buffered so far.
        size = Arrays.stream(columnBuilders)
                .mapToLong(BlockBuilder::getSizeInBytes)
                .sum();
    }
    Block[] blocks = new Block[columnBuilders.length];
    for (int i = 0; i < columnBuilders.length; i++) {
        blocks[i] = columnBuilders[i].build();
        // Replace each builder with an empty one so the next page starts fresh.
        columnBuilders[i] = columnBuilders[i].newBlockBuilderLike(null);
    }
    return new Page(blocks);
}
Example 10
Source File: SearchResult.java From elasticsearch-sql with Apache License 2.0 | 5 votes |
/**
 * Captures the total hit count and the per-hit contents of a search response.
 * Hits without a source fall back to their stored document fields.
 *
 * @param resp the search response to summarize
 */
public SearchResult(SearchResponse resp) {
    SearchHits hits = resp.getHits();
    this.total = hits.getTotalHits().value;
    results = new ArrayList<>(hits.getHits().length);
    for (SearchHit searchHit : hits.getHits()) {
        Map<String, Object> source = searchHit.getSourceAsMap();
        if (source != null) {
            results.add(source);
        } else if (searchHit.getFields() != null) {
            results.add(toFieldsMap(searchHit.getFields()));
        }
    }
}
Example 11
Source File: UnionExecutor.java From elasticsearch-sql with Apache License 2.0 | 5 votes |
/**
 * Copies the given hits into {@code unionHits}, assigning each copy a fresh
 * sequential id and (optionally) renaming source fields to their aliases.
 */
private void fillInternalSearchHits(List<SearchHit> unionHits, SearchHit[] hits, Map<String, String> fieldNameToAlias) {
    for(SearchHit hit : hits){
        SearchHit searchHit = new SearchHit(currentId, hit.getId(), new Text(hit.getType()), hit.getFields(), null);
        searchHit.sourceRef(hit.getSourceRef());
        // NOTE(review): this relies on getSourceAsMap() returning the hit's
        // cached, mutable source map — clear it, then repopulate below with
        // the (possibly alias-renamed) entries. Confirm this still holds for
        // the Elasticsearch version in use.
        searchHit.getSourceAsMap().clear();
        Map<String, Object> sourceAsMap = hit.getSourceAsMap();
        if(!fieldNameToAlias.isEmpty()){
            updateFieldNamesToAlias(sourceAsMap, fieldNameToAlias);
        }
        searchHit.getSourceAsMap().putAll(sourceAsMap);
        currentId++;
        unionHits.add(searchHit);
    }
}
Example 12
Source File: Test.java From dht-spider with MIT License | 5 votes |
public static void search(Map<String, Object> m) throws Exception{ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.from(0); searchSourceBuilder.size(5); searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS)); searchSourceBuilder.query(QueryBuilders.termQuery("message", "视频")); SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("torrent"); searchRequest.source(searchSourceBuilder); SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); RestStatus status = searchResponse.status(); TimeValue took = searchResponse.getTook(); Boolean terminatedEarly = searchResponse.isTerminatedEarly(); boolean timedOut = searchResponse.isTimedOut(); SearchHits hits = searchResponse.getHits(); SearchHit[] searchHits = hits.getHits(); for (SearchHit hit : searchHits) { // do something with the SearchHit Map<String, Object> sourceAsMap = hit.getSourceAsMap(); System.out.print(sourceAsMap+"==="); } }
Example 13
Source File: QueryTest.java From elasticsearch-sql with Apache License 2.0 | 5 votes |
@Test public void notLikeTests() throws IOException, SqlParseException, SQLFeatureNotSupportedException{ //cant use string.format cause of %d SearchHits response = query("SELECT name FROM " +TEST_INDEX_GAME_OF_THRONES + "/gotCharacters where name.firstname not like '%d' and name is not null LIMIT 1000"); Assert.assertEquals(3, response.getTotalHits().value); for(SearchHit hit : response.getHits()) { Map<String, Object> sourceAsMap = hit.getSourceAsMap(); String name = ((HashMap<String, Object>) sourceAsMap.get("name")).get("firstname").toString(); Assert.assertFalse(name+" was in not like %d",name.startsWith("d")); } }
Example 14
Source File: EsDataQuerierOld.java From moql with Apache License 2.0 | 5 votes |
/**
 * Intended to convert a search response into a RecordSet for the given
 * selector definition.
 *
 * NOTE(review): this implementation is incomplete — the loop reads each hit's
 * source map but discards it, and the method always returns null. The "Old"
 * in the file name suggests it was superseded; confirm before relying on it.
 */
protected RecordSet toRecordSet(SelectorDefinition selectorDefinition, SearchResponse searchResponse) {
    SearchHits hits = searchResponse.getHits();
    for(SearchHit hit : hits) {
        Map<String, Object> map = hit.getSourceAsMap();
    }
    return null;
}
Example 15
Source File: ElasticSearchProvider.java From inception with Apache License 2.0 | 4 votes |
/**
 * Runs a simple-query-string search against the repository's index and maps
 * each hit to an {@link ExternalSearchResult}, including unified highlights
 * on the default field.
 *
 * @param aRepository the repository the results belong to
 * @param aTraits connection and query settings (index, default field, result size, random order)
 * @param aQuery the user query, in simple query string syntax
 * @return the mapped results; hits without document metadata are skipped
 * @throws IOException if the Elasticsearch request fails
 */
@Override
public List<ExternalSearchResult> executeQuery(DocumentRepository aRepository,
        ElasticSearchProviderTraits aTraits, String aQuery)
    throws IOException
{
    List<ExternalSearchResult> results = new ArrayList<>();
    try (RestHighLevelClient client = makeClient(aTraits)) {
        HighlightBuilder highlightBuilder = new HighlightBuilder()
                .field(new HighlightBuilder.Field(aTraits.getDefaultField())
                        .highlighterType("unified"));
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
                // Exclude the raw document body from the returned source.
                .fetchSource(null, ELASTIC_HIT_DOC_KEY)
                .highlighter(highlightBuilder)
                .size(aTraits.getResultSize());
        QueryBuilder qb = QueryBuilders.simpleQueryStringQuery(aQuery)
                .field(aTraits.getDefaultField());
        if (aTraits.isRandomOrder()) {
            // Seeded random scoring yields a reproducible shuffle of the matches.
            RandomScoreFunctionBuilder randomFunc = ScoreFunctionBuilders.randomFunction();
            randomFunc.seed(aTraits.getSeed());
            searchSourceBuilder.query(QueryBuilders.functionScoreQuery(
                    QueryBuilders.constantScoreQuery(qb).boost(1.0f), randomFunc));
        }
        else {
            searchSourceBuilder.query(qb);
        }
        SearchRequest searchRequest = new SearchRequest(aTraits.getIndexName())
                .source(searchSourceBuilder);
        SearchResponse response = client.search(searchRequest);
        for (SearchHit hit : response.getHits().getHits()) {
            if (hit.getSourceAsMap() == null
                    || hit.getSourceAsMap().get(ELASTIC_HIT_METADATA_KEY) == null) {
                log.warn("Result has no document metadata: " + hit);
                continue;
            }
            ExternalSearchResult result = new ExternalSearchResult(aRepository,
                    aTraits.getIndexName(), hit.getId());
            // If the order is random, then the score doesn't reflect the quality, so we do not
            // forward it to the user
            if (!aTraits.isRandomOrder()) {
                result.setScore((double) hit.getScore());
            }
            fillResultWithMetadata(result, hit.getSourceAsMap());
            if (hit.getHighlightFields().size() != 0) {
                // There are highlights, set them in the result
                List<ExternalSearchHighlight> highlights = new ArrayList<>();
                if (hit.getHighlightFields().get(aTraits.getDefaultField()) != null) {
                    for (Text highlight : hit.getHighlightFields()
                            .get(aTraits.getDefaultField())
                            .getFragments()) {
                        highlights.add(new ExternalSearchHighlight(highlight.toString()));
                    }
                }
                result.setHighlights(highlights);
            }
            results.add(result);
        }
    }
    return results;
}
Example 16
Source File: CsvContent.java From elasticsearch-dataformat with Apache License 2.0 | 4 votes |
/**
 * Handles one page of a scrolling search: appends each hit's flattened source
 * as a CSV row; when the scroll is exhausted, optionally rewrites the output
 * file with a header line (via a temp file) before signalling completion.
 */
@Override
public void onResponse(final SearchResponse response) {
    final String scrollId = response.getScrollId();
    final SearchHits hits = response.getHits();
    final int size = hits.getHits().length;
    currentCount += size;
    if (logger.isDebugEnabled()) {
        logger.debug("scrollId: {}, totalHits: {}, hits: {}, current: {}",
                scrollId, hits.getTotalHits(), size, currentCount);
    }
    try {
        for (final SearchHit hit : hits) {
            final Map<String, Object> sourceMap = hit.getSourceAsMap();
            final Map<String, Object> dataMap = new HashMap<>();
            // Flatten nested source objects into dotted keys.
            MapUtils.convertToFlatMap("", sourceMap, dataMap);
            // Grow the header set as new fields appear (when allowed).
            for (final String key : dataMap.keySet()) {
                if (modifiableFieldSet && !headerSet.contains(key)) {
                    headerSet.add(key);
                }
            }
            // Emit the row in header order; missing fields become null cells.
            final List<String> dataList = new ArrayList<>(
                    dataMap.size());
            for (final String name : headerSet) {
                final Object value = dataMap.get(name);
                dataList.add(value != null ? value.toString() : null);
            }
            csvWriter.writeValues(dataList);
        }
        if (size == 0 || scrollId == null) {
            // end of scroll: flush and close the body writer
            csvWriter.flush();
            close();
            if (appendHeader) {
                // Write header + body into a temp file, then copy it back over
                // the original only if the rewrite fully succeeded.
                boolean finished = false;
                final Path tempFile = Files
                        .createTempFile("dataformat_", ".csv");
                try (final OutputStream out = Files
                        .newOutputStream(tempFile);
                        final CsvWriter writer = new CsvWriter(
                                new OutputStreamWriter(out, charsetName),
                                csvConfig)) {
                    writer.writeValues(headerSet.stream()
                            .collect(Collectors.toList()));
                    writer.flush();
                    Files.copy(outputFile.toPath(), out);
                    finished = true;
                } finally {
                    if (finished) {
                        Files.copy(tempFile, outputFile.toPath(),
                                StandardCopyOption.REPLACE_EXISTING);
                    }
                    Files.delete(tempFile);
                }
            }
            listener.onResponse(null);
        } else {
            // Fetch the next page of the scroll with this handler as callback.
            client.prepareSearchScroll(scrollId)
                    .setScroll(RequestUtil.getScroll(request))
                    .execute(this);
        }
    } catch (final Exception e) {
        onFailure(e);
    }
}
Example 17
Source File: ElasticSourceTask.java From kafka-connect-elasticsearch-source with Apache License 2.0 | 4 votes |
private SearchHit[] parseSearchResult(String index, String lastValue, List<SourceRecord> results, SearchResponse searchResponse, Object scrollId) { if (results.size() > size) { return null; //nothing to do: limit reached } SearchHits hits = searchResponse.getHits(); int totalShards = searchResponse.getTotalShards(); int successfulShards = searchResponse.getSuccessfulShards(); logger.info("total shard {}, successuful: {}", totalShards, successfulShards); logger.info("retrived {}, scroll id : {}", hits, scrollId); int failedShards = searchResponse.getFailedShards(); for (ShardSearchFailure failure : searchResponse.getShardFailures()) { // failures should be handled here logger.error("failed {}", failure); } if (failedShards > 0) { throw new RuntimeException("failed shard in search"); } SearchHit[] searchHits = hits.getHits(); for (SearchHit hit : searchHits) { // do something with the SearchHit Map<String, Object> sourceAsMap = hit.getSourceAsMap(); Map sourcePartition = Collections.singletonMap(INDEX, index); Map sourceOffset = Collections.singletonMap(POSITION, sourceAsMap.get(incrementingField).toString()); Schema schema = SchemaConverter.convertElasticMapping2AvroSchema(sourceAsMap, index); Struct struct = StructConverter.convertElasticDocument2AvroStruct(sourceAsMap, schema); //document key String key = String.join("_", hit.getIndex(), hit.getType(), hit.getId()); SourceRecord sourceRecord = new SourceRecord( sourcePartition, sourceOffset, topic + index, //KEY Schema.STRING_SCHEMA, key, //VALUE schema, struct); results.add(sourceRecord); last.put(index,sourceAsMap.get(incrementingField).toString()); sent.merge(index, 1, Integer::sum); } return searchHits; }
Example 18
Source File: LocationElasticsearchTest.java From baleen with Apache License 2.0 | 4 votes |
/**
 * Persists coordinates to Elasticsearch and verifies the indexed document:
 * value/begin/end, GeoJSON location, formatted coordinate string, confidence,
 * and the derived external/document ids. A coordinate with a null value and
 * null GeoJSON must not be indexed at all.
 */
@SuppressWarnings("unchecked")
@Test
public void testLocationsAreSavedToES() throws AnalysisEngineProcessException {
    String text = "(1,2)"; // (lat, lon)
    jCas.setDocumentText(text);
    Coordinate coordinateToBePersisted = new Coordinate(jCas, 0, 5);
    // NB geoJson uses [lon, lat] ordering
    coordinateToBePersisted.setGeoJson("{\"type\": \"Point\", \"coordinates\": [2, 1]}");
    coordinateToBePersisted.setCoordinateValue("1,2");
    coordinateToBePersisted.setConfidence(0.5);
    coordinateToBePersisted.setValue(text);
    coordinateToBePersisted.addToIndexes(jCas);
    // This coordinate lacks both a value and geoJson, so it must be skipped.
    Coordinate coordinateNotToBePersisted = new Coordinate(jCas, 0, 5);
    coordinateNotToBePersisted.setCoordinateValue(null);
    coordinateNotToBePersisted.setGeoJson(null);
    coordinateNotToBePersisted.addToIndexes(jCas);
    ae.process(jCas);
    elasticsearch.flush(LOCATION_INDEX);
    SearchResponse sr = elasticsearch.client().prepareSearch(LOCATION_INDEX).execute().actionGet();
    assertEquals("Should be 1 result in Elasticsaearch", 1, sr.getHits().getTotalHits());
    SearchHit searchHit = sr.getHits().getHits()[0];
    Map<String, Object> sourceAsMap = searchHit.getSourceAsMap();
    assertEquals("Should be text", text, sourceAsMap.get(FIELD_VALUE));
    assertEquals("Should be 0", 0, sourceAsMap.get(FIELD_BEGIN));
    assertEquals("Should be 5", 5, sourceAsMap.get(FIELD_END));
    Map<String, Object> location = (Map<String, Object>) sourceAsMap.get(FIELD_LOCATION);
    assertEquals("Should be Point", "Point", location.get("type"));
    assertEquals("Should be [2.0, 1.0]", ImmutableList.of(2.0, 1.0), location.get("coordinates"));
    assertEquals("Should be in es format", "1.0, 2.0", sourceAsMap.get(FIELD_COORDINATE));
    assertEquals("Should be 0.5", 0.5, sourceAsMap.get(FIELD_CONFIDENCE));
    assertEquals(
            "Should be external id",
            "9b1da85a5367efa0001a076ab20943e568f635e00159dedc868c0f4c7bab3773",
            sourceAsMap.get(FIELD_EXTERNAL_ID));
    assertEquals(
            "Should be doc id",
            "418abde75db9a5538d0798bf957a63d2b7976de9a4136992176b6de58d562e79",
            sourceAsMap.get(FIELD_DOC_ID));
}
Example 19
Source File: ElasticsearchDocument.java From rdf4j with BSD 3-Clause "New" or "Revised" License | 4 votes |
/**
 * Creates a document view over a search hit, delegating to the main
 * constructor with the hit's id, type, index, version and source map.
 *
 * @param hit the search hit backing this document
 * @param geoContextMapper maps a field name to its spatial context
 */
public ElasticsearchDocument(SearchHit hit, Function<? super String, ? extends SpatialContext> geoContextMapper) {
    this(hit.getId(), hit.getType(), hit.getIndex(), hit.getVersion(), hit.getSourceAsMap(),
            geoContextMapper);
}
Example 20
Source File: AwsRestHighLevelClient.java From aws-athena-query-federation with Apache License 2.0 | 2 votes |
/**
 * Gets the Document from the search hit.
 *
 * @param searchHit is the search hit containing the document source.
 * @return the Document as a Map object (the hit's parsed source).
 */
public Map<String, Object> getDocument(SearchHit searchHit) {
    return searchHit.getSourceAsMap();
}