Java Code Examples for play.libs.Json#newArray()

The following examples show how to use play.libs.Json#newArray() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: GenericFilterExpander.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Evaluates a filter (a single object or an array of filter components) against an entity.
 *
 * @param entity the entity being tested
 * @param filter one filter object, or an array of filter components
 * @param not    when true, each component's result is negated before being combined
 * @param must   when true, ALL components must pass (AND); when false, ANY passing
 *               component is enough (OR)
 * @return whether the entity satisfies the combined filter
 */
private boolean evaluateComponents(JsonNode entity, JsonNode filter, boolean not, boolean must) {
    // Normalize a single filter object into a one-element array so the loop is uniform.
    if (!filter.isArray()) {
        ArrayNode arr = Json.newArray();
        arr.add(filter);
        filter = arr;
    }
    for (JsonNode one : filter) {
        boolean value = evaluateFilter(entity, one);
        if (!must && value) {
            // OR semantics: the first matching component decides.
            return true;
        } else if (must && not == value) {
            // AND semantics: reject on the first failing component.
            // (!not && !value) || (not && value) is exactly not == value.
            return false;
        }
    }
    // AND: every component passed -> true. OR: nothing matched -> false.
    return must;
}
 
Example 2
Source File: ESBasedIndexer.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Bulk-indexes the given data into ElasticSearch, optionally de-duplicating entries first.
 *
 * @param indexType the ElasticSearch document type to index into
 * @param data      an array of entities to index; empty input is a no-op
 * @throws BadRequestException if any item in the bulk request fails
 */
private void bulkIndex(String indexType, JsonNode data) {
    if (data.size() == 0) {
        return;
    }
    if (uniqueFields.size() > 0) {
        // De-duplicate: keep only the first occurrence of each composed key.
        // NOTE(review): the key is composed from dataFields although the guard checks
        // uniqueFields — confirm this is intentional and not meant to use uniqueFields.
        Set<String> keys = new HashSet<>();
        ArrayNode uniqued = Json.newArray();
        for (JsonNode point : data) {
            String key = FeatureExtractorUtilities.composeConcatenatedKey(point, dataFields);
            // Set.add returns false for duplicates, avoiding the contains+add double lookup.
            if (keys.add(key)) {
                uniqued.add(point);
            }
        }
        data = uniqued;
    }
    BulkResponse resp = elasticSearchService.bulkIndex(elasticSearchIndex, indexType, data);
    if (resp.hasFailures()) {
        throw new BadRequestException(resp.buildFailureMessage());
    }
}
 
Example 3
Source File: CSVFileIndexer.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Writes each document to CSV storage. A single document is accepted and treated as a
 * one-element batch. Only INSERT and UPSERT operations are supported.
 *
 * @param documents      one document or an array of documents to index
 * @param requestContext the request carrying the optional data-operation override
 * @throws BadRequestException for any operation other than INSERT or UPSERT
 */
public void index(JsonNode documents, RequestContext requestContext) {
    JsonNode reqBody = requestContext.getRequestBody();
    String operation = JsonHelpers.getOptionalString(reqBody, ConfigKey.DATA_OPERATION.get(),
            DataOperation.INSERT.get());
    boolean insert = operation.equals(DataOperation.INSERT.get());
    boolean upsert = operation.equals(DataOperation.UPSERT.get());
    if (!insert && !upsert) {
        throw new BadRequestException("Data operation " + operation + " is not supported");
    }
    // Wrap a single document so the write loop always iterates an array.
    JsonNode batch;
    if (documents.isArray()) {
        batch = documents;
    } else {
        ArrayNode wrapped = Json.newArray();
        wrapped.add(documents);
        batch = wrapped;
    }
    // Default timestamp: now (epoch seconds); overridden per document when present.
    int timestamp = (int) (System.currentTimeMillis() / 1000);
    for (JsonNode document : batch) {
        if (document.has(timestampField)) {
            timestamp = document.get(timestampField).asInt();
        } else {
            logger.warn("Time field {} is not present in the entity to be indexed.", timestampField);
        }
        dataService.writeCSV(indexType, document, dataFields, timestamp);
    }
}
 
Example 4
Source File: AbstractIndexer.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Streams entities from the configured DAO through the expander chain and indexes them
 * in batches of {@code batchSize}, notifying data subscribers after each batch.
 *
 * Fix: both DAOs are now closed in finally blocks so they are released even when
 * indexing or subscriber notification throws (the original leaked them on exception).
 *
 * @param requestContext the request carrying the DAO configuration under daoConfigKey
 */
public void index(RequestContext requestContext) {
    JsonNode reqBody = requestContext.getRequestBody();
    EntityDAO entityDAO = EntityDAOUtilities.getEntityDAO(daoConfigs, requestContext,
            reqBody.get(daoConfigKey), injector);
    try {
        ExpandedEntityDAO expandedEntityDAO = new ExpandedEntityDAO(expanders, entityDAO, requestContext);
        try {
            ArrayNode toIndex = Json.newArray();
            while (expandedEntityDAO.hasNextEntity()) {
                toIndex.add(expandedEntityDAO.getNextEntity());
                if (toIndex.size() >= batchSize) {
                    index(toIndex, requestContext);
                    notifyDataSubscribers(toIndex, requestContext);
                    // Reuse the buffer for the next batch.
                    toIndex.removeAll();
                }
            }
            // Flush the final partial batch, if any.
            if (toIndex.size() > 0) {
                index(toIndex, requestContext);
                notifyDataSubscribers(toIndex, requestContext);
            }
        } finally {
            expandedEntityDAO.close();
        }
    } finally {
        entityDAO.close();
    }
}
 
Example 5
Source File: JsonFileIndexer.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Writes each document to JSON file storage. A single document is treated as a
 * one-element batch. Only INSERT and UPSERT operations are supported.
 *
 * @param documents      one document or an array of documents to index
 * @param requestContext the request carrying the optional data-operation override
 * @throws BadRequestException for any operation other than INSERT or UPSERT
 */
public void index(JsonNode documents, RequestContext requestContext) {
    JsonNode reqBody = requestContext.getRequestBody();
    String operation = JsonHelpers.getOptionalString(reqBody, ConfigKey.DATA_OPERATION.get(),
            DataOperation.INSERT.get());
    if (!operation.equals(DataOperation.INSERT.get())
            && !operation.equals(DataOperation.UPSERT.get())) {
        throw new BadRequestException("Data operation " + operation + " is not supported");
    }
    // Ensure we always iterate over an array, wrapping a lone document if needed.
    JsonNode batch;
    if (documents.isArray()) {
        batch = documents;
    } else {
        ArrayNode single = Json.newArray();
        single.add(documents);
        batch = single;
    }
    // Default timestamp is the current epoch second; per-document field wins when present.
    int timestamp = (int) (System.currentTimeMillis() / 1000);
    for (JsonNode document : batch) {
        if (document.has(timestampField)) {
            timestamp = document.get(timestampField).asInt();
        } else {
            logger.warn("Time field {} is not present in the entity to be indexed.", timestampField);
        }
        dataService.writeJson(indexType, document, timestamp);
    }
}
 
Example 6
Source File: SQLBasedIndexer.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Indexes data into a SQL table. DELETE and UPSERT first remove matching rows;
 * INSERT and UPSERT then insert the rows. A single object is treated as a
 * one-element batch.
 *
 * @param data           one entity or an array of entities
 * @param requestContext the request carrying optional table and operation overrides
 */
public void index(JsonNode data, RequestContext requestContext) {
    // Normalize a single entity into an array for the bulk operations below.
    JsonNode payload;
    if (data.isArray()) {
        payload = data;
    } else {
        ArrayNode wrapped = Json.newArray();
        wrapped.add(data);
        payload = wrapped;
    }
    JsonNode reqBody = requestContext.getRequestBody();
    String targetTable = JsonHelpers.getOptionalString(reqBody, tableKey, this.table);
    String operation = JsonHelpers.getOptionalString(reqBody, ConfigKey.DATA_OPERATION.get(),
            DataOperation.INSERT.get());
    boolean isUpsert = operation.equals(DataOperation.UPSERT.get());
    // UPSERT is delete-then-insert; DELETE and INSERT each do only one half.
    if (isUpsert || operation.equals(DataOperation.DELETE.get())) {
        bulkDelete(targetTable, payload);
    }
    if (isUpsert || operation.equals(DataOperation.INSERT.get())) {
        bulkInsert(targetTable, payload);
    }
}
 
Example 7
Source File: ElasticSearchService.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Runs one or more queries against ElasticSearch as a single multi-search request.
 * A single query object is treated as a one-element batch.
 *
 * @param index the ElasticSearch index to search
 * @param type  the document type to search
 * @param query one query or an array of queries
 * @return the combined multi-search response
 */
public MultiSearchResponse bulkSearch(String index, String type, JsonNode query) {
    // Wrap a lone query so the request-building loop is uniform.
    JsonNode queries = query;
    if (!queries.isArray()) {
        ArrayNode singleton = Json.newArray();
        singleton.add(queries);
        queries = singleton;
    }
    MultiSearchRequestBuilder builder = client.prepareMultiSearch();
    for (JsonNode request : queries) {
        builder.add(
                client.prepareSearch(index)
                        .setTypes(type)
                        .setFrom(defaultFrom)
                        .setSize(defaultSize)
                        .setQuery(request.toString()));
    }
    return builder.execute().actionGet();
}
 
Example 8
Source File: Prediction.java    From samantha with MIT License 6 votes vote down vote up
/**
 * Serializes this prediction to JSON: always "score" and "attributes";
 * "scores" and "instance" only when present.
 *
 * @return the JSON representation of this prediction
 */
public JsonNode toJson() {
    ObjectNode json = Json.newObject();
    json.put("score", score);
    json.set("attributes", entity);
    if (scores != null) {
        // Copy the raw score array into a JSON array.
        ArrayNode scoreList = Json.newArray();
        for (int idx = 0; idx < scores.length; idx++) {
            scoreList.add(scores[idx]);
        }
        json.set("scores", scoreList);
    }
    if (instance != null) {
        json.set("instance", Json.toJson(instance));
    }
    return json;
}
 
Example 9
Source File: RedisLettuceService.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Returns the data as an array node: the input itself if it is already an array,
 * otherwise a new one-element array wrapping it.
 *
 * @param data any JSON node
 * @return an array node containing the data
 */
private JsonNode getArrayNode(JsonNode data) {
    if (data.isArray()) {
        return data;
    }
    ArrayNode wrapper = Json.newArray();
    wrapper.add(data);
    return wrapper;
}
 
Example 10
Source File: ElasticSearchService.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Indexes one or more documents into ElasticSearch with a single bulk request.
 * A single document is treated as a one-element batch.
 *
 * @param index     the target ElasticSearch index
 * @param type      the document type
 * @param documents one document or an array of documents
 * @return the bulk response (callers should check hasFailures())
 */
public BulkResponse bulkIndex(String index, String type, JsonNode documents) {
    // Normalize a lone document into an array so the loop below is uniform.
    JsonNode docs = documents;
    if (!docs.isArray()) {
        ArrayNode wrapped = Json.newArray();
        wrapped.add(docs);
        docs = wrapped;
    }
    BulkRequestBuilder bulkRequest = client.prepareBulk();
    for (JsonNode doc : docs) {
        bulkRequest.add(client.prepareIndex(index, type).setSource(doc.toString()));
    }
    return bulkRequest.execute().actionGet();
}
 
Example 11
Source File: RankedResult.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Serializes this ranked result to JSON: paging metadata, the ranked predictions,
 * and the optional request parameters.
 *
 * @return the JSON representation of this ranked result
 */
public JsonNode toJson() {
    ObjectNode result = Json.newObject();
    result.put("limit", limit);
    result.put("offset", offset);
    result.put("maxHits", maxHits);
    ArrayNode rankingArr = Json.newArray();
    // Each prediction knows how to serialize itself.
    for (Prediction prediction : ranking) {
        rankingArr.add(prediction.toJson());
    }
    result.set("ranking", rankingArr);
    if (params != null) {
        result.set("params", params);
    }
    return result;
}
 
Example 12
Source File: RedisVariableSpace.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Stores one row of a vector variable in Redis under a name-and-index key.
 * The stored array is [index, entry_0, ..., entry_{dim-1}].
 *
 * @param name  the variable name
 * @param index the row index within the variable
 * @param var   the vector value to store
 */
public void setVectorVarByNameIndex(String name, int index, RealVector var) {
    // String concatenation converts the int directly; no need to box via Integer.valueOf.
    String varIdxName = "IDX_V_" + name + "_" + index;
    ArrayNode values = Json.newArray();
    values.add(index);
    for (int i = 0; i < var.getDimension(); i++) {
        values.add(var.getEntry(i));
    }
    redisService.setValue(spaceIdentifier, varIdxName, values);
}
 
Example 13
Source File: RedisVariableSpace.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Stores one scalar variable value in Redis under a name-and-index key.
 * The stored array is [index, value].
 *
 * @param name  the variable name
 * @param index the index within the variable
 * @param var   the scalar value to store
 */
public void setScalarVarByNameIndex(String name, int index, double var) {
    // String concatenation converts the int directly; no need to box via Integer.valueOf.
    String varIdxName = "IDX_S_" + name + "_" + index;
    ArrayNode values = Json.newArray();
    values.add(index);
    values.add(var);
    redisService.setValue(spaceIdentifier, varIdxName, values);
}
 
Example 14
Source File: CatIndexer.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Builds a DAO configuration that aggregates the DAO configs of every child indexer.
 *
 * @param requestContext the current request context
 * @return a config object with the child DAO configs under indexersDaoConfigKey
 *         and the DAO name under daoNameKey
 */
public ObjectNode getIndexedDataDAOConfig(RequestContext requestContext) {
    ObjectNode config = Json.newObject();
    ArrayNode daoArray = Json.newArray();
    // Collect each child indexer's DAO config.
    for (String indexerName : indexerNames) {
        daoArray.add(configService.getIndexer(indexerName, requestContext)
                .getIndexedDataDAOConfig(requestContext));
    }
    config.set(indexersDaoConfigKey, daoArray);
    config.put(daoNameKey, daoName);
    return config;
}
 
Example 15
Source File: ESBasedJoinExpander.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Expands each entity in the initial result by joining in fields fetched from
 * ElasticSearch. For every configured join (type, keys, fields), entities are looked
 * up by their key values and, on a hit, the configured fields from the first matching
 * search hit are merged into the entity in place. Entities with no match are left
 * unchanged and a warning is logged.
 *
 * @param initialResult  the entities to expand (mutated in place)
 * @param requestContext the current request context (unused here)
 * @return the same initialResult list, with entities mutated
 */
public List<ObjectNode> expand(List<ObjectNode> initialResult,
                              RequestContext requestContext) {
    // Copy the entities into an ArrayNode because the search service takes JSON input.
    ArrayNode arr = Json.newArray();
    for (ObjectNode one : initialResult) {
        arr.add(one);
    }
    for (Configuration config : configList) {
        String type = config.getString("type");
        List<String> keys = config.getStringList("keys");
        List<String> entityFields = config.getStringList("fields");
        // Batch lookup: key-value map of each entity -> the search hits for that key.
        Map<Map<String, String>, List<SearchHit>> keyVals = elasticSearchService
                .searchFieldsByKeys(elasticSearchIndex, type, keys,
                        entityFields, arr);
        for (ObjectNode entity : initialResult) {
            Map<String, String> keyVal = IOUtilities.getKeyValueFromEntity(entity,
                    keys);
            // Only join when the entity actually has key values and the lookup hit something.
            if (keyVals.containsKey(keyVal) && keyVal.size() > 0 && keyVals.get(keyVal) != null) {
                // Merge fields from the first hit into the entity in place.
                ExpanderUtilities.parseEntityFromSearchHit(entityFields,
                        null, keyVals.get(keyVal).get(0), entity);
            } else {
                logger.warn("Can not find the key {} while joining: {}", keyVal.toString(),
                        entity.toString());
            }
        }
    }
    return initialResult;
}
 
Example 16
Source File: ResponsePacker.java    From samantha with MIT License 5 votes vote down vote up
/**
 * Packs a list of predictions into the response JSON, together with the predictor's
 * configuration and the engine name.
 *
 * @param predictor       the predictor whose configuration is embedded in the response
 * @param predictedResult the predictions to serialize
 * @param requestContext  the request, used for the engine name
 * @return the packed response object
 */
public JsonNode packPrediction(Predictor predictor, List<Prediction> predictedResult,
                               RequestContext requestContext) {
    ObjectNode response = Json.newObject();
    ArrayNode predictionArr = Json.newArray();
    for (Prediction prediction : predictedResult) {
        predictionArr.add(prediction.toJson());
    }
    response.set("predictions", predictionArr);
    response.set("configuration", Json.toJson(predictor.getConfig().asMap()));
    response.put("engine", requestContext.getEngineName());
    return response;
}
 
Example 17
Source File: CategoriesWithProductCountQuery.java    From commercetools-sunrise-java with Apache License 2.0 5 votes vote down vote up
@Nullable
@Override
public JsonNode getVariables() {
    final ObjectNode objectNode = Json.newObject();
    objectNode.put("limit", limit);
    objectNode.put("offset", offset);
    final ArrayNode sortArrayNode = Json.newArray();
    sort.forEach(querySort -> sortArrayNode.add(querySort.toSphereSort()));
    objectNode.set("sort", sortArrayNode);
    predicates.stream()
            .reduce(QueryPredicate::and)
            .ifPresent(joinedPredicates -> objectNode.put("where", joinedPredicates.toSphereQuery()));
    return objectNode;
}
 
Example 18
Source File: TensorFlowBatchIndexerTest.java    From samantha with MIT License 4 votes vote down vote up
@Test
public void testTensorFlowBatchIndex() {
    SamanthaConfigService configService = injector.instanceOf(SamanthaConfigService.class);
    MockIndexer mockIndexer = new MockIndexer(
            config, configService, injector, "daoConfig", config);
    SpaceProducer spaceProducer = injector.instanceOf(SpaceProducer.class);
    List<FeatureExtractor> featureExtractors = new ArrayList<>();
    FeatureExtractor itemExtractor = new SeparatedStringExtractor(
            "ITEM", "item", "item", "\\|",
            false, null, null
    );
    featureExtractors.add(itemExtractor);
    FeatureExtractor attrExtractor = new SeparatedStringExtractor(
            "ATTR", "attr", "attr", "\\|",
            false, "null", null
    );
    featureExtractors.add(attrExtractor);
    FeatureExtractor sizeExtractor = new SeparatedStringSizeExtractor(
            "SEQ_LEN", "item", "sequence_length",
            "|", null
    );
    featureExtractors.add(sizeExtractor);
    TensorFlowModel model = new TensorFlowModelProducer(spaceProducer)
            .createTensorFlowModelModelFromGraphDef(
                    "name", SpaceMode.DEFAULT, "shouldNotExist.graph",
                    null, new ArrayList<>(), null,
                    Lists.newArrayList("ITEM", "ATTR", "SEQ_LEN"),
                    featureExtractors, "loss", "update",
                    "output", "init", "top_k",
                    "topKId", "topKValue", "ITEM");
    TensorFlowBatchIndexer batchIndexer = new TensorFlowBatchIndexer(
            configService, config, injector, config, "daoConfig", mockIndexer,
            model, 1, "tstamp");
    ArrayNode batch = Json.newArray();
    ObjectNode user1 = Json.newObject();
    user1.put("item", "20|49|10|2|4");
    user1.put("attr", "jid|cjk|je|je|cjk");
    batch.add(user1);
    ObjectNode user2 = Json.newObject();
    user2.put("item", "14|19|2|5|20|15|2");
    user2.put("attr", "cjk|mn|je|lk|jid|null|je");
    batch.add(user2);
    RequestContext requestContext = new RequestContext(Json.newObject(), "test");
    batchIndexer.index(batch, requestContext);
    ArrayNode indexed = mockIndexer.getIndexed();
    assertEquals("1,2,3,4,5,6,7,4,8,1,9,4", indexed.get(0).get("item_idx").asText());
    assertEquals("1,2,3,3,2,2,4,3,5,1,6,3", indexed.get(0).get("attr_idx").asText());
    assertEquals("5.0,7.0", indexed.get(0).get("sequence_length_val").asText());
    batch.removeAll();
    indexed.removeAll();
    ObjectNode item1 = Json.newObject();
    item1.put("item", "20");
    item1.put("attr", "jid");
    batch.add(item1);
    ObjectNode item2 = Json.newObject();
    item2.put("item", "15");
    batch.add(item2);
    ObjectNode item3 = Json.newObject();
    item3.put("item", "40");
    item3.put("attr", "cjk");
    batch.add(item3);
    ObjectNode item4 = Json.newObject();
    item4.put("item", "41");
    item4.put("attr", "djkfds");
    batch.add(item4);
    batchIndexer.index(batch, requestContext);
    assertEquals("1,9,10,11", indexed.get(0).get("item_idx").asText());
    assertEquals("1,6,2,7", indexed.get(0).get("attr_idx").asText());
}