Java Code Examples for org.elasticsearch.rest.RestRequest#paramAsInt()
The following examples show how to use
org.elasticsearch.rest.RestRequest#paramAsInt().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: SetupAction.java From zentity with Apache License 2.0 | 5 votes |
@Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { // Parse request Boolean pretty = restRequest.paramAsBoolean("pretty", false); int numberOfShards = restRequest.paramAsInt("number_of_shards", 1); int numberOfReplicas = restRequest.paramAsInt("number_of_replicas", 1); Method method = restRequest.method(); return channel -> { try { if (method == POST) { createIndex(client, numberOfShards, numberOfReplicas); XContentBuilder content = XContentFactory.jsonBuilder(); if (pretty) content.prettyPrint(); content.startObject().field("acknowledged", true).endObject(); channel.sendResponse(new BytesRestResponse(RestStatus.OK, content)); } else { throw new NotImplementedException("Method and endpoint not implemented."); } } catch (NotImplementedException e) { channel.sendResponse(new BytesRestResponse(channel, RestStatus.NOT_IMPLEMENTED, e)); } }; }
Example 2
Source File: RestSimpleFeatureStore.java From elasticsearch-learning-to-rank with Apache License 2.0 | 5 votes |
RestChannelConsumer search(NodeClient client, String type, String indexName, RestRequest request) { String prefix = request.param("prefix"); int from = request.paramAsInt("from", 0); int size = request.paramAsInt("size", 20); BoolQueryBuilder qb = boolQuery().filter(termQuery("type", type)); if (prefix != null && !prefix.isEmpty()) { qb.must(matchQuery("name.prefix", prefix)); } return (channel) -> client.prepareSearch(indexName) .setTypes(IndexFeatureStore.ES_TYPE) .setQuery(qb) .setSize(size) .setFrom(from) .execute(new RestStatusToXContentListener<>(channel)); }
Example 3
Source File: TasteSearchRestAction.java From elasticsearch-taste with Apache License 2.0 | 5 votes |
Info(final RestRequest request) { size = request.paramAsInt("size", 10); from = request.paramAsInt("from", 0); targetIndex = request.param("index"); targetType = request.param("type"); userIndex = request.param(TasteConstants.REQUEST_PARAM_USER_INDEX, targetIndex); userType = request.param(TasteConstants.REQUEST_PARAM_USER_TYPE, TasteConstants.USER_TYPE); itemIndex = request.param(TasteConstants.REQUEST_PARAM_ITEM_INDEX, targetIndex); itemType = request.param(TasteConstants.REQUEST_PARAM_ITEM_TYPE, TasteConstants.ITEM_TYPE); userIdField = request.param( TasteConstants.REQUEST_PARAM_USER_ID_FIELD, TasteConstants.USER_ID_FIELD); itemIdField = request.param( TasteConstants.REQUEST_PARAM_ITEM_ID_FIELD, TasteConstants.ITEM_ID_FIELD); timestampField = request.param( TasteConstants.REQUEST_PARAM_TIMESTAMP_FIELD, TasteConstants.TIMESTAMP_FIELD); objectType = request.param("objectType"); if (USER.equals(objectType)) { idIndex = userIndex; idType = userType; idField = request.param(TasteConstants.REQUEST_PARAM_ID_FIELD, userIdField); } else if (ITEM.equals(objectType)) { idIndex = itemIndex; idType = itemType; idField = request.param(TasteConstants.REQUEST_PARAM_ID_FIELD, itemIdField); } targetIdField = request.param( TasteConstants.REQUEST_PARAM_TARGET_ID_FIELD, idField); }
Example 4
Source File: RestDataAction.java From elasticsearch-dataformat with Apache License 2.0 | 5 votes |
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { SearchRequest searchRequest = new SearchRequest(); request.withContentOrSourceParamParserOrNull( parser -> RestSearchAction.parseSearchRequest(searchRequest, request, parser, size -> searchRequest.source().size(size))); if (request.paramAsInt("size", -1) == -1) { searchRequest.source().size(100); } final String file = request.param("file"); final long limitBytes; String limitParamStr = request.param("limit"); if (Strings.isNullOrEmpty(limitParamStr)) { limitBytes = defaultLimit; } else { if (limitParamStr.endsWith("%")) { limitParamStr = limitParamStr.substring(0, limitParamStr.length() - 1); } limitBytes = (long) (maxMemory * (Float.parseFloat(limitParamStr) / 100F)); } final ContentType contentType = getContentType(request); if (contentType == null) { final String msg = "Unknown content type:" + request.header("Content-Type"); throw new IllegalArgumentException(msg); } final DataContent dataContent = contentType.dataContent(client, request); return channel -> client.search(searchRequest, new SearchResponseListener( channel, file, limitBytes, dataContent)); }
Example 5
Source File: KafkaStreamRestHandler.java From elasticsearch-rest-command with The Unlicense | 4 votes |
@Override protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { final String topic = request.param("topic", ""); final boolean schema = request.paramAsBoolean("schema", false); final String master = request.param("masterAddress", "local"); final String hdfs = request.param("hdfs", "hdfs://localhost:50070"); final String memory = request.param("memory", "2g"); final String appName = request.param("appName", "appName-"+topic); final int duration = request.paramAsInt("duration", 1000); Thread exec = new Thread(new Runnable(){ @Override public void run() { SparkConf sparkConf = new SparkConf().setAppName(appName).setMaster(master).set("spark.executor.memory", memory); JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(duration)); Map<String, Integer> topicMap = new HashMap<String, Integer>(); topicMap.put(topic, 3); JavaPairReceiverInputDStream<String, byte[]> kafkaStream = KafkaUtils.createStream(jssc, String.class, byte[].class, kafka.serializer.DefaultDecoder.class, kafka.serializer.DefaultDecoder.class, null, topicMap, StorageLevel.MEMORY_ONLY()); //JobConf confHadoop = new JobConf(); //confHadoop.set("mapred.output.compress", "true"); //confHadoop.set("mapred.output.compression.codec", "com.hadoop.compression.lzo.LzopCodec"); kafkaStream.saveAsHadoopFiles(hdfs, "seq", Text.class, BytesWritable.class, KafkaStreamSeqOutputFormat.class); topicContextMap.put(topic, jssc); jssc.start(); jssc.awaitTermination(); } }); exec.start(); channel.sendResponse(new BytesRestResponse(RestStatus.OK, String.format("{\"topic\":\"%s\"}", topic))); }