Java Code Examples for org.apache.solr.client.solrj.io.Tuple#getString()
The following examples show how to use org.apache.solr.client.solrj.io.Tuple#getString().
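As a quick orientation before the examples, the sketch below shows the pattern most of them share: drain a TupleStream until the EOF marker tuple arrives and read a string field from each tuple with getString(). This is an illustrative helper, not code from the Solr source; the class name, method name, and field name passed in are placeholders.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.TupleStream;

// Illustrative helper (not taken from the examples below): drain a TupleStream
// and collect the string value of one field from every tuple.
public class TupleStringExample {

  public static List<String> readStringField(TupleStream stream, String fieldName) throws IOException {
    List<String> values = new ArrayList<>();
    try {
      stream.open();
      while (true) {
        Tuple tuple = stream.read();
        if (tuple.EOF) {                          // the EOF marker tuple signals the end of the stream
          break;
        }
        values.add(tuple.getString(fieldName));   // getString() returns the field's value as a String
      }
    } finally {
      stream.close();
    }
    return values;
  }
}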
Example 1
Source File: EvalStream.java From lucene-solr with Apache License 2.0
public void open() throws IOException {
  try {
    stream.open();
    Tuple tuple = stream.read();
    String expr = tuple.getString("expr_s");

    if(expr == null) {
      throw new IOException("expr_s cannot be empty for the EvalStream");
    }

    evalStream = streamFactory.constructStream(expr);
    evalStream.setStreamContext(streamContext);
    evalStream.open();
  } finally {
    stream.close();
  }
}
Example 2
Source File: StreamExpressionTest.java From lucene-solr with Apache License 2.0
private void assertTopicSubject(TupleStream stream, String... textArray) throws Exception {
  long version = -1;
  int count = 0;
  List<String> texts = new ArrayList<>();
  for(String text : textArray) {
    texts.add(text);
  }

  try {
    stream.open();
    while (true) {
      Tuple tuple = stream.read();
      if (tuple.EOF) {
        break;
      } else {
        ++count;
        String subject = tuple.getString("subject");
        if (!texts.contains(subject)) {
          throw new Exception("Expecting subject in topic run not found:" + subject);
        }
      }
    }
  } finally {
    stream.close();
  }
}
Example 3
Source File: CsvStream.java From lucene-solr with Apache License 2.0
public Tuple read() throws IOException {
  Tuple tuple = originalStream.read();
  ++lineNumber;
  if(tuple.EOF) {
    return tuple;
  } else {
    String file = formatFile(tuple.getString("file"));
    String line = tuple.getString("line");
    if (file.equals(currentFile)) {
      String[] fields = split(line);
      if(fields.length != headers.length) {
        throw new IOException("Headers and lines must have the same number of fields [file:"+file+" line number:"+lineNumber+"]");
      }
      Tuple out = new Tuple();
      out.put("id", file+"_"+lineNumber);
      for(int i=0; i<headers.length; i++) {
        if(fields[i] != null && fields[i].length() > 0) {
          out.put(headers[i], fields[i]);
        }
      }
      return out;
    } else {
      this.currentFile = file;
      this.headers = split(line);
      this.lineNumber = 1; //New file so reset the lineNumber
      return read();
    }
  }
}
Example 4
Source File: TopicStream.java From lucene-solr with Apache License 2.0
public Tuple read() throws IOException {
  Tuple tuple = _read();

  if(tuple.EOF) {
    if(runCount > 0) {
      tuple.put("sleepMillis", 0);
    } else {
      tuple.put("sleepMillis", 1000);
    }
    return tuple;
  }

  ++count;
  ++runCount;
  if(checkpointEvery > -1 && (count % checkpointEvery) == 0) {
    persistCheckpoints();
  }

  long version = tuple.getLong(VERSION_FIELD);
  String slice = tuple.getString("_SLICE_");
  checkpoints.put(slice, version);

  tuple.remove("_SLICE_");
  tuple.remove("_CORE_");

  return tuple;
}
Example 5
Source File: SelectWithEvaluatorsTest.java From lucene-solr with Apache License 2.0
protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, int... ids) throws Exception {
  int i = 0;
  for(int val : ids) {
    Tuple t = tuples.get(i);
    String tip = t.getString(fieldName);
    if(!tip.equals(Integer.toString(val))) {
      throw new Exception("Found value:"+tip+" expecting:"+val);
    }
    ++i;
  }
  return true;
}
Example 6
Source File: StreamExpressionTest.java From lucene-solr with Apache License 2.0
protected boolean assertOrderOf(List<Tuple> tuples, String fieldName, int... ids) throws Exception {
  int i = 0;
  for(int val : ids) {
    Tuple t = tuples.get(i);
    String tip = t.getString(fieldName);
    if(!tip.equals(Integer.toString(val))) {
      throw new Exception("Found value:"+tip+" expecting:"+val);
    }
    ++i;
  }
  return true;
}
Example 7
Source File: DaemonStreamApiTest.java From lucene-solr with Apache License 2.0
private void checkAlive(String daemonName) throws InterruptedException, IOException {
  TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
  while (timeout.hasTimedOut() == false) {
    Tuple tuple = getTupleOfInterest(TestSQLHandler.mapParams("qt", "/stream", "action", "list"), daemonName);
    String state = tuple.getString("state");
    if (state.equals("RUNNABLE") || state.equals("WAITING") || state.equals("TIMED_WAITING")) {
      return;
    }
    TimeUnit.MILLISECONDS.sleep(100);
  }
  fail("State for daemon '" + daemonName + "' did not become RUNNABLE, WAITING or TIMED_WAITING in 10 seconds");
}
Example 8
Source File: LatLonVectorsEvaluator.java From lucene-solr with Apache License 2.0
@Override
public Object doWork(Object... objects) throws IOException {

  if (objects.length == 1) {
    //Just docs
    if(!(objects[0] instanceof List)) {
      throw new IOException("The latlonVectors function expects a list of Tuples as a parameter.");
    } else {
      @SuppressWarnings({"rawtypes"})
      List list = (List)objects[0];
      if(list.size() > 0) {
        Object o = list.get(0);
        if(!(o instanceof Tuple)) {
          throw new IOException("The latlonVectors function expects a list of Tuples as a parameter.");
        }
      } else {
        throw new IOException("Empty list was passed as a parameter to termVectors function.");
      }
    }

    @SuppressWarnings({"unchecked"})
    List<Tuple> tuples = (List<Tuple>) objects[0];

    double[][] locationVectors = new double[tuples.size()][2];
    List<String> features = new ArrayList<>();
    features.add("lat");
    features.add("lon");

    List<String> rowLabels = new ArrayList<>();

    for(int i=0; i< tuples.size(); i++) {
      Tuple tuple = tuples.get(i);
      String value = tuple.getString(field);
      String[] latLong = null;
      if(value.contains(",")) {
        latLong = value.split(",");
      } else {
        latLong = value.split(" ");
      }

      locationVectors[i][0] = Double.parseDouble(latLong[0].trim());
      locationVectors[i][1] = Double.parseDouble(latLong[1].trim());

      if(tuple.get("id") != null) {
        rowLabels.add(tuple.get("id").toString());
      }
    }

    Matrix matrix = new Matrix(locationVectors);
    matrix.setColumnLabels(features);
    matrix.setRowLabels(rowLabels);
    return matrix;
  } else {
    throw new IOException("The latlonVectors function takes a single positional parameter.");
  }
}
Example 9
Source File: ShortestPathStream.java From lucene-solr with Apache License 2.0
public List<Edge> call() {

  ModifiableSolrParams joinParams = new ModifiableSolrParams(queryParams);
  String fl = fromField + "," + toField;

  joinParams.set("fl", fl);
  joinParams.set("qt", "/export");
  joinParams.set(SORT, toField + " asc,"+fromField +" asc");

  StringBuffer nodeQuery = new StringBuffer();

  for(String node : nodes) {
    nodeQuery.append(node).append(" ");
  }

  String q = fromField + ":(" + nodeQuery.toString().trim() + ")";

  joinParams.set("q", q);
  TupleStream stream = null;
  try {
    stream = new UniqueStream(new CloudSolrStream(zkHost, collection, joinParams),
                              new MultipleFieldEqualitor(new FieldEqualitor(toField), new FieldEqualitor(fromField)));
    stream.setStreamContext(streamContext);
    stream.open();

    BATCH:
    while (true) {
      Tuple tuple = stream.read();
      if (tuple.EOF) {
        break BATCH;
      }

      String _toNode = tuple.getString(toField);
      String _fromNode = tuple.getString(fromField);

      Edge edge = new Edge(_fromNode, _toNode);
      edges.add(edge);
    }
  } catch (Exception e) {
    throw new RuntimeException(e);
  } finally {
    try {
      stream.close();
    } catch(Exception ce) {
      throw new RuntimeException(ce);
    }
  }
  return edges;
}
Example 10
Source File: StreamingTest.java From lucene-solr with Apache License 2.0
@Test
public void testTuple() throws Exception {

  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "5.1", "s_multi", "a", "s_multi", "b", "i_multi", "1", "i_multi", "2", "f_multi", "1.2", "f_multi", "1.3")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  StreamContext streamContext = new StreamContext();
  SolrClientCache solrClientCache = new SolrClientCache();
  streamContext.setSolrClientCache(solrClientCache);
  try {
    SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f,s_multi,i_multi,f_multi", "sort", "a_s asc");
    CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTIONORALIAS, sParams);
    stream.setStreamContext(streamContext);
    List<Tuple> tuples = getTuples(stream);
    Tuple tuple = tuples.get(0);

    String s = tuple.getString("a_s");
    assertEquals("hello0", s);

    long l = tuple.getLong("a_i");
    assertEquals(0, l);

    double d = tuple.getDouble("a_f");
    assertEquals(5.1, d, 0.001);

    List<String> stringList = tuple.getStrings("s_multi");
    assertEquals("a", stringList.get(0));
    assertEquals("b", stringList.get(1));

    List<Long> longList = tuple.getLongs("i_multi");
    assertEquals(1, longList.get(0).longValue());
    assertEquals(2, longList.get(1).longValue());

    List<Double> doubleList = tuple.getDoubles("f_multi");
    assertEquals(1.2, doubleList.get(0).doubleValue(), 0.001);
    assertEquals(1.3, doubleList.get(1).doubleValue(), 0.001);
  } finally {
    solrClientCache.close();
  }
}
Example 11
Source File: StreamExpressionTest.java From lucene-solr with Apache License 2.0
@Test
public void testDrillStream() throws Exception {

  new UpdateRequest()
      .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1")
      .add(id, "2", "a_s", "hello0", "a_i", "2", "a_f", "2")
      .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
      .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
      .add(id, "1", "a_s", "hello0", "a_i", "1", "a_f", "5")
      .add(id, "5", "a_s", "hello3", "a_i", "10", "a_f", "6")
      .add(id, "6", "a_s", "hello4", "a_i", "11", "a_f", "7")
      .add(id, "7", "a_s", "hello3", "a_i", "12", "a_f", "8")
      .add(id, "8", "a_s", "hello3", "a_i", "13", "a_f", "9")
      .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10")
      .commit(cluster.getSolrClient(), COLLECTIONORALIAS);

  List<Tuple> tuples;

  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  String expr = "rollup(select(drill("
      + " collection1, "
      + " q=\"*:*\", "
      + " fl=\"a_s, a_f\", "
      + " sort=\"a_s desc\", "
      + " rollup(input(), over=\"a_s\", count(*), sum(a_f))),"
      + " a_s, count(*) as cnt, sum(a_f) as saf),"
      + " over=\"a_s\","
      + " sum(cnt), sum(saf)"
      + ")";

  paramsLoc.set("expr", expr);
  paramsLoc.set("qt", "/stream");

  String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
  TupleStream solrStream = new SolrStream(url, paramsLoc);

  StreamContext context = new StreamContext();
  solrStream.setStreamContext(context);
  tuples = getTuples(solrStream);

  Tuple tuple = tuples.get(0);
  String bucket = tuple.getString("a_s");
  Double count = tuple.getDouble("sum(cnt)");
  Double saf = tuple.getDouble("sum(saf)");

  assertTrue(bucket.equals("hello4"));
  assertEquals(count.doubleValue(), 2, 0);
  assertEquals(saf.doubleValue(), 11, 0);

  tuple = tuples.get(1);
  bucket = tuple.getString("a_s");
  count = tuple.getDouble("sum(cnt)");
  saf = tuple.getDouble("sum(saf)");

  assertTrue(bucket.equals("hello3"));
  assertEquals(count.doubleValue(), 4, 0);
  assertEquals(saf.doubleValue(), 26, 0);

  tuple = tuples.get(2);
  bucket = tuple.getString("a_s");
  count = tuple.getDouble("sum(cnt)");
  saf = tuple.getDouble("sum(saf)");

  assertTrue(bucket.equals("hello0"));
  assertTrue(count.doubleValue() == 4);
  assertEquals(saf.doubleValue(), 18, 0);
}
Example 12
Source File: GraphMLResponseWriter.java From lucene-solr with Apache License 2.0
public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse res) throws IOException {
  Exception e1 = res.getException();
  if(e1 != null) {
    e1.printStackTrace(new PrintWriter(writer));
    return;
  }

  TupleStream stream = (TupleStream)req.getContext().get("stream");

  if(stream instanceof GraphHandler.DummyErrorStream) {
    GraphHandler.DummyErrorStream d = (GraphHandler.DummyErrorStream)stream;
    Exception e = d.getException();
    e.printStackTrace(new PrintWriter(writer));
    return;
  }

  Traversal traversal = (Traversal)req.getContext().get("traversal");
  PrintWriter printWriter = new PrintWriter(writer);
  try {
    stream.open();

    Tuple tuple = null;
    int edgeCount = 0;

    printWriter.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    printWriter.println("<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\" ");
    printWriter.println("xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" ");
    printWriter.print("xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns ");
    printWriter.println("http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd\">");
    printWriter.println("<graph id=\"G\" edgedefault=\"directed\">");

    while (true) {
      //Output the graph
      tuple = stream.read();
      if (tuple.EOF) {
        break;
      }

      String id = tuple.getString("node");

      if (traversal.isMultiCollection()) {
        id = tuple.getString("collection") + "." + id;
      }

      printWriter.write("<node id=\""+ xmlEscape(id)+"\"");

      List<String> outfields = new ArrayList<>();
      Iterator<Object> keys = tuple.getFields().keySet().iterator();
      while(keys.hasNext()) {
        String key = String.valueOf(keys.next());
        if(key.equals("node") || key.equals("ancestors") || key.equals("collection")) {
          continue;
        } else {
          outfields.add(key);
        }
      }

      if (outfields.size() > 0) {
        printWriter.println(">");
        for (String nodeAttribute : outfields) {
          Object o = tuple.get(nodeAttribute);
          if (o != null) {
            printWriter.println("<data key=\"" + xmlEscape(nodeAttribute) + "\">" + xmlEscape(o.toString()) + "</data>");
          }
        }
        printWriter.println("</node>");
      } else {
        printWriter.println("/>");
      }

      List<String> ancestors = tuple.getStrings("ancestors");

      if(ancestors != null) {
        for (String ancestor : ancestors) {
          ++edgeCount;
          printWriter.write("<edge id=\"" + edgeCount + "\" ");
          printWriter.write(" source=\"" + xmlEscape(ancestor) + "\" ");
          printWriter.println(" target=\"" + xmlEscape(id) + "\"/>");
        }
      }
    }

    printWriter.write("</graph></graphml>");
  } finally {
    stream.close();
  }
}
Example 13
Source File: ClassifyStream.java From lucene-solr with Apache License 2.0
@Override
public Tuple read() throws IOException {
  if (modelTuple == null) {

    modelTuple = modelStream.read();
    if (modelTuple == null || modelTuple.EOF) {
      throw new IOException("Model tuple not found for classify stream!");
    }

    termToIndex = new HashMap<>();

    List<String> terms = modelTuple.getStrings("terms_ss");
    for (int i = 0; i < terms.size(); i++) {
      termToIndex.put(terms.get(i), i);
    }

    idfs = modelTuple.getDoubles("idfs_ds");
    modelWeights = modelTuple.getDoubles("weights_ds");
  }

  Tuple docTuple = docStream.read();
  if (docTuple.EOF) return docTuple;

  String text = docTuple.getString(field);

  double tfs[] = new double[termToIndex.size()];

  TokenStream tokenStream = analyzer.tokenStream(analyzerField, text);
  CharTermAttribute termAtt = tokenStream.getAttribute(CharTermAttribute.class);
  tokenStream.reset();

  int termCount = 0;
  while (tokenStream.incrementToken()) {
    termCount++;
    if (termToIndex.containsKey(termAtt.toString())) {
      tfs[termToIndex.get(termAtt.toString())]++;
    }
  }

  tokenStream.end();
  tokenStream.close();

  List<Double> tfidfs = new ArrayList<>(termToIndex.size());
  tfidfs.add(1.0);
  for (int i = 0; i < tfs.length; i++) {
    if (tfs[i] != 0) {
      tfs[i] = 1 + Math.log(tfs[i]);
    }
    tfidfs.add(this.idfs.get(i) * tfs[i]);
  }

  double total = 0.0;
  for (int i = 0; i < tfidfs.size(); i++) {
    total += tfidfs.get(i) * modelWeights.get(i);
  }

  double score = total * ((float) (1.0 / Math.sqrt(termCount)));
  double positiveProb = sigmoid(total);

  docTuple.put("probability_d", positiveProb);
  docTuple.put("score_d", score);

  return docTuple;
}
Example 14
Source File: ModelTupleStreamIntegrationTest.java From deeplearning4j with Apache License 2.0
private void doTest(String expr, String[] expectedIds, Object[] expectedLefts, Object[] expectedRights) throws Exception {

  ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
  paramsLoc.set("expr", expr);
  paramsLoc.set("qt", "/stream");

  String url = cluster.getRandomJetty(random()).getBaseUrl().toString()+"/"+MY_COLLECTION_NAME;

  TupleStream tupleStream = new SolrStream(url, paramsLoc);

  StreamContext context = new StreamContext();
  tupleStream.setStreamContext(context);

  try {
    tupleStream.open();
    for (int ii=0; ii<expectedIds.length; ++ii) {
      final Tuple tuple = tupleStream.read();
      assertFalse(tuple.EOF);

      final String expectedId = expectedIds[ii];
      final String actualId = tuple.getString("id");
      assertEquals(expectedId, actualId);

      if (expectedLefts != null) {
        final Object expectedLeft = expectedLefts[ii];
        final String actualLeft = tuple.getString("left");
        assertEquals(tuple.getMap().toString(), expectedLeft, actualLeft);
      }

      if (expectedRights != null) {
        final Object expectedRight = expectedRights[ii];
        final String actualRight = tuple.getString("right");
        assertEquals(tuple.getMap().toString(), expectedRight, actualRight);
      }
    }

    final Tuple lastTuple = tupleStream.read();
    assertTrue(lastTuple.EOF);
  } finally {
    tupleStream.close();
  }
}