Java Code Examples for org.apache.zeppelin.interpreter.InterpreterResult#message()
The following examples show how to use
org.apache.zeppelin.interpreter.InterpreterResult#message() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You can also check out the related API usage on the sidebar.
Example 1
Source File: BaseLivyInterpreter.java From zeppelin with Apache License 2.0 | 6 votes |
/**
 * Builds a new result with the same status code as {@code result}, prepending a red
 * HTML warning when the previous Livy session expired and/or died, then copying every
 * message from the original result after the warnings.
 *
 * @param result         the result whose code and messages are carried over
 * @param sessionExpired whether the previous Livy session expired
 * @param sessionDead    whether the previous Livy session died
 * @return a new InterpreterResult containing the warnings followed by the original messages
 */
private InterpreterResult appendSessionExpireDead(InterpreterResult result,
                                                  boolean sessionExpired,
                                                  boolean sessionDead) {
  InterpreterResult merged = new InterpreterResult(result.code());
  final String expiredWarning =
      "<font color=\"red\">Previous livy session is expired, new livy session is created. "
          + "Paragraphs that depend on this paragraph need to be re-executed!</font>";
  final String deadWarning =
      "<font color=\"red\">Previous livy session is dead, new livy session is created. "
          + "Paragraphs that depend on this paragraph need to be re-executed!</font>";
  if (sessionExpired) {
    merged.add(InterpreterResult.Type.HTML, expiredWarning);
  }
  if (sessionDead) {
    merged.add(InterpreterResult.Type.HTML, deadWarning);
  }
  // Preserve the original payload after any session warnings.
  for (InterpreterResultMessage message : result.message()) {
    merged.add(message.getType(), message.getData());
  }
  return merged;
}
Example 2
Source File: RemoteInterpreterServer.java From zeppelin with Apache License 2.0 | 6 votes |
/**
 * Converts an in-process {@link InterpreterResult} into its Thrift-transportable
 * {@code RemoteInterpreterResult} form, serializing the paragraph config and both
 * GUI states to JSON strings.
 *
 * @param result  the interpreter result to convert
 * @param config  paragraph configuration, serialized via Gson
 * @param gui     paragraph GUI state
 * @param noteGui note-scoped GUI state
 * @return the wire-format result carrying the code name and all messages
 */
private RemoteInterpreterResult convert(InterpreterResult result,
                                        Map<String, Object> config,
                                        GUI gui,
                                        GUI noteGui) {
  List<RemoteInterpreterResultMessage> messages = new LinkedList<>();
  for (InterpreterResultMessage message : result.message()) {
    RemoteInterpreterResultMessage remoteMessage =
        new RemoteInterpreterResultMessage(message.getType().name(), message.getData());
    messages.add(remoteMessage);
  }
  return new RemoteInterpreterResult(
      result.code().name(),
      messages,
      gson.toJson(config),
      gui.toJson(),
      noteGui.toJson());
}
Example 3
Source File: LivySparkSQLInterpreter.java From zeppelin with Apache License 2.0 | 4 votes |
@Override public InterpreterResult interpret(String line, InterpreterContext context) { try { if (StringUtils.isEmpty(line)) { return new InterpreterResult(InterpreterResult.Code.SUCCESS, ""); } // use triple quote so that we don't need to do string escape. String sqlQuery = null; if (isSpark2) { sqlQuery = "spark.sql(\"\"\"" + line + "\"\"\").show(" + maxResult + ", " + truncate + ")"; } else { sqlQuery = "sqlContext.sql(\"\"\"" + line + "\"\"\").show(" + maxResult + ", " + truncate + ")"; } InterpreterResult result = sparkInterpreter.interpret(sqlQuery, context); if (result.code() == InterpreterResult.Code.SUCCESS) { InterpreterResult result2 = new InterpreterResult(InterpreterResult.Code.SUCCESS); for (InterpreterResultMessage message : result.message()) { // convert Text type to Table type. We assume the text type must be the sql output. This // assumption is correct for now. Ideally livy should return table type. We may do it in // the future release of livy. if (message.getType() == InterpreterResult.Type.TEXT) { List<String> rows = parseSQLOutput(message.getData()); result2.add(InterpreterResult.Type.TABLE, StringUtils.join(rows, "\n")); if (rows.size() >= (maxResult + 1)) { result2.add(ResultMessages.getExceedsLimitRowsMessage(maxResult, ZEPPELIN_LIVY_SPARK_SQL_MAX_RESULT)); } } else { result2.add(message.getType(), message.getData()); } } return result2; } else { return result; } } catch (Exception e) { LOGGER.error("Exception in LivySparkSQLInterpreter while interpret ", e); return new InterpreterResult(InterpreterResult.Code.ERROR, InterpreterUtils.getMostRelevantMessage(e)); } }
Example 4
Source File: InfluxDBInterpeterTest.java From zeppelin with Apache License 2.0 | 4 votes |
@Test public void testSigleTable() throws InterpreterException { InfluxDBInterpreter t = new InfluxDBInterpreter(properties); t.open(); //just for testing with real influxdb (not used in mock) String flux = "from(bucket: \"my-bucket\")\n" + " |> range(start:-1m)\n" + " |> filter(fn: (r) => r._measurement == \"cpu\")\n" + " |> filter(fn: (r) => r._field == \"usage_user\")\n" + " |> filter(fn: (r) => r.cpu == \"cpu-total\")\n" + " |> limit(n:5, offset: 0)" + " |> keep(columns: [\"_field\", \"_value\", \"_time\"])"; InterpreterContext context = InterpreterContext.builder() .setAuthenticationInfo(new AuthenticationInfo("testUser")) .build(); mockServer.enqueue(createResponse(SINGLE_TABLE_RESPONSE)); InterpreterResult interpreterResult = t.interpret(flux, context); // if prefix not found return ERROR and Prefix not found. assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); List<InterpreterResultMessage> message = interpreterResult.message(); Assert.assertEquals(1, message.size()); Assert.assertEquals(InterpreterResult.Type.TABLE, message.get(0).getType()); Assert.assertEquals("result\ttable\t_time\t_value\t_field\n" + "_result\t0\t2020-01-24T10:23:56Z\t12.114014251781473\tusage_user\n" + "_result\t0\t2020-01-24T10:23:57Z\t12.048493938257717\tusage_user\n" + "_result\t0\t2020-01-24T10:24:06Z\t12.715678919729932\tusage_user\n" + "_result\t0\t2020-01-24T10:24:07Z\t11.876484560570072\tusage_user\n" + "_result\t0\t2020-01-24T10:24:16Z\t10.044977511244378\tusage_user\n" + "_result\t0\t2020-01-24T10:24:17Z\t10.594702648675662\tusage_user\n" + "_result\t0\t2020-01-24T10:24:26Z\t12.092034512942353\tusage_user\n" + "_result\t0\t2020-01-24T10:24:27Z\t12.131065532766383\tusage_user\n" + "_result\t0\t2020-01-24T10:24:36Z\t14.332125452955141\tusage_user\n" + "_result\t0\t2020-01-24T10:24:37Z\t15.153788447111777\tusage_user\n", message.get(0).getData()); t.close(); }
Example 5
Source File: InfluxDBInterpeterTest.java From zeppelin with Apache License 2.0 | 4 votes |
/**
 * Verifies that a multi-table Flux response is split into one TABLE message per
 * result table (9 here), and spot-checks the first and last tables' contents.
 */
@Test
public void testMultiTable() throws InterpreterException {
  InfluxDBInterpreter t = new InfluxDBInterpreter(properties);
  t.open();

  //just for testing with real influxdb (not used in mock)
  String flux = "from(bucket: \"my-bucket\")\n"
      + " |> range(start: -1h)\n"
      + " |> filter(fn: (r) => r._measurement == \"cpu\")\n"
      + " |> filter(fn: (r) => r._field == \"usage_user\")\n"
      + " |> aggregateWindow(every: 1m, fn: mean)\n"
      + " |> limit(n:5, offset: 0)";

  InterpreterContext context = InterpreterContext.builder()
      .setAuthenticationInfo(new AuthenticationInfo("testUser"))
      .build();

  // Canned multi-table response; the interpreter never reaches a real InfluxDB.
  mockServer.enqueue(createResponse(MULTI_TABLE_RESPONSE));

  InterpreterResult interpreterResult = t.interpret(flux, context);

  // if prefix not found return ERROR and Prefix not found.
  // Fail with the full result text first for a more useful message on error.
  if (InterpreterResult.Code.ERROR.equals(interpreterResult.code())) {
    Assert.fail(interpreterResult.toString());
  }
  assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());

  List<InterpreterResultMessage> message = interpreterResult.message();
  // One TABLE message per Flux result table.
  Assert.assertEquals(9, message.size());
  message.forEach(m -> Assert.assertEquals(InterpreterResult.Type.TABLE, m.getType()));

  // First table: cpu-total aggregate rows.
  Assert.assertEquals(
      "result\ttable\t_start\t_stop\t_field\t_measurement\tcpu\thost\t_value\t_time\n"
          + "_result\t0\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu-total\tmacek.local\t12.381414297598637\t2020-01-24T09:28:00Z\n"
          + "_result\t0\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu-total\tmacek.local\t18.870254041431455\t2020-01-24T09:29:00Z\n"
          + "_result\t0\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu-total\tmacek.local\t26.64080311971415\t2020-01-24T09:30:00Z\n"
          + "_result\t0\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu-total\tmacek.local\t11.644120979499911\t2020-01-24T09:31:00Z\n"
          + "_result\t0\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu-total\tmacek.local\t16.046354351571846\t2020-01-24T09:32:00Z\n",
      message.get(0).getData());

  // Last table: per-core (cpu7) aggregate rows.
  Assert.assertEquals("result\ttable\t_start\t_stop\t_field\t_measurement\tcpu\thost\t_value"
          + "\t_time\n"
          + "_result\t8\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu7\tmacek.local\t3.4507517507517504\t2020-01-24T09:28:00Z\n"
          + "_result\t8\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu7\tmacek.local\t8.817554700888033\t2020-01-24T09:29:00Z\n"
          + "_result\t8\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu7\tmacek.local\t16.957243048909714\t2020-01-24T09:30:00Z\n"
          + "_result\t8\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu7\tmacek.local\t3.408601950268617\t2020-01-24T09:31:00Z\n"
          + "_result\t8\t2020-01-24T09:27:44.845218500Z\t2020-01-24T10:27:44.845218500Z\tusage_user"
          + "\tcpu\tcpu7\tmacek.local\t10.672760839427506\t2020-01-24T09:32:00Z\n",
      message.get(8).getData());

  t.close();
}
Example 6
Source File: RemoteInterpreterServer.java From zeppelin with Apache License 2.0 | 4 votes |
/**
 * Runs this paragraph's script in the wrapped interpreter and assembles the final
 * result: opens the interpreter lazily (running precode on first open), applies
 * registered hooks, interprets the script, then merges streaming output from
 * {@code context.out} with the interpreter's own result messages. Optionally saves
 * the textual result into the resource pool under the "saveAs" local property.
 *
 * @return the combined InterpreterResult; an ERROR result with the stack trace on any failure
 */
@Override
public InterpreterResult jobRun() throws Throwable {
  // Saved so the context classloader can be restored in the finally block.
  ClassLoader currentThreadContextClassloader = Thread.currentThread().getContextClassLoader();
  try {
    InterpreterContext.set(context);
    // clear the result of last run in frontend before running this paragraph.
    context.out.clear();
    InterpreterResult result = null;

    // Open the interpreter instance prior to calling interpret().
    // This is necessary because the earliest we can register a hook
    // is from within the open() method.
    LazyOpenInterpreter lazy = (LazyOpenInterpreter) interpreter;
    if (!lazy.isOpen()) {
      lazy.open();
      // Precode runs only on first open; its failure short-circuits interpretation below.
      result = lazy.executePrecode(context);
    }

    if (result == null || result.code() == Code.SUCCESS) {
      // Add hooks to script from registry.
      // note scope first, followed by global scope.
      // Here's the code after hooking:
      //   global_pre_hook
      //   note_pre_hook
      //   script
      //   note_post_hook
      //   global_post_hook
      processInterpreterHooks(context.getNoteId());
      processInterpreterHooks(null);
      LOGGER.debug("Script after hooks: " + script);
      result = interpreter.interpret(script, context);
    }

    // data from context.out is prepended to InterpreterResult if both defined
    context.out.flush();
    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();

    for (InterpreterResultMessage resultMessage : result.message()) {
      // only add non-empty InterpreterResultMessage
      if (!StringUtils.isBlank(resultMessage.getData())) {
        resultMessages.add(resultMessage);
      }
    }

    // Flatten message payloads for the optional resource-pool save below.
    List<String> stringResult = new ArrayList<>();
    for (InterpreterResultMessage msg : resultMessages) {
      if (msg.getType() == InterpreterResult.Type.IMG) {
        // Avoid dumping raw image bytes into the log.
        LOGGER.debug("InterpreterResultMessage: IMAGE_DATA");
      } else {
        LOGGER.debug("InterpreterResultMessage: " + msg.toString());
      }
      stringResult.add(msg.getData());
    }

    // put result into resource pool
    if (context.getLocalProperties().containsKey("saveAs")) {
      if (stringResult.size() == 1) {
        // Single message: store the bare string rather than a one-element list.
        LOGGER.info("Saving result into ResourcePool as single string: "
            + context.getLocalProperties().get("saveAs"));
        context.getResourcePool().put(
            context.getLocalProperties().get("saveAs"), stringResult.get(0));
      } else {
        LOGGER.info("Saving result into ResourcePool as string list: "
            + context.getLocalProperties().get("saveAs"));
        context.getResourcePool().put(
            context.getLocalProperties().get("saveAs"), stringResult);
      }
    }
    return new InterpreterResult(result.code(), resultMessages);
  } catch (Throwable e) {
    // Report any failure (including precode/interpret errors) as an ERROR result
    // rather than propagating, so the frontend can render the stack trace.
    return new InterpreterResult(Code.ERROR, ExceptionUtils.getStackTrace(e));
  } finally {
    Thread.currentThread().setContextClassLoader(currentThreadContextClassloader);
    InterpreterContext.remove();
  }
}