Java Code Examples for org.apache.calcite.tools.Frameworks#getPlanner()
The following examples show how to use org.apache.calcite.tools.Frameworks#getPlanner(). Each example notes its source file, the project it comes from, and that project's license.
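Before the individual examples, here is a minimal sketch of the workflow most of them share: build a FrameworkConfig, obtain a Planner from Frameworks.getPlanner(), then parse, validate, and convert a statement to relational algebra. The class name and the VALUES statement are placeholders of my own, not taken from any of the projects below; newer examples call planner.rel(validatedNode).rel, while some older ones use the since-deprecated planner.convert(validatedNode).

import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;

public class GetPlannerSketch {
  public static void main(String[] args) throws Exception {
    // An empty root schema; real applications register tables, views or sub-schemas here.
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .defaultSchema(rootSchema)
        .build();
    // Planner is AutoCloseable, so try-with-resources releases it when done.
    try (Planner planner = Frameworks.getPlanner(config)) {
      // "VALUES 1" needs no tables, so it validates against the empty schema.
      SqlNode parsed = planner.parse("VALUES 1");
      SqlNode validated = planner.validate(parsed);
      RelNode rel = planner.rel(validated).rel;
      System.out.println(RelOptUtil.toString(rel));
    }
  }
}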
Example 1
Source File: StreamlineSqlImpl.java From streamline with Apache License 2.0
@Override
public void execute(Iterable<String> statements,
    ChannelHandler result) throws Exception {
  Map<String, DataSource> dataSources = new HashMap<>();
  for (String sql : statements) {
    StreamlineParser parser = new StreamlineParser(sql);
    SqlNode node = parser.impl().parseSqlStmtEof();
    if (node instanceof SqlCreateTable) {
      handleCreateTable((SqlCreateTable) node, dataSources);
    } else if (node instanceof SqlCreateFunction) {
      handleCreateFunction((SqlCreateFunction) node);
    } else {
      FrameworkConfig config = buildFrameWorkConfig();
      Planner planner = Frameworks.getPlanner(config);
      SqlNode parse = planner.parse(sql);
      SqlNode validate = planner.validate(parse);
      RelNode tree = planner.convert(validate);
      PlanCompiler compiler = new PlanCompiler(typeFactory);
      AbstractValuesProcessor proc = compiler.compile(tree);
      proc.initialize(dataSources, result);
    }
  }
}
Example 2
Source File: LexEscapeTest.java From calcite with Apache License 2.0
private static Planner getPlanner(List<RelTraitDef> traitDefs,
    Config parserConfig, Program... programs) {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  rootSchema.add("TMP", new AbstractTable() {
    @Override public RelDataType getRowType(RelDataTypeFactory typeFactory) {
      return typeFactory.createStructType(
          ImmutableList.of(typeFactory.createSqlType(SqlTypeName.VARCHAR),
              typeFactory.createSqlType(SqlTypeName.INTEGER)),
          ImmutableList.of("localtime", "current_timestamp"));
    }
  });
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(parserConfig)
      .defaultSchema(rootSchema)
      .traitDefs(traitDefs)
      .programs(programs)
      .operatorTable(SqlStdOperatorTable.instance())
      .build();
  return Frameworks.getPlanner(config);
}
Example 3
Source File: RelBuilderTest.java From calcite with Apache License 2.0
@Test void testExpandViewShouldKeepAlias() throws SQLException {
  try (Connection connection = DriverManager.getConnection("jdbc:calcite:")) {
    final Frameworks.ConfigBuilder configBuilder =
        expandingConfig(connection);
    final RelOptTable.ViewExpander viewExpander =
        (RelOptTable.ViewExpander) Frameworks.getPlanner(configBuilder.build());
    configBuilder.context(Contexts.of(viewExpander));
    final RelBuilder builder = RelBuilder.create(configBuilder.build());
    RelNode node =
        builder.scan("MYVIEW")
            .project(
                builder.field(1, "MYVIEW", "EMPNO"),
                builder.field(1, "MYVIEW", "ENAME"))
            .build();
    String expected = "LogicalProject(EMPNO=[$0], ENAME=[$1])\n"
        + "  LogicalFilter(condition=[=(1, 1)])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    assertThat(node, hasTree(expected));
  }
}
Example 4
Source File: SqlWorker.java From quark with Apache License 2.0
private Planner buildPlanner(QueryContext context) {
  final List<RelTraitDef> traitDefs = new ArrayList<RelTraitDef>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelCollationTraitDef.INSTANCE);
  final ChainedSqlOperatorTable opTab =
      new ChainedSqlOperatorTable(
          ImmutableList.of(SqlStdOperatorTable.instance(),
              HiveSqlOperatorTable.instance(),
              catalogReader));
  FrameworkConfig config = Frameworks.newConfigBuilder() //
      .parserConfig(SqlParser.configBuilder()
          .setQuotedCasing(Casing.UNCHANGED)
          .setUnquotedCasing(Casing.TO_UPPER)
          .setQuoting(Quoting.DOUBLE_QUOTE)
          .build()) //
      .defaultSchema(context.getDefaultSchema()) //
      .operatorTable(opTab) //
      .traitDefs(traitDefs) //
      .convertletTable(StandardConvertletTable.INSTANCE) //
      .programs(getPrograms()) //
      .typeSystem(RelDataTypeSystem.DEFAULT) //
      .build();
  return Frameworks.getPlanner(config);
}
Example 5
Source File: TableEnv.java From marble with Apache License 2.0
protected RelRoot getSqlPlanRel(String sql) throws Throwable {
  try (Planner planner = Frameworks.getPlanner(frameworkConfig)) {
    RelRoot root;
    final SqlNode parsedSqlNode = planner.parse(sql);
    final Pair<SqlNode, RelDataType> validatedSqlNodeAndType = planner
        .validateAndGetType(parsedSqlNode);
    root = planner.rel(validatedSqlNodeAndType.getKey());
    final Program program = createProgram();
    // getDesiredTraits
    final RelTraitSet desiredTraits = root.rel.getTraitSet()
        .replace(EnumerableConvention.INSTANCE)
        .replace(root.collation)
        .simplify();
    RelNode logicalRelNode = root.rel;
    final RelNode optimizedRelNode = program.run(
        root.rel.getCluster().getPlanner(),
        logicalRelNode,
        desiredTraits,
        Collections.emptyList(),
        Collections.emptyList());
    root = root.withRel(optimizedRelNode);
    return root;
  }
}
Example 6
Source File: RuleParser.java From streamline with Apache License 2.0
public void parse() {
  try {
    SchemaPlus schema = Frameworks.createRootSchema(true);
    FrameworkConfig config =
        Frameworks.newConfigBuilder().defaultSchema(schema).build();
    Planner planner = Frameworks.getPlanner(config);
    SqlSelect sqlSelect = (SqlSelect) planner.parse(sql);
    // FROM
    streams = parseStreams(sqlSelect);
    // SELECT
    projection = parseProjection(sqlSelect);
    // WHERE
    condition = parseCondition(sqlSelect);
    // GROUP BY
    groupBy = parseGroupBy(sqlSelect);
    // HAVING
    having = parseHaving(sqlSelect);
  } catch (Exception ex) {
    LOG.error("Got Exception while parsing rule {}", sql);
    throw new RuntimeException(ex);
  }
}
Example 7
Source File: SQLExecEnvironment.java From attic-apex-malhar with Apache License 2.0
/**
 * This is the main method that takes a SQL statement as input and constructs a DAG using
 * constructs registered with this {@link SQLExecEnvironment}.
 *
 * @param sql SQL statement that should be converted to a DAG.
 */
public void executeSQL(DAG dag, String sql) {
  FrameworkConfig config = buildFrameWorkConfig();
  Planner planner = Frameworks.getPlanner(config);
  try {
    logger.info("Parsing SQL statement: {}", sql);
    SqlNode parsedTree = planner.parse(sql);
    SqlNode validatedTree = planner.validate(parsedTree);
    RelNode relationalTree = planner.rel(validatedTree).rel;
    logger.info("RelNode relationalTree generated from SQL statement is:\n {}",
        Util.toLinux(RelOptUtil.toString(relationalTree)));
    RelNodeVisitor visitor = new RelNodeVisitor(dag, typeFactory);
    visitor.traverse(relationalTree);
  } catch (Exception e) {
    throw Throwables.propagate(e);
  } finally {
    planner.close();
  }
}
Example 8
Source File: RelBuilderTest.java From calcite with Apache License 2.0
/**
 * Ensures that relational algebra ({@link RelBuilder}) works with SQL views.
 *
 * <p>This test currently fails (thus ignored).
 */
@Test void testExpandViewInRelBuilder() throws SQLException {
  try (Connection connection = DriverManager.getConnection("jdbc:calcite:")) {
    final Frameworks.ConfigBuilder configBuilder =
        expandingConfig(connection);
    final RelOptTable.ViewExpander viewExpander =
        (RelOptTable.ViewExpander) Frameworks.getPlanner(configBuilder.build());
    configBuilder.context(Contexts.of(viewExpander));
    final RelBuilder builder = RelBuilder.create(configBuilder.build());
    RelNode node = builder.scan("MYVIEW").build();
    int count = 0;
    try (PreparedStatement statement =
             connection.unwrap(RelRunner.class).prepare(node);
         ResultSet resultSet = statement.executeQuery()) {
      while (resultSet.next()) {
        count++;
      }
    }
    assertTrue(count > 1);
  }
}
Example 9
Source File: SamzaSqlQueryParser.java From samza with Apache License 2.0
private static Planner createPlanner() {
  Connection connection;
  SchemaPlus rootSchema;
  try {
    JavaTypeFactory typeFactory = new SamzaSqlJavaTypeFactoryImpl();
    SamzaSqlDriver driver = new SamzaSqlDriver(typeFactory);
    DriverManager.deregisterDriver(DriverManager.getDriver("jdbc:calcite:"));
    DriverManager.registerDriver(driver);
    connection = driver.connect("jdbc:calcite:", new Properties());
    CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
    rootSchema = calciteConnection.getRootSchema();
  } catch (SQLException e) {
    throw new SamzaException(e);
  }

  final List<RelTraitDef> traitDefs = new ArrayList<>();
  traitDefs.add(ConventionTraitDef.INSTANCE);
  traitDefs.add(RelCollationTraitDef.INSTANCE);

  FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.configBuilder().setLex(Lex.JAVA).build())
      .defaultSchema(rootSchema)
      .operatorTable(SqlStdOperatorTable.instance())
      .traitDefs(traitDefs)
      .context(Contexts.EMPTY_CONTEXT)
      .costFactory(null)
      .build();

  return Frameworks.getPlanner(frameworkConfig);
}
Example 10
Source File: TestCompilerUtils.java From streamline with Apache License 2.0
public static CalciteState sqlOverDummyTable(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  SchemaPlus schema = Frameworks.createRootSchema(true);
  JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("ID", SqlTypeName.INTEGER)
      .field("NAME", typeFactory.createType(String.class))
      .field("ADDR", typeFactory.createType(String.class))
      .build();
  Table table = streamableTable.stream();
  schema.add("FOO", table);
  schema.add("BAR", table);
  schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));
  List<SqlOperatorTable> sqlOperatorTables = new ArrayList<>();
  sqlOperatorTables.add(SqlStdOperatorTable.instance());
  sqlOperatorTables.add(new CalciteCatalogReader(CalciteSchema.from(schema),
      false,
      Collections.<String>emptyList(), typeFactory));
  SqlOperatorTable chainedSqlOperatorTable = new ChainedSqlOperatorTable(sqlOperatorTables);
  FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(schema)
      .operatorTable(chainedSqlOperatorTable)
      .build();
  Planner planner = Frameworks.getPlanner(config);
  SqlNode parse = planner.parse(sql);
  SqlNode validate = planner.validate(parse);
  RelNode tree = planner.convert(validate);
  System.out.println(RelOptUtil.toString(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
Example 11
Source File: ExtensionDdlExecutor.java From calcite with Apache License 2.0
/** Populates the table called {@code name} by executing {@code query}. */
protected static void populate(SqlIdentifier name, SqlNode query,
    CalcitePrepare.Context context) {
  // Generate, prepare and execute an "INSERT INTO table query" statement.
  // (It's a bit inefficient that we convert from SqlNode to SQL and back
  // again.)
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(
          Objects.requireNonNull(
              Schemas.subSchema(context.getRootSchema(),
                  context.getDefaultSchemaPath())).plus())
      .build();
  final Planner planner = Frameworks.getPlanner(config);
  try {
    final StringBuilder buf = new StringBuilder();
    final SqlPrettyWriter w =
        new SqlPrettyWriter(
            SqlPrettyWriter.config()
                .withDialect(CalciteSqlDialect.DEFAULT)
                .withAlwaysUseParentheses(false),
            buf);
    buf.append("INSERT INTO ");
    name.unparse(w, 0, 0);
    buf.append(" ");
    query.unparse(w, 0, 0);
    final String sql = buf.toString();
    final SqlNode query1 = planner.parse(sql);
    final SqlNode query2 = planner.validate(query1);
    final RelRoot r = planner.rel(query2);
    final PreparedStatement prepare = context.getRelRunner().prepare(r.rel);
    int rowCount = prepare.executeUpdate();
    Util.discard(rowCount);
    prepare.close();
  } catch (SqlParseException | ValidationException
      | RelConversionException | SQLException e) {
    throw new RuntimeException(e);
  }
}
Example 12
Source File: InterpreterTest.java From calcite with Apache License 2.0
private void reset() {
  rootSchema = Frameworks.createRootSchema(true);
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(
          CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.HR))
      .build();
  planner = Frameworks.getPlanner(config);
  dataContext = new MyDataContext(planner);
}
Example 13
Source File: ServerDdlExecutor.java From calcite with Apache License 2.0
/** Populates the table called {@code name} by executing {@code query}. */
static void populate(SqlIdentifier name, SqlNode query,
    CalcitePrepare.Context context) {
  // Generate, prepare and execute an "INSERT INTO table query" statement.
  // (It's a bit inefficient that we convert from SqlNode to SQL and back
  // again.)
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(context.getRootSchema().plus())
      .build();
  final Planner planner = Frameworks.getPlanner(config);
  try {
    final StringBuilder buf = new StringBuilder();
    final SqlWriterConfig writerConfig =
        SqlPrettyWriter.config().withAlwaysUseParentheses(false);
    final SqlPrettyWriter w = new SqlPrettyWriter(writerConfig, buf);
    buf.append("INSERT INTO ");
    name.unparse(w, 0, 0);
    buf.append(' ');
    query.unparse(w, 0, 0);
    final String sql = buf.toString();
    final SqlNode query1 = planner.parse(sql);
    final SqlNode query2 = planner.validate(query1);
    final RelRoot r = planner.rel(query2);
    final PreparedStatement prepare = context.getRelRunner().prepare(r.rel);
    int rowCount = prepare.executeUpdate();
    Util.discard(rowCount);
    prepare.close();
  } catch (SqlParseException | ValidationException
      | RelConversionException | SQLException e) {
    throw new RuntimeException(e);
  }
}
Example 14
Source File: ToLogicalConverterTest.java From calcite with Apache License 2.0
private static RelNode rel(String sql) {
  final Planner planner = Frameworks.getPlanner(frameworkConfig());
  try {
    SqlNode parse = planner.parse(sql);
    SqlNode validate = planner.validate(parse);
    return planner.rel(validate).rel;
  } catch (Exception e) {
    throw TestUtil.rethrow(e);
  }
}
Example 15
Source File: RexSqlStandardConvertletTableTest.java From calcite with Apache License 2.0
private RelNode convertSqlToRel(String sql, boolean simplifyRex) {
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(CalciteSchema.createRootSchema(false).plus())
      .parserConfig(SqlParser.configBuilder().build())
      .build();
  final Planner planner = Frameworks.getPlanner(config);
  try (Closer closer = new Closer()) {
    closer.add(Hook.REL_BUILDER_SIMPLIFY.addThread(Hook.propertyJ(simplifyRex)));
    final SqlNode parsed = planner.parse(sql);
    final SqlNode validated = planner.validate(parsed);
    return planner.rel(validated).rel;
  } catch (SqlParseException | RelConversionException | ValidationException e) {
    throw TestUtil.rethrow(e);
  }
}
Example 16
Source File: LexCaseSensitiveTest.java From calcite with Apache License 2.0
private static Planner getPlanner(List<RelTraitDef> traitDefs,
    SqlParser.Config parserConfig, Program... programs) {
  final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(parserConfig)
      .defaultSchema(CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.HR))
      .traitDefs(traitDefs)
      .programs(programs)
      .build();
  return Frameworks.getPlanner(config);
}
Example 17
Source File: TestCompilerUtils.java From streamline with Apache License 2.0
public static CalciteState sqlOverNestedTable(String sql)
    throws RelConversionException, ValidationException, SqlParseException {
  SchemaPlus schema = Frameworks.createRootSchema(true);
  JavaTypeFactory typeFactory = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
  StreamableTable streamableTable = new CompilerUtil.TableBuilderInfo(typeFactory)
      .field("ID", SqlTypeName.INTEGER)
      .field("MAPFIELD",
          typeFactory.createTypeWithNullability(
              typeFactory.createMapType(
                  typeFactory.createTypeWithNullability(
                      typeFactory.createSqlType(SqlTypeName.VARCHAR), true),
                  typeFactory.createTypeWithNullability(
                      typeFactory.createSqlType(SqlTypeName.INTEGER), true)),
              true))
      .field("NESTEDMAPFIELD",
          typeFactory.createTypeWithNullability(
              typeFactory.createMapType(
                  typeFactory.createTypeWithNullability(
                      typeFactory.createSqlType(SqlTypeName.VARCHAR), true),
                  typeFactory.createTypeWithNullability(
                      typeFactory.createMapType(
                          typeFactory.createTypeWithNullability(
                              typeFactory.createSqlType(SqlTypeName.VARCHAR), true),
                          typeFactory.createTypeWithNullability(
                              typeFactory.createSqlType(SqlTypeName.INTEGER), true)),
                      true)),
              true))
      .field("ARRAYFIELD",
          typeFactory.createTypeWithNullability(
              typeFactory.createArrayType(
                  typeFactory.createTypeWithNullability(
                      typeFactory.createSqlType(SqlTypeName.INTEGER), true),
                  -1L),
              true))
      .build();
  Table table = streamableTable.stream();
  schema.add("FOO", table);
  schema.add("BAR", table);
  schema.add("MYPLUS", ScalarFunctionImpl.create(MyPlus.class, "eval"));
  List<SqlOperatorTable> sqlOperatorTables = new ArrayList<>();
  sqlOperatorTables.add(SqlStdOperatorTable.instance());
  sqlOperatorTables.add(new CalciteCatalogReader(CalciteSchema.from(schema),
      false,
      Collections.<String>emptyList(), typeFactory));
  SqlOperatorTable chainedSqlOperatorTable = new ChainedSqlOperatorTable(sqlOperatorTables);
  FrameworkConfig config = Frameworks.newConfigBuilder()
      .defaultSchema(schema)
      .operatorTable(chainedSqlOperatorTable)
      .build();
  Planner planner = Frameworks.getPlanner(config);
  SqlNode parse = planner.parse(sql);
  SqlNode validate = planner.validate(parse);
  RelNode tree = planner.convert(validate);
  System.out.println(RelOptUtil.toString(tree, SqlExplainLevel.ALL_ATTRIBUTES));
  return new CalciteState(schema, tree);
}
Example 18
Source File: QueryPlanner.java From samza with Apache License 2.0
public RelRoot plan(String query) {
  try {
    Connection connection = DriverManager.getConnection("jdbc:calcite:");
    CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
    SchemaPlus rootSchema = calciteConnection.getRootSchema();
    registerSourceSchemas(rootSchema);

    List<SamzaSqlScalarFunctionImpl> samzaSqlFunctions = udfMetadata.stream()
        .map(x -> new SamzaSqlScalarFunctionImpl(x))
        .collect(Collectors.toList());

    final List<RelTraitDef> traitDefs = new ArrayList<>();
    traitDefs.add(ConventionTraitDef.INSTANCE);
    traitDefs.add(RelCollationTraitDef.INSTANCE);

    List<SqlOperatorTable> sqlOperatorTables = new ArrayList<>();
    sqlOperatorTables.add(new SamzaSqlOperatorTable());
    sqlOperatorTables.add(new SamzaSqlUdfOperatorTable(samzaSqlFunctions));

    // Using lenient so that !=, %, - are allowed.
    FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder()
        .parserConfig(SqlParser.configBuilder()
            .setLex(Lex.JAVA)
            .setConformance(SqlConformanceEnum.LENIENT)
            .setCaseSensitive(false) // Make Udfs case insensitive
            .build())
        .defaultSchema(rootSchema)
        .operatorTable(new ChainedSqlOperatorTable(sqlOperatorTables))
        .sqlToRelConverterConfig(SqlToRelConverter.Config.DEFAULT)
        .traitDefs(traitDefs)
        .context(Contexts.EMPTY_CONTEXT)
        .costFactory(null)
        .build();

    Planner planner = Frameworks.getPlanner(frameworkConfig);
    SqlNode sql = planner.parse(query);
    SqlNode validatedSql = planner.validate(sql);
    RelRoot relRoot = planner.rel(validatedSql);
    LOG.info("query plan:\n"
        + RelOptUtil.toString(relRoot.rel, SqlExplainLevel.ALL_ATTRIBUTES));
    return relRoot;
  } catch (Exception e) {
    String errorMsg = SamzaSqlValidator.formatErrorString(query, e);
    LOG.error(errorMsg, e);
    throw new SamzaException(errorMsg, e);
  }
}
Example 19
Source File: QueryPlanner.java From sql-gremlin with Apache License 2.0
public QueryPlanner(final FrameworkConfig frameworkConfig) {
  this.planner = Frameworks.getPlanner(frameworkConfig);
}
Example 20
Source File: TestSqlBracketlessSyntax.java From dremio-oss with Apache License 2.0
@Test
public void checkComplexExpressionParsing() throws Exception {
  FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.configBuilder()
          .setLex(Lex.MYSQL)
          .setIdentifierMaxLength(PlannerSettings.DEFAULT_IDENTIFIER_MAX_LENGTH)
          .setParserFactory(ParserImpl.FACTORY)
          .build())
      .defaultSchema(CalciteSchema
          .createRootSchema(false /* addMetadata */, false /* cache */).plus())
      .convertletTable(new ConvertletTable(new ContextInformation() {
        @Override
        public String getQueryUser() {
          return null;
        }

        @Override
        public String getCurrentDefaultSchema() {
          return null;
        }

        @Override
        public long getQueryStartTime() {
          return 0;
        }

        @Override
        public int getRootFragmentTimeZone() {
          return 0;
        }

        @Override
        public QueryId getLastQueryId() {
          return null;
        }

        @Override
        public void registerAdditionalInfo(AdditionalContext object) {
        }

        @Override
        public <T extends AdditionalContext> T getAdditionalInfo(Class<T> claz) {
          return null;
        }
      }))
      .build();
  Planner planner = Frameworks.getPlanner(config);

  SqlNode node = planner.parse(""
      + "select a[4].c \n"
      + "from x.y.z \n"
      + "where a.c.b = 5 and x[2] = 7 \n"
      + "group by d \n"
      + "having a.c < 5 \n"
      + "order by x.a.a.a.a.a");

  String expected = "SELECT `a`[4]['c']\n"
      + "FROM `x`.`y`.`z`\n"
      + "WHERE `a`.`c`['b'] = 5 AND `x`[2] = 7\n"
      + "GROUP BY `d`\n"
      + "HAVING `a`.`c` < 5\n"
      + "ORDER BY `x`.`a`['a']['a']['a']['a']";

  SqlNode rewritten = node.accept(new CompoundIdentifierConverter());
  String rewrittenQuery = rewritten.toString();

  DremioAssert.assertMultiLineStringEquals(expected, rewrittenQuery);
}