org.apache.calcite.sql.SqlDialect Java Examples
The following examples show how to use org.apache.calcite.sql.SqlDialect.
Each example is taken from an open-source project; the project and source file are noted above each snippet.
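Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: obtain a SqlDialect (here via DatabaseProduct, one of several approaches shown below), then use it to quote identifiers or to render a parsed SqlNode as dialect-specific SQL. The class name and query text are illustrative only, not taken from any of the projects below.

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;

public class SqlDialectBasics {
  public static void main(String[] args) throws SqlParseException {
    // Pick a built-in dialect; PostgreSQL is used here purely as an example.
    SqlDialect dialect = SqlDialect.DatabaseProduct.POSTGRESQL.getDialect();

    // Quote an identifier using the dialect's quoting convention (double quotes for PostgreSQL).
    System.out.println(dialect.quoteIdentifier("emps"));

    // Parse a query and unparse it back to SQL text in the chosen dialect.
    SqlNode node = SqlParser.create("select empid from emps where deptno = 10").parseQuery();
    System.out.println(node.toSqlString(dialect).getSql());
  }
}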
Example #1
Source File: QuerySqlStatisticProvider.java From calcite with Apache License 2.0 | 6 votes |
protected String toSql(RelNode rel, SqlDialect dialect) {
  final RelToSqlConverter converter = new RelToSqlConverter(dialect);
  SqlImplementor.Result result = converter.visitChild(0, rel);
  final SqlNode sqlNode = result.asStatement();
  final String sql = sqlNode.toSqlString(dialect).getSql();
  sqlConsumer.accept(sql);
  return sql;
}
Example #2
Source File: SqlConverter.java From kylin-on-parquet-v2 with Apache License 2.0 | 6 votes |
public String convertSql(String orig) {
  // for jdbc source, convert quote from backtick to double quote
  String converted = orig.replaceAll("`", "\"");

  if (!configurer.skipHandleDefault()) {
    String escapedDefault = SqlDialect.CALCITE
        .quoteIdentifier(configurer.useUppercaseDefault() ? "DEFAULT" : "default");
    converted = converted.replaceAll("(?i)default\\.", escapedDefault + "."); // use Calcite dialect to cater to SqlParser
    converted = converted.replaceAll("\"(?i)default\"\\.", escapedDefault + ".");
  }

  if (!configurer.skipDefaultConvert()) {
    try {
      SqlNode sqlNode = SqlParser.create(converted).parseQuery();
      sqlNode = sqlNode.accept(sqlNodeConverter);
      converted = sqlWriter.format(sqlNode);
    } catch (Throwable e) {
      logger.error("Failed to default convert sql, will use the input: {}", orig, e);
    } finally {
      sqlWriter.reset();
    }
  }

  converted = configurer.fixAfterDefaultConvert(converted);
  return converted;
}
Example #3
Source File: LatticeSuggesterTest.java From calcite with Apache License 2.0 | 6 votes |
@Test void testRedshiftDialect() throws Exception {
  final Tester t = new Tester().foodmart().withEvolve(true)
      .withDialect(SqlDialect.DatabaseProduct.REDSHIFT.getDialect())
      .withLibrary(SqlLibrary.POSTGRESQL);
  final String q0 = "select\n"
      + " CONCAT(\"fname\", ' ', \"lname\") as \"full_name\",\n"
      + " convert_timezone('UTC', 'America/Los_Angeles',\n"
      + " cast('2019-01-01 01:00:00' as timestamp)),\n"
      + " left(\"fname\", 1) as \"initial\",\n"
      + " to_date('2019-01-01', 'YYYY-MM-DD'),\n"
      + " to_timestamp('2019-01-01 01:00:00', 'YYYY-MM-DD HH:MM:SS'),\n"
      + " count(*) as c,\n"
      + " avg(\"total_children\" - \"num_children_at_home\")\n"
      + "from \"customer\" join \"sales_fact_1997\" using (\"customer_id\")\n"
      + "group by \"fname\", \"lname\"";
  t.addQuery(q0);
  assertThat(t.s.latticeMap.size(), is(1));
}
Example #4
Source File: JdbcCatalogSchema.java From calcite with Apache License 2.0 | 6 votes |
public static JdbcCatalogSchema create(
    SchemaPlus parentSchema,
    String name,
    DataSource dataSource,
    SqlDialectFactory dialectFactory,
    String catalog) {
  final Expression expression =
      parentSchema != null
          ? Schemas.subSchemaExpression(parentSchema, name, JdbcCatalogSchema.class)
          : Expressions.call(DataContext.ROOT,
              BuiltInMethod.DATA_CONTEXT_GET_ROOT_SCHEMA.method);
  final SqlDialect dialect = JdbcSchema.createDialect(dialectFactory, dataSource);
  final JdbcConvention convention = JdbcConvention.of(dialect, expression, name);
  return new JdbcCatalogSchema(dataSource, dialect, convention, catalog);
}
Example #5
Source File: FlatTableSqlQuoteUtils.java From kylin with Apache License 2.0 | 6 votes |
/**
 * Used to quote identifiers for JDBC ext job when quoting cc expr
 * @param tableDesc
 * @param sqlExpr
 * @param sqlDialect
 * @return
 */
public static String quoteIdentifierInSqlExpr(TableDesc tableDesc, String sqlExpr, SqlDialect sqlDialect) {
  String table = tableDesc.getName();
  boolean tableMatched = false;
  List<String> tabPatterns = getTableNameOrAliasPatterns(table);
  if (isIdentifierNeedToQuote(sqlExpr, table, tabPatterns)) {
    sqlExpr = quoteIdentifier(sqlExpr, table, tabPatterns, sqlDialect);
    tableMatched = true;
  }

  if (tableMatched) {
    for (ColumnDesc columnDesc : tableDesc.getColumns()) {
      String column = columnDesc.getName();
      List<String> colPatterns = getColumnNameOrAliasPatterns(column);
      if (isIdentifierNeedToQuote(sqlExpr, column, colPatterns)) {
        sqlExpr = quoteIdentifier(sqlExpr, column, colPatterns, sqlDialect);
      }
    }
  }

  return sqlExpr;
}
Example #6
Source File: ResultProcessor.java From quark with Apache License 2.0 | 6 votes |
public static String getParsedSql(RelNode relNode, SqlDialect dialect) throws SQLException {
  if (dialect.getDatabaseProduct() == SqlDialect.DatabaseProduct.HIVE) {
    final HepProgram program = new HepProgramBuilder()
        .addRuleInstance(JoinCalcTransposeRule.LEFT_CALC)
        .addRuleInstance(JoinCalcTransposeRule.RIGHT_CALC)
        .addRuleInstance(CalcMergeRule.INSTANCE)
        .build();
    final RelOptPlanner planner = relNode.getCluster().getPlanner();
    final HepPlanner hepPlanner = new HepPlanner(program, planner.getContext());
    hepPlanner.setRoot(relNode);
    relNode = hepPlanner.findBestExp();
  }
  RelToSqlConverter relToSqlConverter = new RelToSqlConverter(dialect);
  RelToSqlConverter.Result res = relToSqlConverter.visitChild(0, relNode);
  SqlNode sqlNode = res.asQuery();
  String result = sqlNode.toSqlString(dialect, false).getSql();
  return result.replace("\n", " ");
}
Example #7
Source File: JournalledJdbcSchema.java From calcite-sql-rewriter with Apache License 2.0 | 6 votes |
public static JournalledJdbcSchema create(
    SchemaPlus parentSchema,
    String name,
    Map<String, Object> operand) {
  DataSource dataSource;
  try {
    dataSource = parseDataSource(operand);
  } catch (Exception e) {
    throw new IllegalArgumentException("Error while reading dataSource", e);
  }
  String catalog = (String) operand.get("jdbcCatalog");
  String schema = (String) operand.get("jdbcSchema");
  Expression expression = null;
  if (parentSchema != null) {
    expression = Schemas.subSchemaExpression(parentSchema, name, JdbcSchema.class);
  }
  final SqlDialect dialect = createDialect(dataSource);
  final JdbcConvention convention = JdbcConvention.of(dialect, expression, name);
  return new JournalledJdbcSchema(dataSource, dialect, convention, catalog, schema, operand);
}
Example #8
Source File: SqlPrettyWriter.java From calcite with Apache License 2.0 | 5 votes |
@Deprecated public SqlPrettyWriter(
    SqlDialect dialect,
    boolean alwaysUseParentheses) {
  this(config().withDialect(Objects.requireNonNull(dialect))
      .withAlwaysUseParentheses(alwaysUseParentheses));
}
Example #9
Source File: FlatTableSqlQuoteUtils.java From kylin with Apache License 2.0 | 5 votes |
public static String quoteIdentifier(SourceDialect sourceDialect, String identifier) {
  if (!kylinConfig.enableHiveDdlQuote()) {
    return identifier;
  }
  SqlDialect specificSqlDialect = sqlDialectMap.get(sourceDialect.name());
  if (specificSqlDialect != null) {
    return specificSqlDialect.quoteIdentifier(identifier);
  }
  setQuote();
  return defaultDialect.quoteIdentifier(identifier);
}
Example #10
Source File: FlatTableSqlQuoteUtils.java From kylin with Apache License 2.0 | 5 votes |
static String quoteIdentifier(String sqlExpr, String identifier, List<String> identifierPatterns,
    SqlDialect sqlDialect) {
  String quotedIdentifier = quoteIdentifier(identifier.trim(), sqlDialect);
  for (String pattern : identifierPatterns) {
    Matcher matcher = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(sqlExpr);
    if (matcher.find()) {
      sqlExpr = matcher.replaceAll("$1" + quotedIdentifier + "$3");
    }
  }
  return sqlExpr;
}
Example #11
Source File: SqlConverter.java From kylin with Apache License 2.0 | 5 votes |
public String convertSql(String orig) {
  String converted = orig;

  if (!configurer.skipHandleDefault()) {
    String escapedDefault = SqlDialect.CALCITE
        .quoteIdentifier(configurer.useUppercaseDefault() ? "DEFAULT" : "default");
    converted = converted.replaceAll("(?i)default\\.", escapedDefault + "."); // use Calcite dialect to cater to SqlParser
    converted = converted.replaceAll("\"(?i)default\"\\.", escapedDefault + ".");
  }

  if (!configurer.skipDefaultConvert()) {
    String beforeConvert = converted;
    try {
      // calcite cannot recognize `, convert ` to " before parse
      converted = converted.replaceAll("`", "\"");
      SqlNode sqlNode = SqlParser.create(converted).parseQuery();
      sqlNode = sqlNode.accept(sqlNodeConverter);
      converted = sqlWriter.format(sqlNode);
    } catch (Throwable e) {
      logger.error("Failed to default convert sql, will use the input: {}", beforeConvert, e);
      // revert to beforeConvert when an exception occurs
      converted = beforeConvert;
    } finally {
      sqlWriter.reset();
    }
  }

  converted = configurer.fixAfterDefaultConvert(converted);
  return converted;
}
Example #12
Source File: DbTapDb.java From quark with Apache License 2.0 | 5 votes |
@Override
public SqlDialect getSqlDialect() {
  String type = this.getProductName();
  final SqlDialect dialect;
  switch (type.toUpperCase()) {
    case "HIVE":
      dialect = SqlDialect.getProduct("Hive", null).getDialect();
      break;
    case "REDSHIFT":
      dialect = SqlDialect.getProduct("REDSHIFT", null).getDialect();
      break;
    case "SQLSERVER":
    case "MYSQL":
      dialect = SqlDialect.getProduct("MySQL", null).getDialect();
      break;
    case "POSTGRESQL":
      dialect = SqlDialect.getProduct("PostgreSQL", null).getDialect();
      break;
    case "VERTICA":
      dialect = SqlDialect.getProduct("Vertica", null).getDialect();
      break;
    default:
      dialect = SqlDialect.getProduct("UNKNOWN", null).getDialect();
      break;
  }
  return dialect;
}
Example #13
Source File: QuarkTestUtil.java From quark with Apache License 2.0 | 5 votes |
public static void checkSqlParsing(String sql, Properties info, String expectedSql, SqlDialect dialect)
    throws QuarkException, SqlParseException {
  SqlQueryParser parser = new SqlQueryParser(info);
  SqlParser sqlParser = parser.getSqlParser(sql);
  SqlNode sqlNode = sqlParser.parseQuery();
  String finalQuery = sqlNode.toSqlString(dialect).getSql();
  assertEquals(expectedSql, finalQuery.replace("\n", " "));
}
Example #14
Source File: MaterializedViewJoinTest.java From quark with Apache License 2.0 | 5 votes |
@Ignore
@Test
public void testNoOptWithHiveOp() throws Exception {
  String sql = "select\n"
      + "to_date(qh.created_at) as dt, \n"
      + "count(qh.id) as num_queries\n"
      + "\n"
      + "from test_hist qh\n"
      + "join uinfo ui\n"
      + " on qh.qbol_user_id = ui.qu_id\n"
      + "join acc externals\n"
      + " on externals.id = ui.a_id\n"
      + "\n"
      + "where to_date(qh.created_at) >= date_sub(from_unixtime(unix_timestamp()),150)\n"
      + "and command_type = 'HiveCommand'\n"
      + "and qlog like '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%'\n"
      + "and customer_name like 'amogh'\n"
      + "\n"
      + "group by \n"
      + "to_date(qh.created_at)\n"
      + "\n"
      + "order by dt asc";

  SqlQueryParser parser = new SqlQueryParser(connInfo);
  final SqlQueryParser.SqlQueryParserResult result = parser.parse(sql);
  final String hiveQuery = ResultProcessor.getParsedSql(result.getRelNode(),
      SqlDialect.DatabaseProduct.HIVE.getDialect());

  assertEquals("SELECT TO_DATE(TEST_HIST.CREATED_AT) DT, "
      + "COUNT(TEST_HIST.ID) NUM_QUERIES "
      + "FROM H2.PUBLIC.TEST_HIST INNER JOIN H2.PUBLIC.UINFO "
      + "ON TEST_HIST.QBOL_USER_ID = UINFO.QU_ID INNER JOIN H2.PUBLIC.ACC "
      + "ON UINFO.A_ID = ACC.ID "
      + "WHERE ACC.CUSTOMER_NAME LIKE 'amogh' AND "
      + "(TO_DATE(TEST_HIST.CREATED_AT) >= "
      + "DATE_SUB(FROM_UNIXTIME(UNIX_TIMESTAMP()), 150) "
      + "AND TEST_HIST.COMMAND_TYPE = 'HiveCommand' "
      + "AND TEST_HIST.QLOG LIKE '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%') "
      + "GROUP BY TO_DATE(TEST_HIST.CREATED_AT) "
      + "ORDER BY DT", hiveQuery);
}
Example #15
Source File: SqlString.java From calcite with Apache License 2.0 | 5 votes |
/**
 * Creates a SqlString. The SQL might contain dynamic parameters;
 * dynamicParameters designate the order of the parameters.
 *
 * @param sql text
 * @param dynamicParameters indices
 */
public SqlString(SqlDialect dialect, String sql, ImmutableList<Integer> dynamicParameters) {
  this.dialect = dialect;
  this.sql = sql;
  this.dynamicParameters = dynamicParameters;
  assert sql != null : "sql must be NOT null";
  assert dialect != null : "dialect must be NOT null";
}
Example #16
Source File: Lattice.java From Bats with Apache License 2.0 | 5 votes |
SqlWriter(Lattice lattice, SqlDialect dialect, StringBuilder buf,
    SqlImplementor.SimpleContext context) {
  this.lattice = lattice;
  this.context = context;
  this.buf = buf;
  this.dialect = dialect;
}
Example #17
Source File: MaterializedViewJoinTest.java From quark with Apache License 2.0 | 5 votes |
@Test
public void testOptWithJoin() throws Exception {
  String sql = "select\n"
      + "qh.created_at as dt, \n"
      + "count(qh.id) as num_queries\n"
      + "\n"
      + "from test_hist qh\n"
      + "join uinfo ui\n"
      + " on qh.qbol_user_id = ui.qu_id\n"
      + "join acc externals\n"
      + " on externals.id = ui.a_id\n"
      + "\n"
      + "where qh.timeout >= 350\n"
      + "and command_type = 'HiveCommand'\n"
      + "and qlog like '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%'\n"
      + "and customer_name like 'amogh'\n"
      + "\n"
      + "group by \n"
      + "qh.created_at\n"
      + "\n"
      + "order by dt asc";

  SqlQueryParser parser = new SqlQueryParser(connInfo);
  final SqlQueryParser.SqlQueryParserResult result = parser.parse(sql);
  final String hiveQuery = ResultProcessor.getParsedSql(result.getRelNode(),
      SqlDialect.DatabaseProduct.HIVE.getDialect());

  assertEquals("SELECT CREATED_AT, COUNT(ID) NUM_QUERIES FROM "
      + "PUBLIC.TEST_HIST_PARTITION INNER JOIN H2.PUBLIC.UINFO "
      + "ON TEST_HIST_PARTITION.QBOL_USER_ID = UINFO.QU_ID "
      + "INNER JOIN H2.PUBLIC.ACC ON UINFO.A_ID = ACC.ID "
      + "WHERE ACC.CUSTOMER_NAME LIKE 'amogh' AND "
      + "(TEST_HIST_PARTITION.TIMEOUT >= 350 "
      + "AND TEST_HIST_PARTITION.COMMAND_TYPE = 'HiveCommand' "
      + "AND TEST_HIST_PARTITION.QLOG "
      + "LIKE '%\\\"HIVE_VERSION\\\":\\\"1.2\\\"%') "
      + "GROUP BY CREATED_AT ORDER BY CREATED_AT", hiveQuery);
}
Example #18
Source File: QuerySqlStatisticProvider.java From calcite with Apache License 2.0 | 5 votes |
public double tableCardinality(RelOptTable table) {
  final SqlDialect dialect = table.unwrap(SqlDialect.class);
  final DataSource dataSource = table.unwrap(DataSource.class);
  return withBuilder(
      (cluster, relOptSchema, relBuilder) -> {
        // Generate:
        //   SELECT COUNT(*) FROM `EMP`
        relBuilder.push(table.toRel(ViewExpanders.simpleContext(cluster)))
            .aggregate(relBuilder.groupKey(), relBuilder.count());

        final String sql = toSql(relBuilder.build(), dialect);
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery(sql)) {
          if (!resultSet.next()) {
            throw new AssertionError("expected exactly 1 row: " + sql);
          }
          final double cardinality = resultSet.getDouble(1);
          if (resultSet.next()) {
            throw new AssertionError("expected exactly 1 row: " + sql);
          }
          return cardinality;
        } catch (SQLException e) {
          throw handle(e, sql);
        }
      });
}
Example #19
Source File: QuerySqlStatisticProvider.java From calcite with Apache License 2.0 | 5 votes |
public boolean isKey(RelOptTable table, List<Integer> columns) {
  final SqlDialect dialect = table.unwrap(SqlDialect.class);
  final DataSource dataSource = table.unwrap(DataSource.class);
  return withBuilder(
      (cluster, relOptSchema, relBuilder) -> {
        // The collection of columns ['DEPTNO'] is a key for 'EMP' if the
        // following query returns no rows:
        //
        //   SELECT 1
        //   FROM `EMP`
        //   GROUP BY `DEPTNO`
        //   HAVING COUNT(*) > 1
        //
        final RelOptTable.ToRelContext toRelContext =
            ViewExpanders.simpleContext(cluster);
        relBuilder.push(table.toRel(toRelContext))
            .aggregate(relBuilder.groupKey(relBuilder.fields(columns)),
                relBuilder.count())
            .filter(
                relBuilder.call(SqlStdOperatorTable.GREATER_THAN,
                    Util.last(relBuilder.fields()), relBuilder.literal(1)));

        final String sql = toSql(relBuilder.build(), dialect);
        try (Connection connection = dataSource.getConnection();
             Statement statement = connection.createStatement();
             ResultSet resultSet = statement.executeQuery(sql)) {
          return !resultSet.next();
        } catch (SQLException e) {
          throw handle(e, sql);
        }
      });
}
Example #20
Source File: FlatTableSqlQuoteUtils.java From kylin with Apache License 2.0 | 5 votes |
/**
 * If KylinConfig#enableHiveDdlQuote returns false, quoting is disabled.
 * If a specific SqlDialect is given, use it; otherwise use KylinConfig#getFactTableDialect
 * to quote the identifier.
 */
public static String quoteIdentifier(String identifier, SqlDialect specificSqlDialect) {
  if (!kylinConfig.enableHiveDdlQuote()) {
    return identifier;
  }
  if (specificSqlDialect != null) {
    return specificSqlDialect.quoteIdentifier(identifier);
  }
  setQuote();
  return defaultDialect.quoteIdentifier(identifier);
}
Example #21
Source File: RelToSqlConverterTest.java From quark with Apache License 2.0 | 5 votes |
@Test
public void testSqlParsingOfLimitClauseForRedShift()
    throws QuarkException, SQLException, SqlParseException {
  String query = "select product_id from product limit 100 offset 10";
  final SqlDialect redshiftDialect =
      SqlDialect.getProduct("REDSHIFT", null).getDialect();
  QuarkTestUtil.checkSqlParsing(
      query,
      info,
      "SELECT \"PRODUCT_ID\" "
          + "FROM \"PRODUCT\" "
          + "LIMIT 100 OFFSET 10",
      redshiftDialect);
}
Example #22
Source File: QuarkDDLExecutor.java From quark with Apache License 2.0 | 5 votes |
public int executeCreateView(SqlCreateQuarkView sqlNode) throws SQLException {
  DBI dbi = getDbi();
  List<String> tableNameList = sqlNode.getTableName().names;
  String dataSourceName = tableNameList.get(0);

  ViewDAO viewDAO = dbi.onDemand(ViewDAO.class);
  JdbcSourceDAO jdbcDAO = dbi.onDemand(JdbcSourceDAO.class);
  QuboleDbSourceDAO quboleDAO = dbi.onDemand(QuboleDbSourceDAO.class);
  DataSource dataSource = jdbcDAO.findByName(dataSourceName, connection.getDSSet().getId());
  if (dataSource == null) {
    dataSource = quboleDAO.findByName(dataSourceName, connection.getDSSet().getId());
  }
  if (dataSource == null) {
    throw new SQLException("DataSource with name '" + dataSourceName + "' not found");
  }

  SqlPrettyWriter writer = new SqlPrettyWriter(SqlDialect.CALCITE);
  writer.setAlwaysUseParentheses(false);
  writer.setSelectListItemsOnSeparateLines(false);
  writer.setIndentation(0);
  writer.setQuoteAllIdentifiers(true);
  sqlNode.getQuery().unparse(writer, 0, 0);
  final String sql = writer.toString();
  LOG.debug(sql);

  return viewDAO.insert(sqlNode.getName(), "No Description", sql,
      0L, dataSource.getId(), tableNameList.get(1), tableNameList.get(2),
      connection.getDSSet().getId());
}
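Example #22 serializes the view's defining query back to SQL text with a SqlPrettyWriter configured for SqlDialect.CALCITE. The following is a stripped-down, hedged sketch of just that unparse step in isolation; it assumes a Calcite version where this SqlPrettyWriter constructor and the setter-style configuration are still available (as they are in the snippet above), and the class name and query text are illustrative only.

import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.sql.pretty.SqlPrettyWriter;

public class ViewUnparseSketch {
  public static void main(String[] args) throws SqlParseException {
    // Parse an illustrative view definition.
    SqlNode query = SqlParser.create("select deptno, count(*) from emps group by deptno").parseQuery();

    // Unparse with the Calcite dialect, quoting all identifiers, as executeCreateView does above.
    SqlPrettyWriter writer = new SqlPrettyWriter(SqlDialect.CALCITE);
    writer.setAlwaysUseParentheses(false);
    writer.setSelectListItemsOnSeparateLines(false);
    writer.setIndentation(0);
    writer.setQuoteAllIdentifiers(true);
    query.unparse(writer, 0, 0);

    System.out.println(writer.toString());
  }
}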
Example #23
Source File: SqlToOperationConverter.java From flink with Apache License 2.0 | 5 votes |
private String getQuotedSqlString(SqlNode sqlNode) {
  SqlParser.Config parserConfig = flinkPlanner.config().getParserConfig();
  SqlDialect dialect = new CalciteSqlDialect(SqlDialect.EMPTY_CONTEXT
      .withQuotedCasing(parserConfig.unquotedCasing())
      .withConformance(parserConfig.conformance())
      .withUnquotedCasing(parserConfig.unquotedCasing())
      .withIdentifierQuoteString(parserConfig.quoting().string));
  return sqlNode.toSqlString(dialect).getSql();
}
Example #24
Source File: SqlInsertTable.java From dremio-oss with Apache License 2.0 | 5 votes |
@Override
public List<String> getFieldNames() {
  for (SqlNode fieldNode : insertFields.getList()) {
    if (!(fieldNode instanceof SqlIdentifier)) {
      throw SqlExceptionHelper.parseError("Column type specified",
          this.toSqlString(new SqlDialect(SqlDialect.EMPTY_CONTEXT)).getSql(),
          fieldNode.getParserPosition()).buildSilently();
    }
  }
  return insertFields.getList().stream().map(SqlNode::toString).collect(Collectors.toList());
}
Example #25
Source File: FlatTableSqlQuoteUtilsTest.java From kylin-on-parquet-v2 with Apache License 2.0 | 5 votes |
@Test
public void testQuoteWithIdentifier() {
  Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier("abc", null));
  Assert.assertEquals("abc",
      FlatTableSqlQuoteUtils.quoteIdentifier("abc", FlatTableSqlQuoteUtils.NON_QUOTE_DIALECT));
  Assert.assertEquals("\"abc\"",
      FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.POSTGRESQL.getDialect()));
  Assert.assertEquals("`abc`",
      FlatTableSqlQuoteUtils.quoteIdentifier("abc", FlatTableSqlQuoteUtils.HIVE_DIALECT));
  Assert.assertEquals("[abc]",
      FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.MSSQL.getDialect()));
  Assert.assertEquals("`abc`",
      FlatTableSqlQuoteUtils.quoteIdentifier("abc", SqlDialect.DatabaseProduct.MYSQL.getDialect()));

  Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.MYSQL, "abc"));
  Assert.assertEquals("`abc`", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.HIVE, "abc"));
  Assert.assertEquals("[abc]", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.MSSQL, "abc"));
  Assert.assertEquals("\"abc\"", FlatTableSqlQuoteUtils.quoteIdentifier(SourceDialect.POSTGRESQL, "abc"));
}
Example #26
Source File: FlatTableSqlQuoteUtils.java From kylin-on-parquet-v2 with Apache License 2.0 | 5 votes |
static String quoteIdentifier(String sqlExpr, String identifier, List<String> identifierPatterns,
    SqlDialect sqlDialect) {
  String quotedIdentifier = quoteIdentifier(identifier.trim(), sqlDialect);
  for (String pattern : identifierPatterns) {
    Matcher matcher = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE | Pattern.DOTALL).matcher(sqlExpr);
    if (matcher.find()) {
      sqlExpr = matcher.replaceAll("$1" + quotedIdentifier + "$3");
    }
  }
  return sqlExpr;
}
Example #27
Source File: SqlImplementor.java From calcite with Apache License 2.0 | 5 votes |
/** Removes cast from string.
 *
 * <p>For example, {@code x > CAST('2015-01-07' AS DATE)}
 * becomes {@code x > '2015-01-07'}.
 */
private static RexNode stripCastFromString(RexNode node, SqlDialect dialect) {
  switch (node.getKind()) {
  case EQUALS:
  case IS_NOT_DISTINCT_FROM:
  case NOT_EQUALS:
  case GREATER_THAN:
  case GREATER_THAN_OR_EQUAL:
  case LESS_THAN:
  case LESS_THAN_OR_EQUAL:
    final RexCall call = (RexCall) node;
    final RexNode o0 = call.operands.get(0);
    final RexNode o1 = call.operands.get(1);
    if (o0.getKind() == SqlKind.CAST
        && o1.getKind() != SqlKind.CAST) {
      if (!dialect.supportsImplicitTypeCoercion((RexCall) o0)) {
        // If the dialect does not support implicit type coercion,
        // we definitely can not strip the cast.
        return node;
      }
      final RexNode o0b = ((RexCall) o0).getOperands().get(0);
      return call.clone(call.getType(), ImmutableList.of(o0b, o1));
    }
    if (o1.getKind() == SqlKind.CAST
        && o0.getKind() != SqlKind.CAST) {
      if (!dialect.supportsImplicitTypeCoercion((RexCall) o1)) {
        return node;
      }
      final RexNode o1b = ((RexCall) o1).getOperands().get(0);
      return call.clone(call.getType(), ImmutableList.of(o0, o1b));
    }
  }
  return node;
}
Example #28
Source File: FlatTableSqlQuoteUtils.java From kylin-on-parquet-v2 with Apache License 2.0 | 5 votes |
/**
 * If KylinConfig#enableHiveDdlQuote returns false, quoting is disabled.
 * If a specific SqlDialect is given, use it; otherwise use KylinConfig#getFactTableDialect
 * to quote the identifier.
 */
public static String quoteIdentifier(String identifier, SqlDialect specificSqlDialect) {
  if (!kylinConfig.enableHiveDdlQuote()) {
    return identifier;
  }
  if (specificSqlDialect != null) {
    return specificSqlDialect.quoteIdentifier(identifier);
  }
  setQuote();
  return defaultDialect.quoteIdentifier(identifier);
}
Example #29
Source File: SqlPrettyWriter.java From calcite with Apache License 2.0 | 5 votes |
@Deprecated public SqlPrettyWriter(
    SqlDialect dialect,
    boolean alwaysUseParentheses,
    PrintWriter pw) {
  // NOTE that 'pw' is ignored; there is no place for it in the new API
  this(config().withDialect(Objects.requireNonNull(dialect))
      .withAlwaysUseParentheses(alwaysUseParentheses));
}