org.apache.calcite.sql.validate.SqlNameMatchers Java Examples
The following examples show how to use
org.apache.calcite.sql.validate.SqlNameMatchers.
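Before the examples, here is a minimal, self-contained sketch of the factory methods they all rely on: SqlNameMatchers.withCaseSensitive(boolean) and SqlNameMatchers.liberal(), each of which returns a SqlNameMatcher. The class name MatcherSketch and the expected-result comments are illustrative assumptions, not taken from any of the projects below.

import org.apache.calcite.sql.validate.SqlNameMatcher;
import org.apache.calcite.sql.validate.SqlNameMatchers;

public class MatcherSketch {
  public static void main(String[] args) {
    // Case-sensitive matcher: names must match exactly.
    SqlNameMatcher sensitive = SqlNameMatchers.withCaseSensitive(true);
    System.out.println(sensitive.isCaseSensitive());        // expected: true
    System.out.println(sensitive.matches("EMPS", "emps"));  // expected: false

    // Case-insensitive matcher: names match regardless of case.
    SqlNameMatcher insensitive = SqlNameMatchers.withCaseSensitive(false);
    System.out.println(insensitive.matches("EMPS", "emps")); // expected: true

    // liberal() returns a more permissive matcher; it is used for
    // operator lookup in Example #2 below.
    SqlNameMatcher liberal = SqlNameMatchers.liberal();
    System.out.println(liberal.isCaseSensitive());
  }
}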
Example #1
Source File: FlinkCalciteCatalogReader.java From flink with Apache License 2.0

public FlinkCalciteCatalogReader(
    CalciteSchema rootSchema,
    List<List<String>> defaultSchemas,
    RelDataTypeFactory typeFactory,
    CalciteConnectionConfig config) {
  super(
      rootSchema,
      SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
      Stream.concat(
          defaultSchemas.stream(),
          Stream.of(Collections.<String>emptyList())
      ).collect(Collectors.toList()),
      typeFactory,
      config);
}
Example #2
Source File: RelJson.java From calcite with Apache License 2.0

SqlOperator toOp(Map<String, Object> map) {
  // Different operators may share the same kind, so check both name and kind.
  String name = map.get("name").toString();
  String kind = map.get("kind").toString();
  String syntax = map.get("syntax").toString();
  SqlKind sqlKind = SqlKind.valueOf(kind);
  SqlSyntax sqlSyntax = SqlSyntax.valueOf(syntax);
  List<SqlOperator> operators = new ArrayList<>();
  SqlStdOperatorTable.instance().lookupOperatorOverloads(
      new SqlIdentifier(name, new SqlParserPos(0, 0)),
      null,
      sqlSyntax,
      operators,
      SqlNameMatchers.liberal());
  for (SqlOperator operator : operators) {
    if (operator.kind == sqlKind) {
      return operator;
    }
  }
  String class_ = (String) map.get("class");
  if (class_ != null) {
    return AvaticaUtils.instantiatePlugin(SqlOperator.class, class_);
  }
  return null;
}
Example #3
Source File: CalciteCatalogReader.java From Bats with Apache License 2.0

public CalciteCatalogReader(CalciteSchema rootSchema, List<String> defaultSchema,
    RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
  this(rootSchema,
      SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
      ImmutableList.of(Objects.requireNonNull(defaultSchema), ImmutableList.of()),
      typeFactory, config);
}
Example #4
Source File: CatalogReader.java From flink with Apache License 2.0

public CatalogReader(
    CalciteSchema rootSchema,
    List<List<String>> defaultSchema,
    RelDataTypeFactory typeFactory,
    CalciteConnectionConfig config) {
  super(rootSchema,
      SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
      Stream.concat(
          defaultSchema.stream(),
          Stream.of(Collections.<String>emptyList())
      ).collect(Collectors.toList()),
      typeFactory,
      config);
}
Example #5
Source File: FlinkSqlOperatorTable.java From flink with Apache License 2.0

@Override
public void lookupOperatorOverloads(
    SqlIdentifier opName,
    SqlFunctionCategory category,
    SqlSyntax syntax,
    List<SqlOperator> operatorList,
    SqlNameMatcher nameMatcher) {
  // Force caseSensitive=false so the behavior stays the same as before.
  super.lookupOperatorOverloads(
      opName, category, syntax, operatorList, SqlNameMatchers.withCaseSensitive(false));
}
Example #6
Source File: CalciteCatalogReader.java From calcite with Apache License 2.0

public CalciteCatalogReader(CalciteSchema rootSchema, List<String> defaultSchema,
    RelDataTypeFactory typeFactory, CalciteConnectionConfig config) {
  this(rootSchema,
      SqlNameMatchers.withCaseSensitive(config != null && config.caseSensitive()),
      ImmutableList.of(Objects.requireNonNull(defaultSchema), ImmutableList.of()),
      typeFactory, config);
}
Example #7
Source File: MockCatalogReader.java From calcite with Apache License 2.0

/**
 * Creates a MockCatalogReader.
 *
 * <p>Caller must then call {@link #init} to populate with data.</p>
 *
 * @param typeFactory Type factory
 */
public MockCatalogReader(RelDataTypeFactory typeFactory, boolean caseSensitive) {
  super(CalciteSchema.createRootSchema(false, false, DEFAULT_CATALOG),
      SqlNameMatchers.withCaseSensitive(caseSensitive),
      ImmutableList.of(PREFIX, ImmutableList.of()),
      typeFactory, null);
}
Example #8
Source File: DremioCatalogReader.java From dremio-oss with Apache License 2.0

@Override
public SqlNameMatcher nameMatcher() {
  return SqlNameMatchers.withCaseSensitive(false);
}
Example #9
Source File: TestSQLAnalyzer.java From dremio-oss with Apache License 2.0

@Override
public SqlNameMatcher nameMatcher() {
  return SqlNameMatchers.withCaseSensitive(caseSensitive);
}
Example #10
Source File: LookupOperatorOverloadsTest.java From calcite with Apache License 2.0

private void checkInternal(boolean caseSensitive) throws SQLException {
  final SqlNameMatcher nameMatcher = SqlNameMatchers.withCaseSensitive(caseSensitive);
  final String schemaName = "MySchema";
  final String funcName = "MyFUNC";
  final String anotherName = "AnotherFunc";

  try (Connection connection = DriverManager.getConnection("jdbc:calcite:")) {
    CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class);
    SchemaPlus rootSchema = calciteConnection.getRootSchema();
    SchemaPlus schema = rootSchema.add(schemaName, new AbstractSchema());
    final TableFunction table = TableFunctionImpl.create(Smalls.MAZE_METHOD);
    schema.add(funcName, table);
    schema.add(anotherName, table);
    final TableFunction table2 = TableFunctionImpl.create(Smalls.MAZE3_METHOD);
    schema.add(funcName, table2);

    final CalciteServerStatement statement =
        connection.createStatement().unwrap(CalciteServerStatement.class);
    final CalcitePrepare.Context prepareContext = statement.createPrepareContext();
    final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
    CalciteCatalogReader reader =
        new CalciteCatalogReader(prepareContext.getRootSchema(), ImmutableList.of(),
            typeFactory, prepareContext.config());

    final List<SqlOperator> operatorList = new ArrayList<>();
    SqlIdentifier myFuncIdentifier =
        new SqlIdentifier(Lists.newArrayList(schemaName, funcName), null,
            SqlParserPos.ZERO, null);
    reader.lookupOperatorOverloads(myFuncIdentifier,
        SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION, SqlSyntax.FUNCTION,
        operatorList, nameMatcher);
    checkFunctionType(2, funcName, operatorList);

    operatorList.clear();
    reader.lookupOperatorOverloads(myFuncIdentifier,
        SqlFunctionCategory.USER_DEFINED_FUNCTION, SqlSyntax.FUNCTION,
        operatorList, nameMatcher);
    checkFunctionType(0, null, operatorList);

    operatorList.clear();
    SqlIdentifier anotherFuncIdentifier =
        new SqlIdentifier(Lists.newArrayList(schemaName, anotherName), null,
            SqlParserPos.ZERO, null);
    reader.lookupOperatorOverloads(anotherFuncIdentifier,
        SqlFunctionCategory.USER_DEFINED_TABLE_FUNCTION, SqlSyntax.FUNCTION,
        operatorList, nameMatcher);
    checkFunctionType(1, anotherName, operatorList);
  }
}