org.apache.spark.sql.sources.LessThan Java Examples
The following examples show how to use
org.apache.spark.sql.sources.LessThan.
Each example links to its original project and source file.
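Before the project examples, here is a minimal sketch of the class itself. LessThan is a Spark Filter carrying an attribute name and a value, and both can be read back when a data source translates the filter. The column name "age" and the threshold 30 below are illustrative only, not taken from any of the projects.

import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.LessThan;

public class LessThanSketch {
  public static void main(String[] args) {
    // Construct a LessThan filter for a hypothetical "age" column.
    Filter filter = new LessThan("age", 30);

    // A data source typically inspects the attribute and value when
    // deciding whether (and how) to push the filter down.
    if (filter instanceof LessThan) {
      LessThan lt = (LessThan) filter;
      System.out.println(lt.attribute() + " < " + lt.value()); // prints: age < 30
    }
  }
}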
Example #1
Source File: FlightDataSourceReader.java From flight-spark-source with Apache License 2.0 | 6 votes |
private String generateWhereClause(List<Filter> pushed) {
  List<String> filterStr = Lists.newArrayList();
  for (Filter filter : pushed) {
    if (filter instanceof IsNotNull) {
      filterStr.add(String.format("isnotnull(\"%s\")", ((IsNotNull) filter).attribute()));
    } else if (filter instanceof EqualTo) {
      filterStr.add(String.format("\"%s\" = %s", ((EqualTo) filter).attribute(), valueToString(((EqualTo) filter).value())));
    } else if (filter instanceof GreaterThan) {
      filterStr.add(String.format("\"%s\" > %s", ((GreaterThan) filter).attribute(), valueToString(((GreaterThan) filter).value())));
    } else if (filter instanceof GreaterThanOrEqual) {
      filterStr.add(String.format("\"%s\" >= %s", ((GreaterThanOrEqual) filter).attribute(), valueToString(((GreaterThanOrEqual) filter).value())));
    } else if (filter instanceof LessThan) {
      filterStr.add(String.format("\"%s\" < %s", ((LessThan) filter).attribute(), valueToString(((LessThan) filter).value())));
    } else if (filter instanceof LessThanOrEqual) {
      filterStr.add(String.format("\"%s\" <= %s", ((LessThanOrEqual) filter).attribute(), valueToString(((LessThanOrEqual) filter).value())));
    }
    // TODO: fill out rest of Filter types
  }
  return WHERE_JOINER.join(filterStr);
}
Example #2
Source File: FlightDataSourceReader.java From flight-spark-source with Apache License 2.0 | 6 votes |
private boolean canBePushed(Filter filter) {
  if (filter instanceof IsNotNull) {
    return true;
  } else if (filter instanceof EqualTo) {
    return true;
  } else if (filter instanceof GreaterThan) {
    return true;
  } else if (filter instanceof GreaterThanOrEqual) {
    return true;
  } else if (filter instanceof LessThan) {
    return true;
  } else if (filter instanceof LessThanOrEqual) {
    return true;
  }
  LOGGER.error("Can't push filter of type " + filter.toString());
  return false;
}
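Examples #1 and #2 come from the same reader. The sketch below shows one way the two helpers might fit together in a Spark DataSourceV2 SupportsPushDownFilters implementation; the class name, the pushed and whereClause fields, and the abstract helper declarations are assumptions for illustration, with canBePushed and generateWhereClause assumed to behave as in the examples above.

import java.util.List;
import com.google.common.collect.Lists;
import org.apache.spark.sql.sources.Filter;
import org.apache.spark.sql.sources.v2.reader.SupportsPushDownFilters;

// Sketch only: names and structure are illustrative, not taken from flight-spark-source.
abstract class PushdownSketch implements SupportsPushDownFilters {
  private List<Filter> pushed = Lists.newArrayList();
  protected String whereClause = "";

  @Override
  public Filter[] pushFilters(Filter[] filters) {
    List<Filter> notPushed = Lists.newArrayList();
    pushed = Lists.newArrayList();
    for (Filter filter : filters) {
      if (canBePushed(filter)) {
        pushed.add(filter);      // e.g. LessThan, LessThanOrEqual, EqualTo, IsNotNull
      } else {
        notPushed.add(filter);   // Spark evaluates these after the scan
      }
    }
    whereClause = generateWhereClause(pushed); // WHERE fragment built from the pushed filters
    return notPushed.toArray(new Filter[0]);   // filters Spark must still apply itself
  }

  @Override
  public Filter[] pushedFilters() {
    return pushed.toArray(new Filter[0]);
  }

  abstract boolean canBePushed(Filter filter);
  abstract String generateWhereClause(List<Filter> pushed);
}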
Example #3
Source File: TestFilteredScan.java From iceberg with Apache License 2.0 | 6 votes |
@Test
public void testUnpartitionedTimestampFilter() {
  DataSourceOptions options = new DataSourceOptions(ImmutableMap.of(
      "path", unpartitioned.toString())
  );

  IcebergSource source = new IcebergSource();
  DataSourceReader reader = source.createReader(options);
  pushFilters(reader, LessThan.apply("ts", "2017-12-22T00:00:00+00:00"));

  List<InputPartition<InternalRow>> tasks = reader.planInputPartitions();
  Assert.assertEquals("Should only create one task for a small file", 1, tasks.size());

  assertEqualsSafe(SCHEMA.asStruct(), expected(5, 6, 7, 8, 9),
      read(unpartitioned.toString(), "ts < cast('2017-12-22 00:00:00+00:00' as timestamp)"));
}
Example #4
Source File: TestFilteredScan.java From iceberg with Apache License 2.0 | 6 votes |
@Test
public void testUnpartitionedTimestampFilter() {
  CaseInsensitiveStringMap options = new CaseInsensitiveStringMap(ImmutableMap.of(
      "path", unpartitioned.toString())
  );

  SparkScanBuilder builder = new SparkScanBuilder(spark, TABLES.load(options.get("path")), options);

  pushFilters(builder, LessThan.apply("ts", "2017-12-22T00:00:00+00:00"));
  Batch scan = builder.build().toBatch();

  InputPartition[] tasks = scan.planInputPartitions();
  Assert.assertEquals("Should only create one task for a small file", 1, tasks.length);

  assertEqualsSafe(SCHEMA.asStruct(), expected(5, 6, 7, 8, 9),
      read(unpartitioned.toString(), "ts < cast('2017-12-22 00:00:00+00:00' as timestamp)"));
}
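The two Iceberg tests build the LessThan filter by hand through the companion object's apply method and push it straight at the reader or scan builder. In normal use, the same filter is produced by Spark's optimizer from a DataFrame predicate; a minimal sketch of that path follows, where the table path and the choice of the "iceberg" format are assumptions for illustration.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class TimestampPushdownSketch {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .master("local[*]")
        .appName("lessthan-pushdown-sketch")
        .getOrCreate();

    // Reading through a source that supports filter pushdown: Spark translates the
    // predicate below into org.apache.spark.sql.sources.LessThan("ts", <timestamp>)
    // and offers it to the source, much like the tests above do explicitly.
    Dataset<Row> df = spark.read()
        .format("iceberg")                  // assumed: any pushdown-capable source works
        .load("/tmp/tables/unpartitioned")  // hypothetical table path
        .filter("ts < cast('2017-12-22 00:00:00+00:00' as timestamp)");

    df.show();
    spark.stop();
  }
}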