io.prestosql.spi.type.TypeManager Java Examples
The following examples show how to use
io.prestosql.spi.type.TypeManager.
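Before the examples, here is a minimal orientation sketch (not from the Presto codebase; the class below is illustrative): a connector receives a TypeManager from the engine's ConnectorContext and uses it to resolve a TypeSignature into a concrete Type.

    import io.prestosql.spi.type.Type;
    import io.prestosql.spi.type.TypeManager;
    import io.prestosql.spi.type.TypeSignature;

    // Illustrative example: resolve a concrete Type from a signature.
    public final class TypeLookupExample
    {
        private TypeLookupExample() {}

        public static Type resolveVarchar(TypeManager typeManager)
        {
            // getType looks the signature up in the engine's type registry
            return typeManager.getType(new TypeSignature("varchar"));
        }
    }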
Example #1
Source File: TableMetadataSystemTable.java From presto with Apache License 2.0
@Inject
public TableMetadataSystemTable(@ForMetadata IDBI dbi, TypeManager typeManager)
{
    this.dao = onDemandDao(dbi, MetadataDao.class);
    requireNonNull(typeManager, "typeManager is null");

    this.tableMetadata = new ConnectorTableMetadata(
            new SchemaTableName("system", "tables"),
            ImmutableList.of(
                    new ColumnMetadata(SCHEMA_NAME, VARCHAR),
                    new ColumnMetadata(TABLE_NAME, VARCHAR),
                    new ColumnMetadata("temporal_column", VARCHAR),
                    new ColumnMetadata("ordering_columns", new ArrayType(VARCHAR)),
                    new ColumnMetadata("distribution_name", VARCHAR),
                    new ColumnMetadata("bucket_count", BIGINT),
                    new ColumnMetadata("bucketing_columns", new ArrayType(VARCHAR)),
                    new ColumnMetadata("organized", BOOLEAN)));
}
Example #2
Source File: FilesTable.java From presto with Apache License 2.0
public FilesTable(SchemaTableName tableName, Table icebergTable, Optional<Long> snapshotId, TypeManager typeManager)
{
    this.icebergTable = requireNonNull(icebergTable, "icebergTable is null");

    tableMetadata = new ConnectorTableMetadata(requireNonNull(tableName, "tableName is null"),
            ImmutableList.<ColumnMetadata>builder()
                    .add(new ColumnMetadata("file_path", VARCHAR))
                    .add(new ColumnMetadata("file_format", VARCHAR))
                    .add(new ColumnMetadata("record_count", BIGINT))
                    .add(new ColumnMetadata("file_size_in_bytes", BIGINT))
                    .add(new ColumnMetadata("column_sizes", typeManager.getType(mapType(INTEGER.getTypeSignature(), BIGINT.getTypeSignature()))))
                    .add(new ColumnMetadata("value_counts", typeManager.getType(mapType(INTEGER.getTypeSignature(), BIGINT.getTypeSignature()))))
                    .add(new ColumnMetadata("null_value_counts", typeManager.getType(mapType(INTEGER.getTypeSignature(), BIGINT.getTypeSignature()))))
                    .add(new ColumnMetadata("lower_bounds", typeManager.getType(mapType(INTEGER.getTypeSignature(), VARCHAR.getTypeSignature()))))
                    .add(new ColumnMetadata("upper_bounds", typeManager.getType(mapType(INTEGER.getTypeSignature(), VARCHAR.getTypeSignature()))))
                    .add(new ColumnMetadata("key_metadata", VARBINARY))
                    .add(new ColumnMetadata("split_offsets", new ArrayType(BIGINT)))
                    .build());
    this.snapshotId = requireNonNull(snapshotId, "snapshotId is null");
}
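The mapType helper called above is not shown in the snippet; a plausible definition, assumed from how map signatures are constructed in Example #17 below, would be:

    // Assumed helper (not in the snippet): builds a map(key, value) type signature.
    private static TypeSignature mapType(TypeSignature keyType, TypeSignature valueType)
    {
        return new TypeSignature(StandardTypes.MAP,
                TypeSignatureParameter.typeParameter(keyType),
                TypeSignatureParameter.typeParameter(valueType));
    }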
Example #3
Source File: HiveCoercionRecordCursor.java From presto with Apache License 2.0
public HiveCoercionRecordCursor(
        List<ColumnMapping> columnMappings,
        TypeManager typeManager,
        RecordCursor delegate)
{
    requireNonNull(columnMappings, "columnMappings is null");
    requireNonNull(typeManager, "typeManager is null");

    this.delegate = requireNonNull(delegate, "delegate is null");
    this.columnMappings = ImmutableList.copyOf(columnMappings);

    int size = columnMappings.size();
    this.coercers = new Coercer[size];

    BridgingRecordCursor bridgingRecordCursor = new BridgingRecordCursor();

    for (int columnIndex = 0; columnIndex < size; columnIndex++) {
        ColumnMapping columnMapping = columnMappings.get(columnIndex);

        if (columnMapping.getBaseTypeCoercionFrom().isPresent()) {
            coercers[columnIndex] = createCoercer(
                    typeManager,
                    columnMapping.getBaseTypeCoercionFrom().get(),
                    columnMapping.getHiveColumnHandle().getHiveType(),
                    bridgingRecordCursor);
        }
    }
}
Example #4
Source File: ThriftConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    Bootstrap app = new Bootstrap(
            new MBeanModule(),
            new MBeanServerModule(),
            new ConnectorObjectNameGeneratorModule(catalogName, "io.prestosql.plugin.thrift", "presto.plugin.thrift"),
            new DriftNettyClientModule(),
            binder -> {
                binder.bind(TypeManager.class).toInstance(context.getTypeManager());
            },
            locationModule,
            new ThriftModule());

    Injector injector = app
            .strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(config)
            .initialize();

    return injector.getInstance(ThriftConnector.class);
}
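For context, a factory like this one (and the ones in the next two examples) is handed to the engine by a Plugin; the wiring below is a hedged sketch (the plugin class and its constructor are hypothetical, not the actual ThriftPlugin):

    import com.google.common.collect.ImmutableList;
    import io.prestosql.spi.Plugin;
    import io.prestosql.spi.connector.ConnectorFactory;

    // Hypothetical plugin: registers a pre-built ConnectorFactory with the engine,
    // which then calls factory.create(catalogName, config, context) per catalog.
    public class ExamplePlugin
            implements Plugin
    {
        private final ConnectorFactory factory;

        public ExamplePlugin(ConnectorFactory factory)
        {
            this.factory = factory;
        }

        @Override
        public Iterable<ConnectorFactory> getConnectorFactories()
        {
            return ImmutableList.of(factory);
        }
    }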
Example #5
Source File: BigQueryConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    Bootstrap app = new Bootstrap(
            new JsonModule(),
            new BigQueryConnectorModule(context.getNodeManager()),
            binder -> {
                binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                binder.bind(NodeManager.class).toInstance(context.getNodeManager());
            });

    Injector injector = app.strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(config)
            .initialize();

    return injector.getInstance(BigQueryConnector.class);
}
Example #6
Source File: KafkaConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    Bootstrap app = new Bootstrap(
            new JsonModule(),
            new KafkaConnectorModule(),
            extension,
            binder -> {
                binder.bind(ClassLoader.class).toInstance(KafkaConnectorFactory.class.getClassLoader());
                binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                binder.bind(NodeManager.class).toInstance(context.getNodeManager());
            });

    Injector injector = app
            .strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(config)
            .initialize();

    return injector.getInstance(KafkaConnector.class);
}
Example #7
Source File: HiveCoercionRecordCursor.java From presto with Apache License 2.0
public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, BridgingRecordCursor bridgingRecordCursor)
{
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    requireNonNull(toHiveType, "toHiveType is null");
    this.bridgingRecordCursor = requireNonNull(bridgingRecordCursor, "bridgingRecordCursor is null");

    HiveType fromKeyHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName());
    HiveType fromValueHiveType = HiveType.valueOf(((MapTypeInfo) fromHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName());
    HiveType toKeyHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapKeyTypeInfo().getTypeName());
    HiveType toValueHiveType = HiveType.valueOf(((MapTypeInfo) toHiveType.getTypeInfo()).getMapValueTypeInfo().getTypeName());

    this.fromKeyValueTypes = fromHiveType.getType(typeManager).getTypeParameters();
    this.toType = toHiveType.getType(typeManager);
    this.toKeyValueTypes = toType.getTypeParameters();

    this.coercers = new Coercer[2];
    coercers[0] = fromKeyHiveType.equals(toKeyHiveType) ? null : createCoercer(typeManager, fromKeyHiveType, toKeyHiveType, bridgingRecordCursor);
    coercers[1] = fromValueHiveType.equals(toValueHiveType) ? null : createCoercer(typeManager, fromValueHiveType, toValueHiveType, bridgingRecordCursor);

    this.pageBuilder = (coercers[0] == null && coercers[1] == null) ? null : new PageBuilder(ImmutableList.of(toType));
}
Example #8
Source File: JdbcConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> requiredConfig, ConnectorContext context)
{
    requireNonNull(requiredConfig, "requiredConfig is null");

    Bootstrap app = new Bootstrap(
            binder -> binder.bind(TypeManager.class).toInstance(context.getTypeManager()),
            binder -> binder.bind(NodeManager.class).toInstance(context.getNodeManager()),
            binder -> binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder()),
            new JdbcModule(catalogName),
            moduleProvider.getModule(catalogName));

    Injector injector = app
            .strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(requiredConfig)
            .initialize();

    return injector.getInstance(JdbcConnector.class);
}
Example #9
Source File: OrcFileWriterFactory.java From presto with Apache License 2.0
public OrcFileWriterFactory(
        HdfsEnvironment hdfsEnvironment,
        TypeManager typeManager,
        NodeVersion nodeVersion,
        DateTimeZone hiveStorageTimeZone,
        boolean writeLegacyVersion,
        FileFormatDataSourceStats readStats,
        OrcWriterOptions orcWriterOptions)
{
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.nodeVersion = requireNonNull(nodeVersion, "nodeVersion is null");
    this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
    this.writeLegacyVersion = writeLegacyVersion;
    this.readStats = requireNonNull(readStats, "readStats is null");
    this.orcWriterOptions = requireNonNull(orcWriterOptions, "orcWriterOptions is null");
}
Example #10
Source File: HiveUtil.java From presto with Apache License 2.0
public static List<HiveColumnHandle> hiveColumnHandles(Table table, TypeManager typeManager)
{
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();

    // add the data fields first
    columns.addAll(getRegularColumnHandles(table, typeManager));

    // add the partition keys last (like Hive does)
    columns.addAll(getPartitionKeyColumnHandles(table, typeManager));

    // add hidden columns
    columns.add(pathColumnHandle());
    if (table.getStorage().getBucketProperty().isPresent()) {
        // TODO (https://github.com/prestosql/presto/issues/1706): support bucketing v2 for timestamp
        if (!containsTimestampBucketedV2(table.getStorage().getBucketProperty().get(), table)) {
            columns.add(bucketColumnHandle());
        }
    }
    columns.add(fileSizeColumnHandle());
    columns.add(fileModifiedTimeColumnHandle());
    if (!table.getPartitionColumns().isEmpty()) {
        columns.add(partitionColumnHandle());
    }

    return columns.build();
}
Example #11
Source File: ElasticsearchConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(catalogName, "catalogName is null");
    requireNonNull(config, "config is null");

    Bootstrap app = new Bootstrap(
            new MBeanModule(),
            new MBeanServerModule(),
            new ConnectorObjectNameGeneratorModule(catalogName, "io.prestosql.elasticsearch", "presto.plugin.elasticsearch"),
            new JsonModule(),
            new ElasticsearchConnectorModule(),
            binder -> {
                binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                binder.bind(NodeManager.class).toInstance(context.getNodeManager());
            });

    Injector injector = app.strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(config)
            .initialize();

    return injector.getInstance(ElasticsearchConnector.class);
}
Example #12
Source File: TypeRegistry.java From presto with Apache License 2.0
private Type instantiateParametricType(TypeManager typeManager, TypeSignature signature)
{
    List<TypeParameter> parameters = new ArrayList<>();

    for (TypeSignatureParameter parameter : signature.getParameters()) {
        TypeParameter typeParameter = TypeParameter.of(parameter, typeManager);
        parameters.add(typeParameter);
    }

    ParametricType parametricType = parametricTypes.get(signature.getBase().toLowerCase(Locale.ENGLISH));
    if (parametricType == null) {
        throw new TypeNotFoundException(signature);
    }

    Type instantiatedType;
    try {
        instantiatedType = parametricType.createType(typeManager, parameters);
    }
    catch (IllegalArgumentException e) {
        throw new TypeNotFoundException(signature, e);
    }

    // TODO: reimplement this check? Currently "varchar(Integer.MAX_VALUE)" fails with "varchar"
    //checkState(instantiatedType.equalsSignature(signature), "Instantiated parametric type name (%s) does not match expected name (%s)", instantiatedType, signature);
    return instantiatedType;
}
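For reference, a caller reaches this path by asking the type manager for a parametric signature; a hedged sketch of such a request (variable names illustrative):

    // Illustrative: build the signature array(bigint) and resolve it to a Type.
    TypeSignature arrayOfBigint = new TypeSignature(
            StandardTypes.ARRAY,
            TypeSignatureParameter.typeParameter(BIGINT.getTypeSignature()));
    Type arrayType = typeManager.getType(arrayOfBigint);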
Example #13
Source File: HivePageSource.java From presto with Apache License 2.0
public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
{
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    requireNonNull(toHiveType, "toHiveType is null");

    List<HiveType> fromFieldTypes = extractStructFieldTypes(fromHiveType);
    List<HiveType> toFieldTypes = extractStructFieldTypes(toHiveType);

    ImmutableList.Builder<Optional<Function<Block, Block>>> coercers = ImmutableList.builder();
    this.nullBlocks = new Block[toFieldTypes.size()];

    for (int i = 0; i < toFieldTypes.size(); i++) {
        if (i >= fromFieldTypes.size()) {
            nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build();
            coercers.add(Optional.empty());
        }
        else if (!fromFieldTypes.get(i).equals(toFieldTypes.get(i))) {
            coercers.add(Optional.of(createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i))));
        }
        else {
            coercers.add(Optional.empty());
        }
    }
    this.coercers = coercers.build();
}
Example #14
Source File: MongoConnectorFactory.java From presto with Apache License 2.0
@Override
public Connector create(String catalogName, Map<String, String> config, ConnectorContext context)
{
    requireNonNull(config, "config is null");

    Bootstrap app = new Bootstrap(
            new JsonModule(),
            new MongoClientModule(),
            binder -> binder.bind(TypeManager.class).toInstance(context.getTypeManager()));

    Injector injector = app.strictConfig()
            .doNotInitializeLogging()
            .setRequiredConfigurationProperties(config)
            .initialize();

    return injector.getInstance(MongoConnector.class);
}
Example #15
Source File: OrcFileWriterFactory.java From presto with Apache License 2.0
@Inject
public OrcFileWriterFactory(
        HdfsEnvironment hdfsEnvironment,
        TypeManager typeManager,
        NodeVersion nodeVersion,
        HiveConfig hiveConfig,
        OrcWriterConfig orcWriterConfig,
        FileFormatDataSourceStats readStats,
        OrcWriterConfig config)
{
    this(
            hdfsEnvironment,
            typeManager,
            nodeVersion,
            requireNonNull(hiveConfig, "hiveConfig is null").getDateTimeZone(),
            requireNonNull(orcWriterConfig, "orcWriterConfig is null").isUseLegacyVersion(),
            readStats,
            requireNonNull(config, "config is null").toOrcWriterOptions());
}
Example #16
Source File: KuduModule.java From presto with Apache License 2.0
@Override
protected void configure()
{
    bind(TypeManager.class).toInstance(typeManager);

    bind(KuduConnector.class).in(Scopes.SINGLETON);
    bind(KuduMetadata.class).in(Scopes.SINGLETON);
    bind(KuduTableProperties.class).in(Scopes.SINGLETON);
    bind(ConnectorSplitManager.class).to(KuduSplitManager.class).in(Scopes.SINGLETON);
    bind(ConnectorPageSourceProvider.class).to(KuduPageSourceProvider.class).in(Scopes.SINGLETON);
    bind(ConnectorPageSinkProvider.class).to(KuduPageSinkProvider.class).in(Scopes.SINGLETON);
    bind(KuduHandleResolver.class).in(Scopes.SINGLETON);
    bind(KuduRecordSetProvider.class).in(Scopes.SINGLETON);
    configBinder(binder()).bindConfig(KuduClientConfig.class);

    bind(RangePartitionProcedures.class).in(Scopes.SINGLETON);
    Multibinder.newSetBinder(binder(), Procedure.class);
}
Example #17
Source File: OrcStorageManager.java From presto with Apache License 2.0
static Type toOrcFileType(Type raptorType, TypeManager typeManager)
{
    // TIMESTAMPs are stored as BIGINT to avoid the poor encoding in ORC
    if (raptorType.equals(TimestampType.TIMESTAMP)) {
        return BIGINT;
    }
    if (raptorType instanceof ArrayType) {
        Type elementType = toOrcFileType(((ArrayType) raptorType).getElementType(), typeManager);
        return new ArrayType(elementType);
    }
    if (raptorType instanceof MapType) {
        TypeSignature keyType = toOrcFileType(((MapType) raptorType).getKeyType(), typeManager).getTypeSignature();
        TypeSignature valueType = toOrcFileType(((MapType) raptorType).getValueType(), typeManager).getTypeSignature();
        return typeManager.getParameterizedType(StandardTypes.MAP, ImmutableList.of(
                TypeSignatureParameter.typeParameter(keyType),
                TypeSignatureParameter.typeParameter(valueType)));
    }
    if (raptorType instanceof RowType) {
        List<Field> fields = ((RowType) raptorType).getFields().stream()
                .map(field -> new Field(field.getName(), toOrcFileType(field.getType(), typeManager)))
                .collect(toImmutableList());
        return RowType.from(fields);
    }
    return raptorType;
}
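A short usage illustration of the mapping above (hedged; assumes the static imports of the source file):

    // TIMESTAMP is rewritten to BIGINT for storage...
    Type scalar = toOrcFileType(TimestampType.TIMESTAMP, typeManager);   // BIGINT
    // ...and the rewrite recurses into container types:
    Type array = toOrcFileType(new ArrayType(TimestampType.TIMESTAMP), typeManager);   // array(bigint)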
Example #18
Source File: CharParametricType.java From presto with Apache License 2.0
@Override
public Type createType(TypeManager typeManager, List<TypeParameter> parameters)
{
    if (parameters.isEmpty()) {
        return createCharType(1);
    }
    if (parameters.size() != 1) {
        throw new IllegalArgumentException("Expected at most one parameter for CHAR");
    }

    TypeParameter parameter = parameters.get(0);

    if (!parameter.isLongLiteral()) {
        throw new IllegalArgumentException("CHAR length must be a number");
    }

    return createCharType(parameter.getLongLiteral());
}
Example #19
Source File: HiveBucketing.java From presto with Apache License 2.0
public static Optional<HiveBucketHandle> getHiveBucketHandle(Table table, TypeManager typeManager)
{
    Optional<HiveBucketProperty> hiveBucketProperty = table.getStorage().getBucketProperty();
    if (hiveBucketProperty.isEmpty()) {
        return Optional.empty();
    }

    Map<String, HiveColumnHandle> map = getRegularColumnHandles(table, typeManager).stream()
            .collect(Collectors.toMap(HiveColumnHandle::getName, identity()));

    ImmutableList.Builder<HiveColumnHandle> bucketColumns = ImmutableList.builder();
    for (String bucketColumnName : hiveBucketProperty.get().getBucketedBy()) {
        HiveColumnHandle bucketColumnHandle = map.get(bucketColumnName);
        if (bucketColumnHandle == null) {
            throw new PrestoException(
                    HIVE_INVALID_METADATA,
                    format("Table '%s.%s' is bucketed on non-existent column '%s'", table.getDatabaseName(), table.getTableName(), bucketColumnName));
        }
        bucketColumns.add(bucketColumnHandle);
    }

    BucketingVersion bucketingVersion = hiveBucketProperty.get().getBucketingVersion();
    int bucketCount = hiveBucketProperty.get().getBucketCount();
    return Optional.of(new HiveBucketHandle(bucketColumns.build(), bucketingVersion, bucketCount, bucketCount));
}
Example #20
Source File: MapParametricType.java From presto with Apache License 2.0
@Override
public Type createType(TypeManager typeManager, List<TypeParameter> parameters)
{
    checkArgument(parameters.size() == 2, "Expected two parameters, got %s", parameters);

    TypeParameter firstParameter = parameters.get(0);
    TypeParameter secondParameter = parameters.get(1);
    checkArgument(
            firstParameter.getKind() == ParameterKind.TYPE && secondParameter.getKind() == ParameterKind.TYPE,
            "Expected key and value to be types, got %s",
            parameters);

    Type keyType = firstParameter.getType();
    Type valueType = secondParameter.getType();

    MethodHandle keyNativeEquals = typeManager.resolveOperator(OperatorType.EQUAL, ImmutableList.of(keyType, keyType));
    MethodHandle keyBlockNativeEquals = compose(keyNativeEquals, nativeValueGetter(keyType));
    MethodHandle keyBlockEquals = compose(keyNativeEquals, nativeValueGetter(keyType), nativeValueGetter(keyType));
    MethodHandle keyNativeHashCode = typeManager.resolveOperator(OperatorType.HASH_CODE, ImmutableList.of(keyType));
    MethodHandle keyBlockHashCode = compose(keyNativeHashCode, nativeValueGetter(keyType));

    return new MapType(
            keyType,
            valueType,
            keyBlockNativeEquals,
            keyBlockEquals,
            keyNativeHashCode,
            keyBlockHashCode);
}
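The TypeManager calls of interest here are the resolveOperator lookups; the same pattern in isolation (a hedged sketch, variable names illustrative):

    // Resolve native method handles for key equality and hashing, as MapType requires.
    MethodHandle keyEquals = typeManager.resolveOperator(OperatorType.EQUAL, ImmutableList.of(BIGINT, BIGINT));
    MethodHandle keyHashCode = typeManager.resolveOperator(OperatorType.HASH_CODE, ImmutableList.of(BIGINT));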
Example #21
Source File: HivePageSourceProvider.java From presto with Apache License 2.0
@Inject
public HivePageSourceProvider(
        TypeManager typeManager,
        HiveConfig hiveConfig,
        HdfsEnvironment hdfsEnvironment,
        Set<HivePageSourceFactory> pageSourceFactories,
        Set<HiveRecordCursorProvider> cursorProviders,
        GenericHiveRecordCursorProvider genericCursorProvider)
{
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.hiveStorageTimeZone = requireNonNull(hiveConfig, "hiveConfig is null").getDateTimeZone();
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.pageSourceFactories = ImmutableSet.copyOf(requireNonNull(pageSourceFactories, "pageSourceFactories is null"));
    this.cursorProviders = ImmutableSet.<HiveRecordCursorProvider>builder()
            .addAll(requireNonNull(cursorProviders, "cursorProviders is null"))
            .add(genericCursorProvider) // generic should be last, as a fallback option
            .build();
}
Example #22
Source File: RcFileFileWriterFactory.java From presto with Apache License 2.0
public RcFileFileWriterFactory(
        HdfsEnvironment hdfsEnvironment,
        TypeManager typeManager,
        NodeVersion nodeVersion,
        DateTimeZone hiveStorageTimeZone,
        FileFormatDataSourceStats stats)
{
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.nodeVersion = requireNonNull(nodeVersion, "nodeVersion is null");
    this.hiveStorageTimeZone = requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
    this.stats = requireNonNull(stats, "stats is null");
}
Example #23
Source File: AtopModule.java From presto with Apache License 2.0
@Override
public void configure(Binder binder)
{
    binder.bind(TypeManager.class).toInstance(typeManager);
    binder.bind(NodeManager.class).toInstance(nodeManager);
    binder.bind(Environment.class).toInstance(new Environment(environment));
    binder.bind(CatalogName.class).toInstance(new CatalogName(catalogName));
    binder.bind(AtopConnector.class).in(Scopes.SINGLETON);
    binder.bind(AtopMetadata.class).in(Scopes.SINGLETON);
    binder.bind(AtopSplitManager.class).in(Scopes.SINGLETON);
    binder.bind(AtopFactory.class).to(atopFactoryClass).in(Scopes.SINGLETON);
    binder.bind(AtopPageSourceProvider.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(AtopConnectorConfig.class);
}
Example #24
Source File: OrcStorageManager.java From presto with Apache License 2.0
public OrcStorageManager(
        String nodeId,
        StorageService storageService,
        Optional<BackupStore> backupStore,
        OrcReaderOptions orcReaderOptions,
        BackupManager backgroundBackupManager,
        ShardRecoveryManager recoveryManager,
        ShardRecorder shardRecorder,
        TypeManager typeManager,
        String connectorId,
        int deletionThreads,
        Duration shardRecoveryTimeout,
        long maxShardRows,
        DataSize maxShardSize,
        DataSize minAvailableSpace)
{
    this.nodeId = requireNonNull(nodeId, "nodeId is null");
    this.storageService = requireNonNull(storageService, "storageService is null");
    this.backupStore = requireNonNull(backupStore, "backupStore is null");
    this.orcReaderOptions = requireNonNull(orcReaderOptions, "orcReaderOptions is null")
            .withMaxReadBlockSize(HUGE_MAX_READ_BLOCK_SIZE);

    backupManager = requireNonNull(backgroundBackupManager, "backgroundBackupManager is null");
    this.recoveryManager = requireNonNull(recoveryManager, "recoveryManager is null");
    this.recoveryTimeout = requireNonNull(shardRecoveryTimeout, "shardRecoveryTimeout is null");

    checkArgument(maxShardRows > 0, "maxShardRows must be > 0");
    this.maxShardRows = min(maxShardRows, MAX_ROWS);
    this.maxShardSize = requireNonNull(maxShardSize, "maxShardSize is null");
    this.minAvailableSpace = requireNonNull(minAvailableSpace, "minAvailableSpace is null");
    this.shardRecorder = requireNonNull(shardRecorder, "shardRecorder is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.deletionExecutor = newFixedThreadPool(deletionThreads, daemonThreadsNamed("raptor-delete-" + connectorId + "-%s"));
    this.commitExecutor = newCachedThreadPool(daemonThreadsNamed("raptor-commit-" + connectorId + "-%s"));
}
Example #25
Source File: HivePageSinkProvider.java From presto with Apache License 2.0
@Inject
public HivePageSinkProvider(
        Set<HiveFileWriterFactory> fileWriterFactories,
        HdfsEnvironment hdfsEnvironment,
        PageSorter pageSorter,
        HiveMetastore metastore,
        PageIndexerFactory pageIndexerFactory,
        TypeManager typeManager,
        HiveConfig config,
        LocationService locationService,
        JsonCodec<PartitionUpdate> partitionUpdateCodec,
        NodeManager nodeManager,
        EventClient eventClient,
        HiveSessionProperties hiveSessionProperties,
        HiveWriterStats hiveWriterStats)
{
    this.fileWriterFactories = ImmutableSet.copyOf(requireNonNull(fileWriterFactories, "fileWriterFactories is null"));
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.pageSorter = requireNonNull(pageSorter, "pageSorter is null");
    this.metastore = requireNonNull(metastore, "metastore is null");
    this.pageIndexerFactory = requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.maxOpenPartitions = config.getMaxPartitionsPerWriter();
    this.maxOpenSortFiles = config.getMaxOpenSortFiles();
    this.writerSortBufferSize = requireNonNull(config.getWriterSortBufferSize(), "writerSortBufferSize is null");
    this.immutablePartitions = config.isImmutablePartitions();
    this.locationService = requireNonNull(locationService, "locationService is null");
    this.writeVerificationExecutor = listeningDecorator(newFixedThreadPool(config.getWriteValidationThreads(), daemonThreadsNamed("hive-write-validation-%s")));
    this.partitionUpdateCodec = requireNonNull(partitionUpdateCodec, "partitionUpdateCodec is null");
    this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
    this.eventClient = requireNonNull(eventClient, "eventClient is null");
    this.hiveSessionProperties = requireNonNull(hiveSessionProperties, "hiveSessionProperties is null");
    this.hiveWriterStats = requireNonNull(hiveWriterStats, "hiveWriterStats is null");
    this.perTransactionMetastoreCacheMaximumSize = config.getPerTransactionMetastoreCacheMaximumSize();
}
Example #26
Source File: IcebergRecordFileWriter.java From presto with Apache License 2.0
public IcebergRecordFileWriter(
        Path path,
        List<String> inputColumnNames,
        StorageFormat storageFormat,
        Properties schema,
        DataSize estimatedWriterSystemMemoryUsage,
        JobConf conf,
        TypeManager typeManager,
        ConnectorSession session)
{
    super(path, inputColumnNames, storageFormat, schema, estimatedWriterSystemMemoryUsage, conf, typeManager, session);
}
Example #27
Source File: HiveRecordCursorProvider.java From presto with Apache License 2.0
Optional<ReaderRecordCursorWithProjections> createRecordCursor(
        Configuration configuration,
        ConnectorSession session,
        Path path,
        long start,
        long length,
        long fileSize,
        Properties schema,
        List<HiveColumnHandle> columns,
        TupleDomain<HiveColumnHandle> effectivePredicate,
        DateTimeZone hiveStorageTimeZone,
        TypeManager typeManager,
        boolean s3SelectPushdownEnabled);
Example #28
Source File: IcebergMetadata.java From presto with Apache License 2.0
public IcebergMetadata(
        HiveMetastore metastore,
        HdfsEnvironment hdfsEnvironment,
        TypeManager typeManager,
        JsonCodec<CommitTaskData> commitTaskCodec)
{
    this.metastore = requireNonNull(metastore, "metastore is null");
    this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
    this.typeManager = requireNonNull(typeManager, "typeManager is null");
    this.commitTaskCodec = requireNonNull(commitTaskCodec, "commitTaskCodec is null");
}
Example #29
Source File: ThriftTableMetadata.java From presto with Apache License 2.0
public ThriftTableMetadata(PrestoThriftTableMetadata thriftTableMetadata, TypeManager typeManager)
{
    this(thriftTableMetadata.getSchemaTableName().toSchemaTableName(),
            columnMetadata(thriftTableMetadata.getColumns(), typeManager),
            Optional.ofNullable(thriftTableMetadata.getComment()),
            thriftTableMetadata.getIndexableKeys() != null ? thriftTableMetadata.getIndexableKeys() : ImmutableList.of());
}
Example #30
Source File: HiveCoercionRecordCursor.java From presto with Apache License 2.0
private static Coercer createCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, BridgingRecordCursor bridgingRecordCursor)
{
    Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
    Type toType = typeManager.getType(toHiveType.getTypeSignature());

    if (toType instanceof VarcharType && (fromHiveType.equals(HIVE_BYTE) || fromHiveType.equals(HIVE_SHORT) || fromHiveType.equals(HIVE_INT) || fromHiveType.equals(HIVE_LONG))) {
        return new IntegerNumberToVarcharCoercer();
    }
    if (fromType instanceof VarcharType && (toHiveType.equals(HIVE_BYTE) || toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
        return new VarcharToIntegerNumberCoercer(toHiveType);
    }
    if (fromHiveType.equals(HIVE_BYTE) && (toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
        return new IntegerNumberUpscaleCoercer();
    }
    if (fromHiveType.equals(HIVE_SHORT) && (toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG))) {
        return new IntegerNumberUpscaleCoercer();
    }
    if (fromHiveType.equals(HIVE_INT) && toHiveType.equals(HIVE_LONG)) {
        return new IntegerNumberUpscaleCoercer();
    }
    if (fromHiveType.equals(HIVE_FLOAT) && toHiveType.equals(HIVE_DOUBLE)) {
        return new FloatToDoubleCoercer();
    }
    if (isArrayType(fromType) && isArrayType(toType)) {
        return new ListCoercer(typeManager, fromHiveType, toHiveType, bridgingRecordCursor);
    }
    if (isMapType(fromType) && isMapType(toType)) {
        return new MapCoercer(typeManager, fromHiveType, toHiveType, bridgingRecordCursor);
    }
    if (isRowType(fromType) && isRowType(toType)) {
        return new StructCoercer(typeManager, fromHiveType, toHiveType, bridgingRecordCursor);
    }

    throw new PrestoException(NOT_SUPPORTED, format("Unsupported coercion from %s to %s", fromHiveType, toHiveType));
}
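As a hedged usage note: only the combinations enumerated above succeed, and anything else throws. For example (call site hypothetical):

    // Supported: widening int -> bigint yields an IntegerNumberUpscaleCoercer.
    Coercer widening = createCoercer(typeManager, HIVE_INT, HIVE_LONG, bridgingRecordCursor);
    // Not supported: narrowing bigint -> int falls through to the final
    // PrestoException(NOT_SUPPORTED, "Unsupported coercion from bigint to int").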