org.apache.samza.config.Config Java Examples
The following examples show how to use org.apache.samza.config.Config.
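Before the project examples, a minimal sketch of the basic Config accessors may be useful. The keys and values below are made up purely for illustration; MapConfig is used as the concrete, immutable implementation backed by a plain map.

import java.util.HashMap;
import java.util.Map;

import org.apache.samza.config.Config;
import org.apache.samza.config.MapConfig;

public class ConfigSketch {
  public static void main(String[] args) {
    // Hypothetical keys, chosen only to illustrate the accessors.
    Map<String, String> map = new HashMap<>();
    map.put("job.name", "example-job");
    map.put("job.container.count", "2");
    map.put("systems.kafka.samza.factory", "org.apache.samza.system.kafka.KafkaSystemFactory");

    Config config = new MapConfig(map);                         // immutable view over the map
    String jobName = config.get("job.name");                    // plain string lookup
    int containers = config.getInt("job.container.count", 1);   // typed lookup with a default
    Config kafka = config.subset("systems.kafka.", true);       // sub-config with the prefix stripped

    System.out.println(jobName + " / " + containers + " / " + kafka);
  }
}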
Example #1
Source File: LocalApplicationRunner.java From samza with Apache License 2.0
@VisibleForTesting
static Optional<MetadataStoreFactory> getDefaultCoordinatorStreamStoreFactory(Config config) {
  JobConfig jobConfig = new JobConfig(config);
  String coordinatorSystemName = jobConfig.getCoordinatorSystemNameOrNull();
  JobCoordinatorConfig jobCoordinatorConfig = new JobCoordinatorConfig(jobConfig);
  String jobCoordinatorFactoryClassName = jobCoordinatorConfig.getJobCoordinatorFactoryClassName();

  // TODO: Remove restriction to only ZkJobCoordinator after next phase of metadata store abstraction.
  if (StringUtils.isNotBlank(coordinatorSystemName)
      && ZkJobCoordinatorFactory.class.getName().equals(jobCoordinatorFactoryClassName)) {
    return Optional.of(new CoordinatorStreamMetadataStoreFactory());
  }

  LOG.warn("{} or {} not configured, or {} is not {}. No default coordinator stream metadata store will be created.",
      JobConfig.JOB_COORDINATOR_SYSTEM, JobConfig.JOB_DEFAULT_SYSTEM, JobCoordinatorConfig.JOB_COORDINATOR_FACTORY,
      ZkJobCoordinatorFactory.class.getName());
  return Optional.empty();
}
Example #2
Source File: SamzaRunner.java From beam with Apache License 2.0
private ApplicationRunner runSamzaApp(StreamApplication app, Config config) {
  final ApplicationRunner runner = ApplicationRunners.getApplicationRunner(app, config);

  ExternalContext externalContext = null;
  if (listener != null) {
    externalContext = listener.onStart();
  }

  runner.run(externalContext);

  if (listener != null && options.getSamzaExecutionEnvironment() == SamzaExecutionEnvironment.YARN) {
    listener.onSubmit();
  }

  return runner;
}
Example #3
Source File: TestSamzaSqlValidator.java From samza with Apache License 2.0
@Test
public void testDefaultWithNonNullableField() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 1);
  // bool_value is missing which has default value but is non-nullable
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT,
      "Insert into testavro.outputTopic(id) select id from testavro.SIMPLE1");
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);

  try {
    new SamzaSqlValidator(samzaConfig).validate(sqlStmts);
  } catch (SamzaSqlValidatorException e) {
    Assert.assertTrue(e.getMessage().contains("Non-optional field 'bool_value' in output schema is missing"));
    return;
  }

  Assert.fail("Validation test has failed.");
}
Example #4
Source File: KafkaSystemConsumer.java From samza with Apache License 2.0
/**
 * Create a KafkaSystemConsumer for the provided {@code systemName}
 * @param kafkaConsumer kafka Consumer object to be used by this system consumer
 * @param systemName system name for which we create the consumer
 * @param config application config
 * @param clientId clientId from the kafka consumer
 * @param kafkaConsumerProxyFactory factory for creating a KafkaConsumerProxy to use in this consumer
 * @param metrics metrics for this KafkaSystemConsumer
 * @param clock system clock
 */
public KafkaSystemConsumer(Consumer<K, V> kafkaConsumer, String systemName, Config config, String clientId,
    KafkaConsumerProxyFactory<K, V> kafkaConsumerProxyFactory, KafkaSystemConsumerMetrics metrics, Clock clock) {
  super(metrics.registry(), clock, metrics.getClass().getName());

  this.kafkaConsumer = kafkaConsumer;
  this.clientId = clientId;
  this.systemName = systemName;
  this.config = config;
  this.metrics = metrics;

  fetchThresholdBytesEnabled = new KafkaConfig(config).isConsumerFetchThresholdBytesEnabled(systemName);

  // create a sink for passing the messages between the proxy and the consumer
  messageSink = new KafkaConsumerMessageSink();

  // Create the proxy to do the actual message reading.
  proxy = kafkaConsumerProxyFactory.create(this);
  LOG.info("{}: Created proxy {} ", this, proxy);
}
Example #5
Source File: TestKafkaSystemConsumer.java From samza with Apache License 2.0
private KafkaSystemConsumer createConsumer(String fetchMsg, String fetchBytes) {
  final Map<String, String> map = new HashMap<>();

  map.put(JobConfig.JOB_NAME, TEST_JOB);

  map.put(String.format(KafkaConfig.CONSUMER_FETCH_THRESHOLD(), TEST_SYSTEM), fetchMsg);
  map.put(String.format(KafkaConfig.CONSUMER_FETCH_THRESHOLD_BYTES(), TEST_SYSTEM), fetchBytes);
  map.put(String.format("systems.%s.consumer.%s", TEST_SYSTEM, ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG),
      BOOTSTRAP_SERVERS);
  map.put(JobConfig.JOB_NAME, "jobName");

  Config config = new MapConfig(map);
  String clientId = KafkaConsumerConfig.createClientId(TEST_CLIENT_ID, config);
  KafkaConsumerConfig consumerConfig =
      KafkaConsumerConfig.getKafkaSystemConsumerConfig(config, TEST_SYSTEM, clientId);

  final KafkaConsumer<byte[], byte[]> kafkaConsumer = new MockKafkaConsumer(consumerConfig);

  MockKafkaSystemConsumer newKafkaSystemConsumer =
      new MockKafkaSystemConsumer(kafkaConsumer, TEST_SYSTEM, config, TEST_CLIENT_ID,
          new KafkaSystemConsumerMetrics(TEST_SYSTEM, new NoOpMetricsRegistry()), System::currentTimeMillis);

  return newKafkaSystemConsumer;
}
Example #6
Source File: AzureBlobOutputStream.java From samza with Apache License 2.0
@VisibleForTesting
AzureBlobOutputStream(BlockBlobAsyncClient blobAsyncClient, Executor blobThreadPool, AzureBlobWriterMetrics metrics,
    BlobMetadataGeneratorFactory blobMetadataGeneratorFactory, Config blobMetadataGeneratorConfig, String streamName,
    long flushTimeoutMs, int maxBlockFlushThresholdSize, ByteArrayOutputStream byteArrayOutputStream,
    Compression compression) {
  this.byteArrayOutputStream = Optional.of(byteArrayOutputStream);
  this.blobAsyncClient = blobAsyncClient;
  blockList = new ArrayList<>();
  blockNum = 0;
  this.blobThreadPool = blobThreadPool;
  this.flushTimeoutMs = flushTimeoutMs;
  this.maxBlockFlushThresholdSize = maxBlockFlushThresholdSize;
  this.metrics = metrics;
  this.compression = compression;
  this.blobMetadataGeneratorFactory = blobMetadataGeneratorFactory;
  this.blobMetadataGeneratorConfig = blobMetadataGeneratorConfig;
  this.streamName = streamName;
}
Example #7
Source File: TestSamzaSqlRemoteTable.java From samza with Apache License 2.0
@Test(expected = SamzaException.class)
public void testJoinConditionWithMoreThanOneConjunction() throws SamzaSqlValidatorException {
  int numMessages = 20;

  Map<String, String> staticConfigs =
      SamzaSqlTestConfig.fetchStaticConfigsWithFactories(new HashMap<>(), numMessages, true);

  String sql = "Insert into testavro.enrichedPageViewTopic "
      + "select pv.pageKey as __key__, pv.pageKey as pageKey, coalesce(null, 'N/A') as companyName,"
      + " p.name as profileName, p.address as profileAddress "
      + "from testRemoteStore.Profile.`$table` as p "
      + "right join testavro.PAGEVIEW as pv "
      + " on p.__key__ = pv.profileId and p.__key__ = pv.pageKey where p.name is null or p.name <> '0'";

  List<String> sqlStmts = Arrays.asList(sql);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);
}
Example #8
Source File: SamzaEntranceProcessingItem.java From samoa with Apache License 2.0
public SamoaSystemConsumer(String systemName, Config config) {
  String yarnConfHome = config.get(SamzaConfigFactory.YARN_CONF_HOME_KEY);
  // If the property is set; otherwise, assume we are running in local mode and ignore this.
  if (yarnConfHome != null && yarnConfHome.length() > 0) {
    SystemsUtils.setHadoopConfigHome(yarnConfHome);
  }

  String filename = config.get(SamzaConfigFactory.FILE_KEY);
  String filesystem = config.get(SamzaConfigFactory.FILESYSTEM_KEY);
  String name = config.get(SamzaConfigFactory.JOB_NAME_KEY);

  SerializationProxy wrapper =
      (SerializationProxy) SystemsUtils.deserializeObjectFromFileAndKey(filesystem, filename, name);
  this.entranceProcessor = wrapper.processor;
  this.entranceProcessor.onCreate(0);

  // Internal stream from SystemConsumer to EntranceTask, so we need only one partition
  this.systemStreamPartition = new SystemStreamPartition(systemName, wrapper.name, new Partition(0));
}
Example #9
Source File: TestClusterBasedJobCoordinator.java From samza with Apache License 2.0
@Test
public void testPartitionCountMonitorWithDurableStates() {
  configMap.put("stores.mystore.changelog", "mychangelog");
  configMap.put(JobConfig.JOB_CONTAINER_COUNT, "1");
  when(CoordinatorStreamUtil.readConfigFromCoordinatorStream(anyObject())).thenReturn(new MapConfig(configMap));
  Config config = new MapConfig(configMap);

  // mimic job runner code to write the config to coordinator stream
  CoordinatorStreamSystemProducer producer = new CoordinatorStreamSystemProducer(config, mock(MetricsRegistry.class));
  producer.writeConfig("test-job", config);

  ClusterBasedJobCoordinator clusterCoordinator = ClusterBasedJobCoordinator.createFromMetadataStore(config);

  // change the input system stream metadata
  MockSystemFactory.MSG_QUEUES.put(new SystemStreamPartition("kafka", "topic1", new Partition(1)), new ArrayList<>());

  StreamPartitionCountMonitor monitor = clusterCoordinator.getPartitionMonitor();
  monitor.updatePartitionCountMetric();
  assertEquals(clusterCoordinator.getAppStatus(), SamzaApplicationState.SamzaAppStatus.FAILED);
}
Example #10
Source File: TestBaseKeyValueStorageEngineFactory.java From samza with Apache License 2.0
@Test
public void testAccessLogStore() {
  Config config = new MapConfig(BASE_CONFIG, DISABLE_CACHE, ACCESS_LOG_ENABLED);
  // AccessLoggedStore requires a changelog SSP
  StorageEngine storageEngine = callGetStorageEngine(config, CHANGELOG_SSP);
  KeyValueStorageEngine<?, ?> keyValueStorageEngine = baseStorageEngineValidation(storageEngine);
  assertStoreProperties(keyValueStorageEngine.getStoreProperties(), true, true);
  NullSafeKeyValueStore<?, ?> nullSafeKeyValueStore =
      assertAndCast(keyValueStorageEngine.getWrapperStore(), NullSafeKeyValueStore.class);
  AccessLoggedStore<?, ?> accessLoggedStore =
      assertAndCast(nullSafeKeyValueStore.getStore(), AccessLoggedStore.class);
  SerializedKeyValueStore<?, ?> serializedKeyValueStore =
      assertAndCast(accessLoggedStore.getStore(), SerializedKeyValueStore.class);
  LoggedStore<?, ?> loggedStore = assertAndCast(serializedKeyValueStore.getStore(), LoggedStore.class);
  // type generics don't match due to wildcard type, but checking reference equality, so type generics don't matter
  //noinspection AssertEqualsBetweenInconvertibleTypes
  assertEquals(this.rawKeyValueStore, loggedStore.getStore());
}
Example #11
Source File: TestSamzaSqlRemoteTable.java From samza with Apache License 2.0
@Test (expected = AssertionError.class)
public void testSinkEndToEndWithoutKey() throws SamzaSqlValidatorException {
  int numMessages = 20;

  RemoteStoreIOResolverTestFactory.records.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);

  String sql = "Insert into testRemoteStore.testTable.`$table`(id,name) select id, name from testavro.SIMPLE1";
  List<String> sqlStmts = Arrays.asList(sql);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  Assert.assertEquals(numMessages, RemoteStoreIOResolverTestFactory.records.size());
}
Example #12
Source File: TestStreamProcessor.java From samza with Apache License 2.0
/**
 * Should be able to create task instances from the provided task factory.
 */
@Test
public void testStreamProcessorWithStreamTaskFactory() {
  final String testSystem = "test-system";
  final String inputTopic = "numbers2";
  final String outputTopic = "output2";
  final int messageCount = 20;

  final Config configs = new MapConfig(createConfigs("1", testSystem, inputTopic, outputTopic, messageCount));
  createTopics(inputTopic, outputTopic);
  final TestStubs stubs = new TestStubs(configs, IdentityStreamTask::new, bootstrapServers());

  produceMessages(stubs.producer, inputTopic, messageCount);
  run(stubs.processor, stubs.shutdownLatch);
  verifyNumMessages(stubs.consumer, outputTopic, messageCount);
}
Example #13
Source File: TestQueryTranslator.java From samza with Apache License 2.0
@Test (expected = SamzaException.class)
public void testTranslateStreamStreamJoin() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 1);
  String sql = "Insert into testavro.enrichedPageViewTopic(profileName, pageKey)"
      + " select p.name as profileName, pv.pageKey"
      + " from testavro.PAGEVIEW as pv"
      + " join testavro.PROFILE as p"
      + " on p.id = pv.profileId";
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql);
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  List<SamzaSqlQueryParser.QueryInfo> queryInfo = fetchQueryInfo(sqlStmts);
  SamzaSqlApplicationConfig samzaSqlApplicationConfig = new SamzaSqlApplicationConfig(new MapConfig(config),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSources).flatMap(Collection::stream)
          .collect(Collectors.toList()),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSink).collect(Collectors.toList()));

  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(streamApp -> { }, samzaConfig);
  QueryTranslator translator = new QueryTranslator(streamAppDesc, samzaSqlApplicationConfig);

  translator.translate(queryInfo.get(0), streamAppDesc, 0);
}
Example #14
Source File: TestSamzaSqlEndToEnd.java From samza with Apache License 2.0
@Test
public void testEndToEndCompoundBooleanCheck() throws SamzaSqlValidatorException {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);

  String sql1 = "Insert into testavro.outputTopic"
      + " select * from testavro.COMPLEX1 where id >= 0 and bool_value IS TRUE";
  List<String> sqlStmts = Arrays.asList(sql1);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<OutgoingMessageEnvelope> outMessages = new ArrayList<>(TestAvroSystemFactory.messages);
  Assert.assertEquals(numMessages / 2, outMessages.size());
}
Example #15
Source File: TestSamzaSqlValidator.java From samza with Apache License 2.0
@Test (expected = SamzaSqlValidatorException.class)
public void testIllegitFieldEndingInZeroValidation() throws SamzaSqlValidatorException {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(1);
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT,
      "Insert into testavro.outputTopic select id, true as bool_value, false as non_existing_name0"
          + " from testavro.level1.level2.SIMPLE1 as s where s.id = 1");
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  new SamzaSqlValidator(samzaConfig).validate(sqlStmts);
}
Example #16
Source File: AsyncApplicationExample.java From samza with Apache License 2.0
public static void main(String[] args) {
  CommandLine cmdLine = new CommandLine();
  Config config = cmdLine.loadConfig(cmdLine.parser().parse(args));
  ApplicationRunner runner = ApplicationRunners.getApplicationRunner(new AsyncApplicationExample(), config);
  runner.run();
  runner.waitForFinish();
}
Example #17
Source File: BaseKeyValueStorageEngineFactory.java From samza with Apache License 2.0
/** * If "metrics.timer.enabled" is enabled, then returns a {@link HighResolutionClock} that uses * {@link System#nanoTime}. * Otherwise, returns a clock which always returns 0. */ private static HighResolutionClock buildClock(Config config) { MetricsConfig metricsConfig = new MetricsConfig(config); if (metricsConfig.getMetricsTimerEnabled()) { return System::nanoTime; } else { return () -> 0; } }
Example #18
Source File: TestStreamApplication.java From samza with Apache License 2.0
public static StreamApplication getInstance(
    String systemName,
    List<String> inputTopics,
    String outputTopic,
    CountDownLatch processedMessageLatch,
    StreamApplicationCallback callback,
    CountDownLatch kafkaEventsConsumedLatch,
    Config config) {
  String appName = new ApplicationConfig(config).getGlobalAppId();
  String processorName = config.get(JobConfig.PROCESSOR_ID);
  registerLatches(processedMessageLatch, kafkaEventsConsumedLatch, callback, appName, processorName);

  StreamApplication app = new TestStreamApplication(systemName, inputTopics, outputTopic, appName, processorName);
  return app;
}
Example #19
Source File: TestStreamProcessor.java From samza with Apache License 2.0
public TestableStreamProcessor(Config config,
    Map<String, MetricsReporter> customMetricsReporters,
    StreamTaskFactory streamTaskFactory,
    ProcessorLifecycleListener processorListener,
    JobCoordinator jobCoordinator,
    SamzaContainer container,
    Duration runLoopShutdownDuration) {
  super("TEST_PROCESSOR_ID", config, customMetricsReporters, streamTaskFactory, Optional.empty(), Optional.empty(),
      Optional.empty(), sp -> processorListener, jobCoordinator, Mockito.mock(MetadataStore.class));
  this.container = container;
  this.runLoopShutdownDuration = runLoopShutdownDuration;
}
Example #20
Source File: ElasticsearchSystemFactory.java From samza with Apache License 2.0
@Override
public SystemProducer getProducer(String name, Config config, MetricsRegistry metricsRegistry) {
  ElasticsearchConfig elasticsearchConfig = new ElasticsearchConfig(name, config);
  return new ElasticsearchSystemProducer(name,
      getBulkProcessorFactory(elasticsearchConfig),
      getClient(elasticsearchConfig),
      getIndexRequestFactory(elasticsearchConfig),
      new ElasticsearchSystemProducerMetrics(name, metricsRegistry));
}
Example #21
Source File: TestAllSspToSingleTaskGrouper.java From samza with Apache License 2.0
@Test(expected = SamzaException.class)
public void testLocalStreamWithEmptySsps() {
  HashSet<SystemStreamPartition> allSSPs = new HashSet<>();
  HashMap<String, String> configMap = new HashMap<>();
  configMap.put("job.coordinator.factory", "org.apache.samza.standalone.PassthroughJobCoordinatorFactory");
  configMap.put("processor.list", "1");
  Config config = new MapConfig(configMap);

  SystemStreamPartitionGrouper grouper = grouperFactory.getSystemStreamPartitionGrouper(config);
  grouper.group(allSSPs);
}
Example #22
Source File: YarnResourceManagerFactory.java From samza with Apache License 2.0
@Override
public ClusterResourceManager getClusterResourceManager(ClusterResourceManager.Callback callback, SamzaApplicationState state) {
  log.info("Creating an instance of a cluster resource manager for Yarn. ");
  JobModelManager jobModelManager = state.jobModelManager;
  Config config = jobModelManager.jobModel().getConfig();
  YarnClusterResourceManager manager = new YarnClusterResourceManager(config, jobModelManager, callback, state);
  return manager;
}
Example #23
Source File: SamzaSqlExecutionContext.java From samza with Apache License 2.0
public ScalarUdf createInstance(String clazz, String udfName, Context context) {
  // Configs should be same for all the UDF methods within a UDF. Hence taking the first one.
  Config udfConfig = udfMetadata.get(udfName).get(0).getUdfConfig();
  ScalarUdf scalarUdf = ReflectionUtil.getObj(clazz, ScalarUdf.class);
  scalarUdf.init(udfConfig, context);
  return scalarUdf;
}
Example #24
Source File: TestRemoteTableDescriptor.java From samza with Apache License 2.0
private Context createMockContext(TableDescriptor tableDescriptor) {
  Context context = mock(Context.class);

  ContainerContext containerContext = mock(ContainerContext.class);
  when(context.getContainerContext()).thenReturn(containerContext);

  MetricsRegistry metricsRegistry = mock(MetricsRegistry.class);
  when(metricsRegistry.newTimer(anyString(), anyString())).thenReturn(mock(Timer.class));
  when(metricsRegistry.newCounter(anyString(), anyString())).thenReturn(mock(Counter.class));
  when(containerContext.getContainerMetricsRegistry()).thenReturn(metricsRegistry);

  TaskContextImpl taskContext = mock(TaskContextImpl.class);
  when(context.getTaskContext()).thenReturn(taskContext);

  TaskName taskName = new TaskName("MyTask");
  TaskModel taskModel = mock(TaskModel.class);
  when(taskModel.getTaskName()).thenReturn(taskName);
  when(context.getTaskContext().getTaskModel()).thenReturn(taskModel);

  ContainerModel containerModel = mock(ContainerModel.class);
  when(containerModel.getTasks()).thenReturn(ImmutableMap.of(taskName, taskModel));
  when(containerContext.getContainerModel()).thenReturn(containerModel);

  String containerId = "container-1";
  JobModel jobModel = mock(JobModel.class);
  when(taskContext.getJobModel()).thenReturn(jobModel);
  when(jobModel.getContainers()).thenReturn(ImmutableMap.of(containerId, containerModel));

  JobContext jobContext = mock(JobContext.class);
  Config jobConfig = new MapConfig(tableDescriptor.toConfig(new MapConfig()));
  when(jobContext.getConfig()).thenReturn(jobConfig);
  when(context.getJobContext()).thenReturn(jobContext);

  return context;
}
Example #25
Source File: TestSamzaSqlEndToEnd.java From samza with Apache License 2.0
@Ignore
@Test
public void testEndToEndStreamTableTableJoin() throws Exception {
  int numMessages = 20;

  TestAvroSystemFactory.messages.clear();
  Map<String, String> staticConfigs = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(numMessages);

  String sql = "Insert into testavro.enrichedPageViewTopic "
      + "select pv.pageKey as __key__, pv.pageKey as pageKey, c.name as companyName, p.name as profileName,"
      + " p.address as profileAddress "
      + "from testavro.PAGEVIEW as pv "
      + "join testavro.PROFILE.`$table` as p "
      + " on MyTest(p.id) = MyTest(pv.profileId) "
      + " join testavro.COMPANY.`$table` as c "
      + " on MyTest(p.companyId) = MyTest(c.id)";

  List<String> sqlStmts = Arrays.asList(sql);
  staticConfigs.put(SamzaSqlApplicationConfig.CFG_SQL_STMTS_JSON, JsonUtil.toJson(sqlStmts));

  Config config = new MapConfig(staticConfigs);
  new SamzaSqlValidator(config).validate(sqlStmts);

  runApplication(config);

  List<String> outMessages = TestAvroSystemFactory.messages.stream()
      .map(x -> ((GenericRecord) x.getMessage()).get("pageKey").toString() + ","
          + ((GenericRecord) x.getMessage()).get("profileName").toString() + ","
          + ((GenericRecord) x.getMessage()).get("companyName").toString())
      .collect(Collectors.toList());
  Assert.assertEquals(numMessages, outMessages.size());
  List<String> expectedOutMessages = TestAvroSystemFactory.getPageKeyProfileCompanyNameJoin(numMessages);
  Assert.assertEquals(expectedOutMessages, outMessages);
}
Example #26
Source File: TestStreamUtil.java From samza with Apache License 2.0
@Test(expected = IllegalArgumentException.class)
public void testGetStreamSystemNameArgInvalid() {
  Config config = buildStreamConfig(STREAM_ID,
      StreamConfig.PHYSICAL_NAME, TEST_PHYSICAL_NAME,
      StreamConfig.SYSTEM, TEST_SYSTEM_INVALID);

  StreamUtil.getStreamSpec(STREAM_ID, new StreamConfig(config));
}
Example #27
Source File: SamzaTaskProxy.java From samza with Apache License 2.0
/**
 * Initialize {@link CoordinatorStreamSystemConsumer} based upon {@link JobInstance} parameter.
 * @param jobInstance the job instance to get CoordinatorStreamSystemConsumer for.
 * @return built and initialized CoordinatorStreamSystemConsumer.
 */
protected CoordinatorStreamSystemConsumer initializeCoordinatorStreamConsumer(JobInstance jobInstance) {
  Config coordinatorSystemConfig = getCoordinatorSystemConfig(jobInstance);
  LOG.debug("Using config: {} to create coordinatorStream consumer.", coordinatorSystemConfig);
  CoordinatorStreamSystemConsumer consumer =
      new CoordinatorStreamSystemConsumer(coordinatorSystemConfig, METRICS_REGISTRY);
  LOG.debug("Registering coordinator system stream consumer.");
  consumer.register();
  LOG.debug("Starting coordinator system stream consumer.");
  consumer.start();
  LOG.debug("Bootstrapping coordinator system stream consumer.");
  consumer.bootstrap();
  return consumer;
}
Example #28
Source File: TestQueryTranslator.java From samza with Apache License 2.0
@Test (expected = SamzaException.class)
public void testTranslateStreamTableInnerJoinWithMissingStream() {
  Map<String, String> config = SamzaSqlTestConfig.fetchStaticConfigsWithFactories(configs, 10);
  String configIOResolverDomain =
      String.format(SamzaSqlApplicationConfig.CFG_FMT_SOURCE_RESOLVER_DOMAIN, "config");
  config.put(configIOResolverDomain + SamzaSqlApplicationConfig.CFG_FACTORY,
      ConfigBasedIOResolverFactory.class.getName());
  String sql = "Insert into testavro.enrichedPageViewTopic(profileName, pageKey)"
      + " select p.name as profileName, pv.pageKey"
      + " from testavro.PAGEVIEW as pv"
      + " join testavro.`$table` as p"
      + " on p.id = pv.profileId";
  config.put(SamzaSqlApplicationConfig.CFG_SQL_STMT, sql);
  Config samzaConfig = SamzaSqlApplicationRunner.computeSamzaConfigs(true, new MapConfig(config));

  List<String> sqlStmts = fetchSqlFromConfig(config);
  List<SamzaSqlQueryParser.QueryInfo> queryInfo = fetchQueryInfo(sqlStmts);
  SamzaSqlApplicationConfig samzaSqlApplicationConfig = new SamzaSqlApplicationConfig(new MapConfig(config),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSources).flatMap(Collection::stream)
          .collect(Collectors.toList()),
      queryInfo.stream().map(SamzaSqlQueryParser.QueryInfo::getSink).collect(Collectors.toList()));

  StreamApplicationDescriptorImpl streamAppDesc = new StreamApplicationDescriptorImpl(streamApp -> { }, samzaConfig);
  QueryTranslator translator = new QueryTranslator(streamAppDesc, samzaSqlApplicationConfig);

  translator.translate(queryInfo.get(0), streamAppDesc, 0);
}
Example #29
Source File: SamzaSumDemo.java From scotty-window-processor with Apache License 2.0
public static void main(String[] args) {
  String[] configArgs = {"--config-factory=org.apache.samza.config.factories.PropertiesConfigFactory",
      "--config-path=samza-connector/src/main/Properties/config.properties"};
  CommandLine cmdLine = new CommandLine();
  OptionSet options = cmdLine.parser().parse(configArgs);
  Config config = cmdLine.loadConfig(options);
  LocalApplicationRunner runner = new LocalApplicationRunner(new SamzaSumDemo(), config);
  runner.run();
  runner.waitForFinish();
}
Example #30
Source File: TestLocalTableWithConfigRewriterEndToEnd.java From samza with Apache License 2.0
@Override
public Config rewrite(String name, Config config) {
  List<TableDescriptor> descriptors = Arrays.asList(
      new InMemoryTableDescriptor("t1", KVSerde.of(new IntegerSerde(), new TestTableData.PageViewJsonSerde())),
      new InMemoryTableDescriptor("t2", KVSerde.of(new IntegerSerde(), new StringSerde())));
  Map<String, String> serdeConfig = TableConfigGenerator.generateSerdeConfig(descriptors);
  Map<String, String> tableConfig = TableConfigGenerator.generate(new MapConfig(config, serdeConfig), descriptors);
  return new MapConfig(config, serdeConfig, tableConfig);
}