Java Code Examples for scala.collection.JavaConversions#asScalaBuffer()
The following examples show how to use scala.collection.JavaConversions#asScalaBuffer().
Each example is drawn from an open-source project; the original source file and license are noted above the code.
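Before the project examples, here is a minimal sketch of the conversion itself. asScalaBuffer wraps a java.util.List as a scala.collection.mutable.Buffer (which is a Seq) without copying. The class and variable names below are illustrative, and a Scala 2.11-era scala-library is assumed on the classpath; note that JavaConversions is deprecated since Scala 2.12 in favor of scala.collection.JavaConverters.

import java.util.Arrays;
import java.util.List;

import scala.collection.JavaConversions;
import scala.collection.Seq;
import scala.collection.mutable.Buffer;

public class AsScalaBufferSketch {
    public static void main(String[] args) {
        List<String> javaList = Arrays.asList("a", "b", "c");

        // Wraps the Java list; no copy is made, so element changes to
        // javaList remain visible through the returned Buffer.
        Buffer<String> buffer = JavaConversions.asScalaBuffer(javaList);

        // Buffer is a Seq, so it can be passed directly to Scala APIs
        // that expect Seq<T>, as the examples below do.
        Seq<String> seq = buffer;
        System.out.println(seq.size()); // 3
    }
}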
Example 1
Source File: UnionConverter.java From spork with Apache License 2.0
@Override
public RDD<Tuple> convert(List<RDD<Tuple>> predecessors,
        POUnion physicalOperator) throws IOException {
    SparkUtil.assertPredecessorSizeGreaterThan(predecessors, physicalOperator, 0);
    UnionRDD<Tuple> unionRDD = new UnionRDD<Tuple>(sc,
            JavaConversions.asScalaBuffer(predecessors),
            SparkUtil.getManifest(Tuple.class));
    return unionRDD;
}
Example 2
Source File: AbstractParameterizedCommand.java From rug-cli with GNU General Public License v3.0
private ParameterValues collectParameters(ParameterizedRug rug, ParameterValues arguments) {
    Collection<Parameter> parameters = asJavaCollection(rug.parameters());
    if (CommandLineOptions.hasOption("interactive") && !parameters.isEmpty()) {
        LineReader reader = ShellUtils.lineReader(ShellUtils.INTERACTIVE_HISTORY,
                Optional.empty());
        List<ParameterValue> newValues = new ArrayList<>();
        log.newline();
        log.info(Style.cyan(Constants.DIVIDER) + " "
                + Style.bold("Please specify parameter values"));
        log.info(Constants.LEFT_PADDING
                + "Press 'Enter' to accept default or provided values. '*' indicates required parameters.");
        for (Parameter parameter : parameters) {
            log.newline();
            ParameterValue pv = JavaConversions.mapAsJavaMap(arguments.parameterValueMap())
                    .get(parameter.getName());
            String defaultValue = (pv != null ? pv.getValue().toString()
                    : parameter.getDefaultValue());
            String description = org.apache.commons.lang3.StringUtils
                    .capitalize(parameter.getDescription());
            log.info(" " + WordUtils.wrap(description, Constants.WRAP_LENGTH, "\n ", false));
            pv = readParameter(reader, parameter, defaultValue);
            boolean firstAttempt = true;
            while (isInvalid(rug, pv)
                    || ((pv.getValue() == null || pv.getValue().toString().length() == 0)
                            && parameter.isRequired())) {
                log.info(Style.red(" Provided value '%s' is not valid", pv.getValue()));
                if (firstAttempt) {
                    log.newline();
                    log.info(" pattern: %s, min length: %s, max length: %s",
                            parameter.getPattern(),
                            (parameter.getMinLength() >= 0 ? parameter.getMinLength()
                                    : "not defined"),
                            (parameter.getMaxLength() >= 0 ? parameter.getMaxLength()
                                    : "not defined"));
                    firstAttempt = false;
                }
                pv = readParameter(reader, parameter, defaultValue);
            }
            // add the new and validated parameter to project operations arguments
            newValues.add(pv);
        }
        arguments = new SimpleParameterValues(JavaConversions.asScalaBuffer(newValues));
        log.newline();
        ShellUtils.shutdown(reader);
    }
    return arguments;
}
Example 3
Source File: ITZipkinReceiver.java From incubator-retired-htrace with Apache License 2.0
@Test
public void testKafkaTransport() throws Exception {
    String topic = "zipkin";

    // Kafka setup
    EmbeddedZookeeper zkServer = new EmbeddedZookeeper(TestZKUtils.zookeeperConnect());
    ZkClient zkClient = new ZkClient(zkServer.connectString(), 30000, 30000,
            ZKStringSerializer$.MODULE$);
    Properties props = TestUtils.createBrokerConfig(0, TestUtils.choosePort(), false);
    KafkaConfig config = new KafkaConfig(props);
    KafkaServer kafkaServer = TestUtils.createServer(config, new MockTime());
    Buffer<KafkaServer> servers =
            JavaConversions.asScalaBuffer(Collections.singletonList(kafkaServer));
    TestUtils.createTopic(zkClient, topic, 1, 1, servers, new Properties());
    zkClient.close();
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0, 5000);

    // HTrace
    HTraceConfiguration hTraceConfiguration = HTraceConfiguration.fromKeyValuePairs(
            "sampler.classes", "AlwaysSampler",
            "span.receiver.classes", ZipkinSpanReceiver.class.getName(),
            "zipkin.kafka.metadata.broker.list",
            config.advertisedHostName() + ":" + config.advertisedPort(),
            "zipkin.kafka.topic", topic,
            ZipkinSpanReceiver.TRANSPORT_CLASS_KEY, KafkaTransport.class.getName()
    );
    final Tracer tracer = new Tracer.Builder("test-tracer")
            .tracerPool(new TracerPool("test-tracer-pool"))
            .conf(hTraceConfiguration)
            .build();
    String scopeName = "test-kafka-transport-scope";
    TraceScope traceScope = tracer.newScope(scopeName);
    traceScope.close();
    tracer.close();

    // Kafka consumer
    Properties consumerProps = new Properties();
    consumerProps.put("zookeeper.connect", props.getProperty("zookeeper.connect"));
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "testing.group");
    consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "smallest");
    ConsumerConnector connector = kafka.consumer.Consumer.createJavaConsumerConnector(
            new kafka.consumer.ConsumerConfig(consumerProps));
    Map<String, Integer> topicCountMap = new HashMap<>();
    topicCountMap.put(topic, 1);
    Map<String, List<KafkaStream<byte[], byte[]>>> streams =
            connector.createMessageStreams(topicCountMap);
    ConsumerIterator<byte[], byte[]> it = streams.get(topic).get(0).iterator();

    // Test
    Assert.assertTrue("We should have one message in Kafka", it.hasNext());
    Span span = new Span();
    new TDeserializer(new TBinaryProtocol.Factory()).deserialize(span, it.next().message());
    Assert.assertEquals("The span name should match our scope description",
            span.getName(), scopeName);

    kafkaServer.shutdown();
}
Example 4
Source File: TypesUtil.java From incubator-atlas with Apache License 2.0
public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
        ImmutableList<StructTypeDefinition> structs,
        ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
        ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
    return new TypesDef(JavaConversions.asScalaBuffer(enums),
            JavaConversions.asScalaBuffer(structs),
            JavaConversions.asScalaBuffer(traits),
            JavaConversions.asScalaBuffer(classes));
}
Example 5
Source File: TestUserProfileAnomalyEigenEvaluator.java From Eagle with Apache License 2.0
@Test
public void testDetect() {
    UserProfileAnomalyEigenEvaluator eigenEvaluator = new UserProfileAnomalyEigenEvaluator();
    String[] testCmdType = {"getfileinfo", "open", "listStatus", "setTimes", "setPermission",
            "rename", "mkdirs", "create", "setReplication", "contentSummary", "delete",
            "setOwner", "fsck"};
    List<String> tmpCmdTypesAsList = Arrays.asList(testCmdType);
    Seq<String> testCmd = JavaConversions.asScalaBuffer(tmpCmdTypesAsList);
    String testSite = "sandbox";
    long testTimestamp = 14054440;
    String testUser = "test_user";
    RealMatrix testMatrix = new Array2DRowRealMatrix(1, testCmdType.length);
    for (int i = 0; i < testMatrix.getColumnDimension(); i++) {
        testMatrix.addToEntry(0, i, 3.0);
    }
    UserActivityAggModel testAggModel = new UserActivityAggModel(testUser, testMatrix,
            testCmd, testSite, testTimestamp);
    Long testVersion = new Long(1);
    RealMatrix testUMat = new Array2DRowRealMatrix(testCmdType.length, testCmdType.length);
    RealMatrix testDiagonalMat = new Array2DRowRealMatrix(testCmdType.length, testCmdType.length);
    for (int i = 0; i < testCmdType.length; i++) {
        for (int j = 0; j < testCmdType.length; j++) {
            testUMat.addToEntry(i, j, 1.0);
            testDiagonalMat.addToEntry(i, j, 1.0);
        }
    }
    int dimension = testCmdType.length - 1;
    double[] minVector = new double[testCmdType.length];
    double[] maxVector = new double[testCmdType.length];
    for (int i = 0; i < minVector.length; i++) {
        minVector[i] = 1;
        maxVector[i] = 1;
    }
    RealVector testMinVec = new ArrayRealVector(minVector);
    RealVector testMaxVec = new ArrayRealVector(maxVector);
    RealVector[] testPCs = new ArrayRealVector[testCmdType.length];
    for (int i = 0; i < testCmdType.length; i++) {
        testPCs[i] = new ArrayRealVector(testCmdType.length);
        for (int j = 0; j < testPCs[i].getDimension(); j++) {
            testPCs[i].addToEntry(j, 1.0);
        }
    }
    RealVector testMaxL2Norm = new ArrayRealVector(maxVector);
    RealVector testMinL2Norm = new ArrayRealVector(minVector);
    UserCommandStatistics[] userCommandStatistics = new UserCommandStatistics[testCmdType.length];
    for (int i = 0; i < testCmdType.length; i++) {
        userCommandStatistics[i] = new UserCommandStatistics();
        userCommandStatistics[i].setCommandName(testCmdType[i]);
        userCommandStatistics[i].setLowVariant(false);
        userCommandStatistics[i].setMean(1.0);
        userCommandStatistics[i].setStddev(1.0);
    }
    UserProfileEigenModel testEigenModel = new UserProfileEigenModel(testVersion, testSite,
            testUser, testUMat, testDiagonalMat, dimension, testMinVec, testMaxVec, testPCs,
            testMaxL2Norm, testMinL2Norm, userCommandStatistics);

    List<MLCallbackResult> testResults = eigenEvaluator.detect("test_user", "eigen",
            testAggModel, testEigenModel);

    Assert.assertEquals(testResults.size(), testMatrix.getRowDimension());
    for (MLCallbackResult result : testResults) {
        Assert.assertTrue(result.isAnomaly());
    }
}
Example 6
Source File: MySqlAsyncConnection.java From ob1k with Apache License 2.0
public ComposableFuture<QueryResult> sendPreparedStatement(final String query,
        final List<Object> values) {
    final Buffer<Object> scalaValues = JavaConversions.asScalaBuffer(values);
    return ScalaFutureHelper.from(() -> conn.sendPreparedStatement(query, scalaValues));
}
Example 7
Source File: MySqlConnectionPool.java From ob1k with Apache License 2.0
@Override
public ComposableFuture<QueryResult> sendPreparedStatement(final String query,
        final List<Object> values) {
    final Buffer<Object> scalaValues = JavaConversions.asScalaBuffer(values);
    return withMetricsPreparedStatement(
            ScalaFutureHelper.from(() -> _pool.sendPreparedStatement(query, scalaValues)));
}
Example 8
Source File: SparkUtil.java From spork with Apache License 2.0
public static <T> Seq<T> toScalaSeq(List<T> list) {
    return JavaConversions.asScalaBuffer(list);
}
Example 9
Source File: ScalaUtil.java From kafka-spark-consumer with Apache License 2.0
public static <T> Seq<T> toScalaSeq(List<T> list) {
    return JavaConversions.asScalaBuffer(list);
}
Example 10
Source File: BpmnaiUtils.java From bpmn.ai with BSD 3-Clause "New" or "Revised" License
/**
 * Helper method implemented as per
 * https://stackoverflow.com/questions/40741459/scala-collection-seq-doesnt-work-on-java
 *
 * @param values List to be converted to a Scala Seq
 * @param <T> Type of the objects to be converted
 * @return the Scala Seq
 */
public <T> Seq<T> asSeq(List<T> values) {
    return JavaConversions.asScalaBuffer(values);
}
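A hypothetical usage sketch of this helper: Spark's Java API exposes overloads such as Dataset.join(Dataset<?>, Seq<String>) that take a Scala Seq of column names, which plain Java code cannot construct directly. The datasets, the column name "processInstanceId", and the wrapper class below are illustrative, not taken from bpmn.ai itself.

import java.util.Arrays;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import scala.collection.Seq;

public class AsSeqUsageSketch {
    // Join two Datasets on a shared column. The join overload used here
    // expects a scala.collection.Seq<String> of column names, so the
    // helper above bridges from java.util.List.
    public static Dataset<Row> joinOnSharedColumn(BpmnaiUtils utils,
            Dataset<Row> left, Dataset<Row> right) {
        Seq<String> joinColumns = utils.asSeq(Arrays.asList("processInstanceId"));
        return left.join(right, joinColumns);
    }
}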