Java Code Examples for scala.collection.Iterator#next()
The following examples show how to use scala.collection.Iterator#next().
You can go to the original project or source file by following the link above each example.
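All of the examples share one basic pattern: obtain a scala.collection.Iterator from a Scala collection and drain it with hasNext() and next(). Here is a minimal self-contained sketch of that pattern (ours, not taken from any of the projects below; it assumes only that scala-library is on the classpath):

import java.util.Arrays;
import java.util.List;

import scala.collection.Iterator;
import scala.collection.JavaConverters;

public class ScalaIteratorDemo {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("a", "b", "c");
        // Wrap the Java list as a Scala Buffer, then walk its Scala iterator.
        Iterator<String> it = JavaConverters.asScalaBuffer(source).iterator();
        while (it.hasNext()) {
            String element = it.next(); // scala.collection.Iterator#next()
            System.out.println(element);
        }
    }
}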
Example 1
Source File: KafkaAuthBinding.java From incubator-sentry with Apache License 2.0
public boolean removeAcls(scala.collection.immutable.Set<Acl> acls, final Resource resource) {
    verifyAcls(acls);
    LOG.info("Removing Acl: acl->" + acls + " resource->" + resource);
    final Iterator<Acl> iterator = acls.iterator();
    while (iterator.hasNext()) {
        final Acl acl = iterator.next();
        final String role = getRole(acl);
        try {
            execute(new Command<Void>() {
                @Override
                public Void run(SentryGenericServiceClient client) throws Exception {
                    client.dropPrivilege(requestorName, role, toTSentryPrivilege(acl, resource));
                    return null;
                }
            });
        } catch (KafkaException kex) {
            LOG.error("Failed to remove acls.", kex);
            return false;
        }
    }
    return true;
}
Example 2
Source File: OpenKoreanTextPhraseExtractor.java From elasticsearch-analysis-openkoreantext with Apache License 2.0
private Seq<KoreanToken> convertPhrasesToTokens(Seq<KoreanPhrase> phrases) {
    KoreanToken[] tokens = new KoreanToken[phrases.length()];

    Iterator<KoreanPhrase> iterator = phrases.iterator();
    int i = 0;
    while (iterator.hasNext()) {
        KoreanPhrase phrase = iterator.next();
        tokens[i++] = new KoreanToken(phrase.text(), phrase.pos(), phrase.offset(), phrase.length(),
                scala.Option.apply(null), false);
    }

    Arrays.sort(tokens, (o1, o2) -> {
        if (o1.offset() == o2.offset())
            return 0;
        return o1.offset() < o2.offset() ? -1 : 1;
    });

    return JavaConverters.asScalaBuffer(Arrays.asList(tokens)).toSeq();
}
Example 3
Source File: ServiceKafkaClient.java From ranger with Apache License 2.0
private List<String> getTopicList(List<String> ignoreTopicList) throws Exception {
    List<String> ret = new ArrayList<String>();

    int sessionTimeout = 5000;
    int connectionTimeout = 10000;
    ZooKeeperClient zookeeperClient = new ZooKeeperClient(zookeeperConnect, sessionTimeout, connectionTimeout,
            1, Time.SYSTEM, "kafka.server", "SessionExpireListener", Option.empty());
    try (KafkaZkClient kafkaZkClient = new KafkaZkClient(zookeeperClient, true, Time.SYSTEM)) {
        Iterator<String> iter = kafkaZkClient.getAllTopicsInCluster().iterator();
        while (iter.hasNext()) {
            String topic = iter.next();
            if (ignoreTopicList == null || !ignoreTopicList.contains(topic)) {
                ret.add(topic);
            }
        }
    }
    return ret;
}
Example 4
Source File: KafkaAuthBinding.java From incubator-sentry with Apache License 2.0
public void addAcls(scala.collection.immutable.Set<Acl> acls, final Resource resource) {
    verifyAcls(acls);
    LOG.info("Adding Acl: acl->" + acls + " resource->" + resource);
    final Iterator<Acl> iterator = acls.iterator();
    while (iterator.hasNext()) {
        final Acl acl = iterator.next();
        final String role = getRole(acl);
        if (!roleExists(role)) {
            throw new KafkaException("Can not add Acl for non-existent Role: " + role);
        }
        execute(new Command<Void>() {
            @Override
            public Void run(SentryGenericServiceClient client) throws Exception {
                client.grantPrivilege(requestorName, role, COMPONENT_NAME, toTSentryPrivilege(acl, resource));
                return null;
            }
        });
    }
}
Example 5
Source File: ParserQueryFunctionsTest.java From odata with Apache License 2.0
private void testQueryFunction(String operator) throws ODataException {
    EqExpr expr = getExprFromOperator(operator);
    MethodCallExpr call = (MethodCallExpr) expr.left();
    assertThat(call.methodName(), is(operator));

    List<Expression> args = call.args();
    Iterator iter = args.iterator();
    while (iter.hasNext()) {
        Object obj = iter.next();
        if (obj instanceof EntityPathExpr) {
            EntityPathExpr entityPathExpr = (EntityPathExpr) obj;
            PropertyPathExpr propertyPath = (PropertyPathExpr) entityPathExpr.subPath().get();
            assertThat(propertyPath.propertyName(), is("name"));
        }
    }

    LiteralExpr literal = (LiteralExpr) expr.right();
    NumberLiteral number = (NumberLiteral) literal.value();
    assertThat(number.value(), is(new BigDecimal(new java.math.BigDecimal(19))));
}
Example 6
Source File: ParserLogicalTest.java From odata with Apache License 2.0
private void processQueryFunction(FilterOption option, String boolMethod) {
    BooleanMethodCallExpr methodCall = (BooleanMethodCallExpr) option.expression();
    assertThat(methodCall.methodName(), is(boolMethod));

    List<Expression> args = methodCall.args();
    Iterator iterator = args.iterator();
    while (iterator.hasNext()) {
        Object cursor = iterator.next();
        if (cursor instanceof EntityPathExpr) {
            EntityPathExpr pathExpr = (EntityPathExpr) cursor;
            PropertyPathExpr path = (PropertyPathExpr) pathExpr.subPath().get();
            assertThat(path.propertyName(), is("name"));
        } else if (cursor instanceof LiteralExpr) {
            LiteralExpr literalExpr = (LiteralExpr) cursor;
            StringLiteral stringLiteral = (StringLiteral) literalExpr.value();
            assertThat(stringLiteral.value(), is("John"));
        }
    }
}
Example 7
Source File: KafkaAuthBinding.java From incubator-sentry with Apache License 2.0
private java.util.Map<Resource, scala.collection.immutable.Set<Acl>> rolePrivilegesToResourceAcls(
        java.util.Map<String, scala.collection.immutable.Set<TSentryPrivilege>> rolePrivilegesMap) {
    final java.util.Map<Resource, scala.collection.immutable.Set<Acl>> resourceAclsMap = new HashMap<>();
    for (String role : rolePrivilegesMap.keySet()) {
        scala.collection.immutable.Set<TSentryPrivilege> privileges = rolePrivilegesMap.get(role);
        final Iterator<TSentryPrivilege> iterator = privileges.iterator();
        while (iterator.hasNext()) {
            TSentryPrivilege privilege = iterator.next();
            final List<TAuthorizable> authorizables = privilege.getAuthorizables();
            String host = null;
            String operation = privilege.getAction();
            for (TAuthorizable tAuthorizable : authorizables) {
                if (tAuthorizable.getType().equals(KafkaAuthorizable.AuthorizableType.HOST.name())) {
                    host = tAuthorizable.getName();
                } else {
                    Resource resource = new Resource(ResourceType$.MODULE$.fromString(tAuthorizable.getType()),
                            tAuthorizable.getName());
                    if (operation.equals("*")) {
                        operation = "All";
                    }
                    Acl acl = new Acl(new KafkaPrincipal("role", role), Allow$.MODULE$, host,
                            Operation$.MODULE$.fromString(operation));
                    Set<Acl> newAclsJava = new HashSet<Acl>();
                    newAclsJava.add(acl);
                    addExistingAclsForResource(resourceAclsMap, resource, newAclsJava);
                    final scala.collection.mutable.Set<Acl> aclScala = JavaConversions.asScalaSet(newAclsJava);
                    resourceAclsMap.put(resource, aclScala.<Acl>toSet());
                }
            }
        }
    }
    return resourceAclsMap;
}
Example 8
Source File: SparkSessionBuilderImpl.java From beakerx with Apache License 2.0
public SparkConf getSparkConf() {
    SparkConf sparkConf = new SparkConf();
    Iterator iterator = getConfigIterator();
    while (iterator.hasNext()) {
        Tuple2 x = (Tuple2) iterator.next();
        sparkConf.set((String) (x)._1, (String) (x)._2);
    }
    return sparkConf;
}
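The loop above iterates with a raw Iterator and casts each entry. For comparison, a typed variant (our sketch, not from the beakerx project) avoids the casts, assuming the entries are Tuple2<String, String> as SparkConf.getAll() returns:

static SparkConf toSparkConf(scala.collection.Iterator<scala.Tuple2<String, String>> entries) {
    SparkConf conf = new SparkConf();
    while (entries.hasNext()) {
        // next() returns a typed tuple, so no casts are needed.
        scala.Tuple2<String, String> entry = entries.next();
        conf.set(entry._1(), entry._2());
    }
    return conf;
}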
Example 9
Source File: SparkSqlInterpreter.java From Explorer with Apache License 2.0
public int getProgress() {
    SQLContext sqlc = getSparkInterpreter().getSQLContext();
    SparkContext sc = sqlc.sparkContext();
    JobProgressListener sparkListener = getSparkInterpreter().getJobProgressListener();
    int completedTasks = 0;
    int totalTasks = 0;

    DAGScheduler scheduler = sc.dagScheduler();
    HashSet<ActiveJob> jobs = scheduler.activeJobs();
    Iterator<ActiveJob> it = jobs.iterator();
    while (it.hasNext()) {
        ActiveJob job = it.next();
        String g = (String) job.properties().get("spark.jobGroup.id");
        if (jobGroup.equals(g)) {
            int[] progressInfo = null;
            if (sc.version().startsWith("1.0")) {
                progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
            } else if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
                progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
            } else {
                logger.warn("Spark {} getting progress information not supported", sc.version());
                continue;
            }
            totalTasks += progressInfo[0];
            completedTasks += progressInfo[1];
        }
    }

    if (totalTasks == 0) {
        return 0;
    }
    return completedTasks * 100 / totalTasks;
}
Example 10
Source File: SparkInterpreter.java From Explorer with Apache License 2.0
public int getProgress() {
    int completedTasks = 0;
    int totalTasks = 0;

    DAGScheduler scheduler = context.getConnector().dagScheduler();
    if (scheduler == null) {
        return 0;
    }
    HashSet<ActiveJob> jobs = scheduler.activeJobs();
    if (jobs == null || jobs.size() == 0) {
        return 0;
    }
    Iterator<ActiveJob> it = jobs.iterator();
    while (it.hasNext()) {
        ActiveJob job = it.next();
        String g = (String) job.properties().get("spark.jobGroup.id");
        if (jobGroup.equals(g)) {
            int[] progressInfo = null;
            if (context.getConnector().version().startsWith("1.0")) {
                progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
            } else {
                progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
            }
            totalTasks += progressInfo[0];
            completedTasks += progressInfo[1];
        }
    }

    if (totalTasks == 0) {
        return 0;
    }
    return completedTasks * 100 / totalTasks;
}
Example 11
Source File: ExpressionParserTest.java From odata with Apache License 2.0
@Test
public void testEntitySetRootElement() {
    ODataUriParser parser = new ODataUriParser(model);
    ODataUri target = parser.parseUri(SERVICE_ROOT
            + "Customers?$filter=Phone eq $root/Customers('A1245')/Phone");
    assertThat(target.serviceRoot() + "/", is(SERVICE_ROOT));

    ResourcePathUri resourcePathUri = (ResourcePathUri) target.relativeUri();
    List<QueryOption> options = resourcePathUri.options();
    assertThat(options.size(), is(1));

    Iterator<QueryOption> iter = options.iterator();
    while (iter.hasNext()) {
        Object obj = iter.next();
        if (obj instanceof FilterOption) {
            FilterOption option = (FilterOption) obj;
            EqExpr expr = (EqExpr) option.expression();

            EntityPathExpr pathExpr = (EntityPathExpr) expr.left();
            Option<PathExpr> subPath = pathExpr.subPath();
            PropertyPathExpr propertyPath = (PropertyPathExpr) subPath.get();
            assertThat(propertyPath.propertyName(), is(notNullValue()));
            assertThat(propertyPath.propertyName(), is("Phone"));

            EntitySetRootExpr rootExpr = (EntitySetRootExpr) expr.right();
            assertThat(rootExpr.entitySetName(), is("Customers"));
            SimpleKeyPredicate predicate = (SimpleKeyPredicate) rootExpr.keyPredicate();
            StringLiteral value = (StringLiteral) predicate.value();
            assertThat(value.value(), is("A1245"));
        }
    }
}
Example 12
Source File: GlobalWatermarkHolder.java From beam with Apache License 2.0
private static Map<Integer, SparkWatermarks> fetchSparkWatermarks(BlockManager blockManager) {
    final Option<BlockResult> blockResultOption = blockManager.get(WATERMARKS_BLOCK_ID, WATERMARKS_TAG);
    if (blockResultOption.isDefined()) {
        Iterator<Object> data = blockResultOption.get().data();
        Map<Integer, SparkWatermarks> next = (Map<Integer, SparkWatermarks>) data.next();

        // Spark 2 only triggers completion at the end of the iterator.
        while (data.hasNext()) {
            // NO-OP
        }

        return next;
    } else {
        return null;
    }
}
Example 13
Source File: CMMModel.java From vn.vitk with GNU General Public License v3.0
@Override
public CMMModel load(String path) {
    org.apache.spark.ml.util.DefaultParamsReader.Metadata metadata =
            DefaultParamsReader.loadMetadata(path, sc(), CMMModel.class.getName());
    String pipelinePath = new Path(path, "pipelineModel").toString();
    PipelineModel pipelineModel = PipelineModel.load(pipelinePath);
    String dataPath = new Path(path, "data").toString();
    DataFrame df = sqlContext().read().format("parquet").load(dataPath);
    Row row = df.select("markovOrder", "weights", "tagDictionary").head();
    // load the Markov order
    MarkovOrder order = MarkovOrder.values()[row.getInt(0) - 1];
    // load the weight vector
    Vector w = row.getAs(1);
    // load the tag dictionary
    @SuppressWarnings("unchecked")
    scala.collection.immutable.HashMap<String, WrappedArray<Integer>> td =
            (scala.collection.immutable.HashMap<String, WrappedArray<Integer>>) row.get(2);
    Map<String, Set<Integer>> tagDict = new HashMap<String, Set<Integer>>();
    Iterator<Tuple2<String, WrappedArray<Integer>>> iterator = td.iterator();
    while (iterator.hasNext()) {
        Tuple2<String, WrappedArray<Integer>> tuple = iterator.next();
        Set<Integer> labels = new HashSet<Integer>();
        scala.collection.immutable.List<Integer> list = tuple._2().toList();
        for (int i = 0; i < list.size(); i++)
            labels.add(list.apply(i));
        tagDict.put(tuple._1(), labels);
    }
    // build a CMM model
    CMMModel model = new CMMModel(pipelineModel, w, order, tagDict);
    DefaultParamsReader.getAndSetParams(model, metadata);
    return model;
}
Example 14
Source File: ZkConsumerCommand.java From jeesuite-libs with Apache License 2.0
public List<BrokerInfo> fetchAllBrokers() {
    List<BrokerInfo> result = new ArrayList<>();
    Seq<Broker> brokers = zkUtils.getAllBrokersInCluster();
    Iterator<Broker> iterator = brokers.toList().iterator();
    while (iterator.hasNext()) {
        Broker broker = iterator.next();
        Node node = broker.getNode(ListenerName.forSecurityProtocol(SecurityProtocol.PLAINTEXT)).get();
        result.add(new BrokerInfo(node.idString(), node.host(), node.port()));
    }
    return result;
}
Example 15
Source File: OpenKoreanTextStemmer.java From elasticsearch-analysis-openkoreantext with Apache License 2.0
@Override
protected Seq<KoreanToken> perform(Seq<KoreanToken> tokens) {
    KoreanToken[] performed = new KoreanToken[tokens.length()];
    int i = 0;
    Iterator<KoreanToken> tokenIterator = tokens.iterator();
    while (tokenIterator.hasNext()) {
        KoreanToken token = tokenIterator.next();
        performed[i++] = token.stem().nonEmpty() ? stem(token) : token;
    }
    return JavaConverters.asScalaBuffer(Arrays.asList(performed)).toSeq();
}
Example 16
Source File: KafkaAuthBinding.java From incubator-sentry with Apache License 2.0
private void addExistingAclsForResource(java.util.Map<Resource, scala.collection.immutable.Set<Acl>> resourceAclsMap,
        Resource resource, java.util.Set<Acl> newAclsJava) {
    final scala.collection.immutable.Set<Acl> existingAcls = resourceAclsMap.get(resource);
    if (existingAcls != null) {
        final Iterator<Acl> aclsIter = existingAcls.iterator();
        while (aclsIter.hasNext()) {
            Acl curAcl = aclsIter.next();
            newAclsJava.add(curAcl);
        }
    }
}
Example 17
Source File: KafkaAdminClient.java From common-kafka with Apache License 2.0
/**
 * Manually converting to java map to avoid binary compatibility issues between scala versions when using
 * JavaConverters
 */
static <K, V> Map<K, V> convertToJavaMap(Iterator<Tuple2<K, V>> mapIterator) {
    Map<K, V> map = new HashMap<>();

    while (mapIterator.hasNext()) {
        Tuple2<K, V> entry = mapIterator.next();
        map.put(entry.copy$default$1(), entry.copy$default$2());
    }

    return Collections.unmodifiableMap(map);
}
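A hypothetical call site for this helper (our sketch, not part of common-kafka), building a small Scala map from Java and converting it back:

scala.collection.mutable.HashMap<String, Integer> scalaMap = new scala.collection.mutable.HashMap<>();
scalaMap.put("partitions", 3);
scalaMap.put("replicas", 2);
// The map's iterator() yields Tuple2<String, Integer> entries, which convertToJavaMap drains via next().
Map<String, Integer> javaMap = convertToJavaMap(scalaMap.iterator());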
Example 18
Source File: DataConverter.java From AsyncDao with MIT License
public static Map<String, Object> rowDataToMap(RowData rowData, ModelMap resultMap, List<String> columnNames) {
    Map<String, Object> res = new HashMap<>();
    Iterator<Object> iterable = rowData.iterator();
    int index = 0;
    while (iterable.hasNext()) {
        Object item = iterable.next();
        String property = getProperty(resultMap, columnNames.get(index));
        res.put(property, item);
        index++;
    }
    return res;
}
Example 19
Source File: CsvSourceTest.java From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testGetSourceDataFromLookupTable() {
    CubeManager cubeMgr = CubeManager.getInstance(getTestConfig());
    CubeInstance cube = cubeMgr.getCube(CUBE_NAME);
    Iterator<TableDesc> iterator = MetadataConverter.extractLookupTable(cube).iterator();
    while (iterator.hasNext()) {
        TableDesc lookup = iterator.next();
        NSparkCubingEngine.NSparkCubingSource cubingSource =
                new CsvSource().adaptToBuildEngine(NSparkCubingEngine.NSparkCubingSource.class);
        Dataset<Row> sourceData = cubingSource.getSourceData(lookup, ss, Maps.newHashMap());
        List<Row> rows = sourceData.collectAsList();
        Assert.assertTrue(rows != null && rows.size() > 0);
    }
}
Example 20
Source File: KafkaConfigModelGenerator.java From strimzi-kafka-operator with Apache License 2.0
private static Map<String, ConfigModel> configs() throws NoSuchMethodException, IllegalAccessException,
        InvocationTargetException {
    ConfigDef def = brokerConfigs();
    Map<String, String> dynamicUpdates = brokerDynamicUpdates();
    Method getConfigValueMethod = def.getClass().getDeclaredMethod("getConfigValue",
            ConfigDef.ConfigKey.class, String.class);
    getConfigValueMethod.setAccessible(true);
    Method sortedConfigs = ConfigDef.class.getDeclaredMethod("sortedConfigs");
    sortedConfigs.setAccessible(true);
    List<ConfigDef.ConfigKey> keys = (List) sortedConfigs.invoke(def);
    Map<String, ConfigModel> result = new TreeMap<>();
    for (ConfigDef.ConfigKey key : keys) {
        String configName = String.valueOf(getConfigValueMethod.invoke(def, key, "Name"));
        Type type = parseType(String.valueOf(getConfigValueMethod.invoke(def, key, "Type")));
        Scope scope = parseScope(dynamicUpdates.getOrDefault(key.name, "read-only"));
        ConfigModel descriptor = new ConfigModel();
        descriptor.setType(type);
        descriptor.setScope(scope);
        if (key.validator instanceof ConfigDef.Range) {
            descriptor = range(key, descriptor);
        } else if (key.validator instanceof ConfigDef.ValidString) {
            descriptor.setValues(enumer(key.validator));
        } else if (key.validator instanceof ConfigDef.ValidList) {
            descriptor.setItems(validList(key));
        } else if (key.validator instanceof ApiVersionValidator$) {
            Iterator<ApiVersion> iterator = ApiVersion$.MODULE$.allVersions().iterator();
            LinkedHashSet<String> versions = new LinkedHashSet<>();
            while (iterator.hasNext()) {
                ApiVersion next = iterator.next();
                ApiVersion$.MODULE$.apply(next.shortVersion());
                versions.add(Pattern.quote(next.shortVersion()) + "(\\.[0-9]+)*");
                ApiVersion$.MODULE$.apply(next.version());
                versions.add(Pattern.quote(next.version()));
            }
            descriptor.setPattern(String.join("|", versions));
        } else if (key.validator != null) {
            throw new IllegalStateException(key.validator.getClass().toString());
        }
        result.put(configName, descriptor);
    }
    return result;
}