Java Code Examples for com.google.common.collect.Table#get()
The following examples show how to use com.google.common.collect.Table#get(). The examples are drawn from open-source projects; the source file and license are noted above each example.
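Before the project examples, here is a minimal, self-contained sketch of the basic contract of Table#get(): it returns the value stored at the given row/column pair, or null when that cell has no mapping. The class name and the row/column keys below are made up purely for illustration.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TableGetSketch {

    public static void main(String[] args) {
        // Rows are cities, columns are courses, values are seat counts (illustrative data only).
        Table<String, String, Integer> seats = HashBasedTable.create();
        seats.put("Mumbai", "IT", 60);

        Integer present = seats.get("Mumbai", "IT"); // 60
        Integer absent = seats.get("Oxford", "IT");  // null: no mapping for this cell

        System.out.println(present + " / " + absent);
    }
}

Because get() returns null for missing cells, unboxing the result straight into an int (as some of the test examples below do) is only safe when the cell is known to be populated.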
Example 1
Source File: SRLParse.java From EasySRL with Apache License 2.0 | 6 votes |
private static void parseFile(final Table<String, Integer, TreebankParse> parses, final Iterator<String> propbank,
        final boolean isPropbank, final Table<String, Integer, SRLParse> result) {
    SRLParse srlparse;
    String file;
    int number;
    while (propbank.hasNext()) {
        final String line = propbank.next();
        // wsj/00/wsj_0003.mrg 0 24 gold expose.01 p---p 24:1-rel
        // 25:2-ARG2-to 27:6-ARGM-TMP 20:1,21:1,22:1,23:1-ARG1
        final String[] fields = line.split(" ");
        file = fields[0].substring(fields[0].lastIndexOf("/") + 1, fields[0].length() - 4);
        number = Integer.valueOf(fields[1]);
        final TreebankParse treebank = parses.get(file, number);
        srlparse = result.get(file, number);
        if (srlparse == null) {
            srlparse = new SRLParse(treebank.getWords());
            result.put(file, number, srlparse);
        }
        srlparse.add(parseLine(line, treebank, isPropbank));
    }
}
Example 2
Source File: QuickStringDoubleTable.java From mynlp with Apache License 2.0 | 6 votes |
public QuickStringDoubleTable(Table<String, String, Double> table) {
    ArrayList<String> labelList = Lists.newArrayList(table.rowKeySet());
    labelBase = findABase(labelList);
    labelSize = labelBase.length;
    data = new double[labelSize * labelSize];
    for (String rowKey : table.rowKeySet()) {
        for (String colKey : table.columnKeySet()) {
            int rowid = labelBase[rowKey.hashCode() % labelSize];
            int colid = labelBase[colKey.hashCode() % labelSize];
            data[rowid * labelSize + colid] = table.get(rowKey, colKey);
        }
    }
}
Example 3
Source File: QuickStringIntTable.java From mynlp with Apache License 2.0 | 6 votes |
public QuickStringIntTable(Table<String, String, Integer> table) {
    ArrayList<String> labelList = Lists.newArrayList(table.rowKeySet());
    labelBase = findABase(labelList);
    labelSize = labelBase.length;
    data = new int[labelSize * labelSize];
    for (String rowKey : table.rowKeySet()) {
        for (String colKey : table.columnKeySet()) {
            int rowid = labelBase[rowKey.hashCode() % labelSize];
            int colid = labelBase[colKey.hashCode() % labelSize];
            data[rowid * labelSize + colid] = table.get(rowKey, colKey);
        }
    }
}
Example 4
Source File: NearestNeighborModelEvaluator.java From jpmml-evaluator with GNU Affero General Public License v3.0 | 6 votes |
private Function<Integer, String> createIdentifierResolver(FieldName name, Table<Integer, FieldName, FieldValue> table){
    Function<Integer, String> function = new Function<Integer, String>(){

        @Override
        public String apply(Integer row){
            FieldValue value = table.get(row, name);

            if(FieldValueUtil.isMissing(value)){
                throw new MissingValueException(name);
            }

            return value.asString();
        }
    };

    return function;
}
Example 5
Source File: GuavaTableUnitTest.java From tutorials with MIT License | 6 votes |
@Test
public void givenArrayTable_whenGet_returnsSuccessfully() {
    final List<String> universityRowTable = Lists.newArrayList("Mumbai", "Harvard");
    final List<String> courseColumnTables = Lists.newArrayList("Chemical", "IT", "Electrical");
    final Table<String, String, Integer> universityCourseSeatTable = ArrayTable.create(universityRowTable, courseColumnTables);
    universityCourseSeatTable.put("Mumbai", "Chemical", 120);
    universityCourseSeatTable.put("Mumbai", "IT", 60);
    universityCourseSeatTable.put("Harvard", "Electrical", 60);
    universityCourseSeatTable.put("Harvard", "IT", 120);

    final int seatCount = universityCourseSeatTable.get("Mumbai", "IT");

    assertThat(seatCount).isEqualTo(60);
}
Example 6
Source File: BrowserTest.java From kurento-java with Apache License 2.0 | 6 votes |
public void writeCSV(String outputFile, Table<Integer, Integer, String> resultTable) throws IOException {
    FileWriter writer = new FileWriter(outputFile);
    for (Integer row : resultTable.rowKeySet()) {
        boolean first = true;
        for (Integer column : resultTable.columnKeySet()) {
            if (!first) {
                writer.append(',');
            }
            String value = resultTable.get(row, column);
            if (value != null) {
                writer.append(value);
            }
            first = false;
        }
        writer.append('\n');
    }
    writer.flush();
    writer.close();
}
Example 7
Source File: TestingResultReporter.java From SPDS with Eclipse Public License 2.0 | 6 votes |
public void onSeedFinished(Node<Statement, Val> seed, final ForwardBoomerangResults<W> res) {
    Table<Statement, Val, W> results = res.asStatementValWeightTable();
    for (final Entry<Unit, Assertion> e : stmtToResults.entries()) {
        if (e.getValue() instanceof ComparableResult) {
            final ComparableResult<W, Val> expectedResults = (ComparableResult) e.getValue();
            W w2 = results.get(new Statement((Stmt) e.getKey(), null), expectedResults.getVal());
            if (w2 != null) {
                expectedResults.computedResults(w2);
            }
        }
        // check if any of the methods that should not be analyzed have been analyzed
        if (e.getValue() instanceof ShouldNotBeAnalyzed) {
            final ShouldNotBeAnalyzed shouldNotBeAnalyzed = (ShouldNotBeAnalyzed) e.getValue();
            Unit analyzedUnit = e.getKey();
            if (analyzedUnit.equals(shouldNotBeAnalyzed.unit)) {
                shouldNotBeAnalyzed.hasBeenAnalyzed();
            }
        }
    }
}
Example 8
Source File: SentryIniPolicyFileFormatter.java From incubator-sentry with Apache License 2.0 | 5 votes |
/**
 * Parse the ini file and return a map with all data.
 *
 * @param resourcePath
 *        The path of the input file
 * @param conf
 *        The configuration info
 * @return the result of sentry mapping data in map structure.
 */
@Override
public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf) throws Exception {
    Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
    // SimpleFileProviderBackend is used to parse the ini file
    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
    ProviderBackendContext context = new ProviderBackendContext();
    context.setAllowPerDatabase(true);
    // parse the ini file
    policyFileBackend.initialize(context);
    // SimpleFileProviderBackend parses the input file and outputs the data in Table format.
    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
            .getGroupRolePrivilegeTable();
    Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
    Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
        for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
            // get the roles set for the current groupName
            Set<String> tempRoles = groupRolesMap.get(groupName);
            if (tempRoles == null) {
                tempRoles = Sets.newHashSet();
            }
            Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
            // if no privilege exists for [group, role], the [group, role] entry is discarded
            if (privileges != null) {
                // update [group, role] mapping data
                tempRoles.add(roleName);
                groupRolesMap.put(groupName, tempRoles);
                // update [role, privilege] mapping data
                rolePrivilegesMap.put(roleName, privileges);
            }
        }
    }
    resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
    resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
    return resultMap;
}
Example 9
Source File: Util.java From EasySRL with Apache License 2.0 | 5 votes |
public static <R, C, V> void add(final Table<R, C, Multiset<V>> table, final R row, final C column, final V value) {
    Multiset<V> multiset = table.get(row, column);
    if (multiset == null) {
        multiset = HashMultiset.create();
        table.put(row, column, multiset);
    }
    multiset.add(value);
}
Example 10
Source File: TableUtils.java From argument-reasoning-comprehension-task with Apache License 2.0 | 5 votes |
/**
 * Converts a Guava table to a CSV table.
 *
 * @param table                   table
 * @param csvFormat               CSV format
 * @param missingValuePlaceholder printed when a value is missing (empty string by default)
 * @param <T>                     object type (string)
 * @return the table rendered as a CSV string
 * @throws IOException exception
 */
public static <T> String tableToCsv(Table<String, String, T> table, CSVFormat csvFormat,
        String missingValuePlaceholder) throws IOException {
    StringWriter sw = new StringWriter();
    CSVPrinter printer = new CSVPrinter(sw, csvFormat);

    List<String> firstRow = new ArrayList<>();
    firstRow.add(" ");
    firstRow.addAll(table.columnKeySet());
    printer.printRecord(firstRow);

    for (String rowKey : table.rowKeySet()) {
        printer.print(rowKey);
        for (String columnKey : table.columnKeySet()) {
            T value = table.get(rowKey, columnKey);
            if (value == null) {
                printer.print(missingValuePlaceholder);
            } else {
                printer.print(value);
            }
        }
        printer.println();
    }

    printer.close();
    return sw.toString();
}
Example 11
Source File: GuavaTableUnitTest.java From tutorials with MIT License | 5 votes |
@Test
public void givenTable_whenGet_returnsSuccessfully() {
    final Table<String, String, Integer> universityCourseSeatTable = HashBasedTable.create();
    universityCourseSeatTable.put("Mumbai", "Chemical", 120);
    universityCourseSeatTable.put("Mumbai", "IT", 60);
    universityCourseSeatTable.put("Harvard", "Electrical", 60);
    universityCourseSeatTable.put("Harvard", "IT", 120);

    final int seatCount = universityCourseSeatTable.get("Mumbai", "IT");
    final Integer seatCountForNoEntry = universityCourseSeatTable.get("Oxford", "IT");

    assertThat(seatCount).isEqualTo(60);
    assertThat(seatCountForNoEntry).isEqualTo(null);
}
Example 12
Source File: CROSSSimSimilarityCalculatorTest.java From scava with Eclipse Public License 2.0 | 5 votes |
@Test
public void crossSimCommutativeTest() {
    Map<String, String> parameters = Maps.newHashMap();
    parameters.put("committers", "false");
    parameters.put("deps", "true");
    parameters.put("stargazers", "true");
    parameters.put("freqDeps", "129");
    Table<String, String, Double> table = crossSim.calculateAggregatedSimilarityValues(artifacts, parameters);
    double val1 = table.get(artifacts.get(0).getFullName(), artifacts.get(1).getFullName());
    double val2 = table.get(artifacts.get(1).getFullName(), artifacts.get(0).getFullName());
    assertEquals(val1, val2, 0.0);
}
Example 13
Source File: CROSSSimSimilarityCalculatorTest.java From scava with Eclipse Public License 2.0 | 5 votes |
@Test
public void crossSimIdentityTest() {
    Map<String, String> parameters = new HashMap<>();
    parameters.put("committers", "false");
    parameters.put("deps", "true");
    parameters.put("stargazers", "true");
    parameters.put("freqDeps", "129");
    Table<String, String, Double> table = crossSim.calculateAggregatedSimilarityValues(artifacts, parameters);
    double val1 = table.get(artifacts.get(0).getFullName(), artifacts.get(0).getFullName());
    assertEquals(val1, 1, 0.0);
}
Example 14
Source File: DataStore.java From synthea with Apache License 2.0 | 5 votes |
private static int pickUtilization(Table<Integer, String, AtomicInteger> u, int year, String category) {
    AtomicInteger value = u.get(year, category);
    if (value == null) {
        return 0;
    } else {
        return value.get();
    }
}
Example 15
Source File: CROSSRecSimilarityCalculatorTest.java From scava with Eclipse Public License 2.0 | 5 votes |
@Test
public void crossSimCommutativeTest() {
    Map<String, String> parameters = new HashMap<>();
    parameters.put("committers", "false");
    parameters.put("deps", "true");
    parameters.put("stargazers", "true");
    parameters.put("freqDeps", "129");
    Table<String, String, Double> table = crossRec.calculateAggregatedSimilarityValues(artifacts, parameters);
    double val1 = table.get(artifacts.get(0).getFullName(), artifacts.get(1).getFullName());
    double val2 = table.get(artifacts.get(1).getFullName(), artifacts.get(0).getFullName());
    assertEquals(val1, val2, 0.0001);
}
Example 16
Source File: DemographicsTest.java From synthea with Apache License 2.0 | 5 votes |
/**
 * Set up the demographics to use for testing.
 */
@BeforeClass
@SuppressWarnings("rawtypes")
public static void setUp() throws IOException {
    demographicsFile = Config.get("generate.demographics.default_file");
    Config.set("generate.demographics.default_file", "geography/test_demographics.csv");
    Table pa = Demographics.load("Pennsylvania");
    philly = (Demographics) pa.get("Pennsylvania", "27237");
    random = new Random();
}
Example 17
Source File: ServiceModel.java From vespa with Apache License 2.0 | 5 votes |
private static List<Service> getAndSetEntry(Table<String, String, List<Service>> services, String clusterName, String clusterType) {
    List<Service> serviceList = services.get(clusterName, clusterType);
    if (serviceList == null) {
        serviceList = new ArrayList<>();
        services.put(clusterName, clusterType, serviceList);
    }
    return serviceList;
}
Example 18
Source File: MarkRemovableRulesetNodes.java From closure-stylesheets with Apache License 2.0 | 4 votes |
/**
 * Processes the given ruleset, deciding whether it should be kept
 * or removed by looking at the given previous rules.
 */
private void processRuleset(
        Table<String, String, CssRulesetNode> rules, CssRulesetNode ruleset) {
    if ((referencedRules != null) && !referencedRules.isEmpty()) {
        // If this rule is not referenced in the code we remove it.
        if (isSelectorUnreferenced(ruleset.getSelectors().getChildAt(0))) {
            // TODO(henrywong, dgajda): Storing the set of things to clean up
            // in the tree is pretty brittle - better would be to have the pass
            // return the rules it finds and then manually pass them in to the
            // EliminateUselessRulesets pass.
            tree.getRulesetNodesToRemove().addRulesetNode(ruleset);
            return;
        }
    }

    // Make sure the node has only one declaration.
    Preconditions.checkArgument(ruleset.getDeclarations().numChildren() == 1);
    Preconditions.checkArgument(
            ruleset.getDeclarations().getChildAt(0) instanceof CssDeclarationNode);
    CssDeclarationNode declaration =
            (CssDeclarationNode) ruleset.getDeclarations().getChildAt(0);
    CssPropertyNode propertyNode = declaration.getPropertyName();
    String propertyName = propertyNode.getPropertyName();
    if (PROPERTIES_NOT_TO_BE_CHECKED.contains(propertyName)) {
        return;
    }
    // If the declaration is star-hacked then we make the star be part of
    // the property name to ensure that we do not consider hacked
    // declarations as overridden by the non-hacked ones.
    if (declaration.hasStarHack()) {
        propertyName = "*" + propertyName;
    }
    String selector = PassUtil.printSelector(ruleset.getSelectors().getChildAt(0));
    CssRulesetNode previousRuleset = rules.get(selector, propertyName);
    if (previousRuleset != null) {
        // If the new rule is important and the saved one was not, then remove the saved one.
        if (isImportantRule(ruleset) && !isImportantRule(previousRuleset)) {
            tree.getRulesetNodesToRemove().addRulesetNode(previousRuleset);
            // Replace the non-important ruleset in the map, keep the important one.
            rules.put(selector, propertyName, ruleset);
        } else {
            tree.getRulesetNodesToRemove().addRulesetNode(ruleset);
        }
    } else if (hasOverridingShorthand(propertyNode, selector, rules, ruleset)) {
        tree.getRulesetNodesToRemove().addRulesetNode(ruleset);
    } else if (PassUtil.hasAlternateAnnotation(declaration)) {
        // The declaration has @alternate, so do not let it mask other
        // declarations that precede it. However, @alternate rules may be masked
        // by succeeding non-@alternate rules.
    } else {
        rules.put(selector, propertyName, ruleset);
    }
}
Example 19
Source File: TopicMFMTModel.java From jstarcraft-rns with Apache License 2.0 | 4 votes |
@Override
public void prepare(Configurator configuration, DataModule model, DataSpace space) {
    super.prepare(configuration, model, space);
    commentField = configuration.getString("data.model.fields.comment");
    commentDimension = model.getQualityInner(commentField);
    MemoryQualityAttribute attribute = (MemoryQualityAttribute) space.getQualityAttribute(commentField);
    Object[] documentValues = attribute.getDatas();

    // init hyper-parameters
    lambda = configuration.getFloat("recommender.regularization.lambda", 0.001F);
    lambdaU = configuration.getFloat("recommender.regularization.lambdaU", 0.001F);
    lambdaV = configuration.getFloat("recommender.regularization.lambdaV", 0.001F);
    lambdaB = configuration.getFloat("recommender.regularization.lambdaB", 0.001F);
    numberOfTopics = configuration.getInteger("recommender.topic.number", 10);
    learnRatio = configuration.getFloat("recommender.iterator.learnrate", 0.01F);
    epocheSize = configuration.getInteger("recommender.iterator.maximum", 10);
    numberOfDocuments = scoreMatrix.getElementSize();

    // count the number of words, build the word dictionary and
    // userItemToDoc dictionary
    Map<String, Integer> wordDictionaries = new HashMap<>();
    Table<Integer, Integer, Float> documentTable = HashBasedTable.create();
    int rowCount = 0;
    userItemToDocument = HashBasedTable.create();
    for (DataInstance sample : model) {
        int userIndex = sample.getQualityFeature(userDimension);
        int itemIndex = sample.getQualityFeature(itemDimension);
        int documentIndex = sample.getQualityFeature(commentDimension);
        userItemToDocument.put(userIndex, itemIndex, rowCount);
        // convert wordIds to wordIndices
        String data = (String) documentValues[documentIndex];
        String[] words = data.isEmpty() ? new String[0] : data.split(":");
        for (String word : words) {
            Integer wordIndex = wordDictionaries.get(word);
            if (wordIndex == null) {
                wordIndex = numberOfWords++;
                wordDictionaries.put(word, wordIndex);
            }
            Float oldValue = documentTable.get(rowCount, wordIndex);
            if (oldValue == null) {
                oldValue = 0F;
            }
            float newValue = oldValue + 1F / words.length;
            documentTable.put(rowCount, wordIndex, newValue);
        }
        rowCount++;
    }
    // build W
    W = SparseMatrix.valueOf(numberOfDocuments, numberOfWords, documentTable);

    userBiases = DenseVector.valueOf(userSize);
    userBiases.iterateElement(MathCalculator.SERIAL, (scalar) -> {
        scalar.setValue(distribution.sample().floatValue());
    });

    itemBiases = DenseVector.valueOf(itemSize);
    itemBiases.iterateElement(MathCalculator.SERIAL, (scalar) -> {
        scalar.setValue(distribution.sample().floatValue());
    });

    userFactors = DenseMatrix.valueOf(userSize, numberOfTopics);
    userFactors.iterateElement(MathCalculator.SERIAL, (scalar) -> {
        scalar.setValue(distribution.sample().floatValue());
    });

    itemFactors = DenseMatrix.valueOf(itemSize, numberOfTopics);
    itemFactors.iterateElement(MathCalculator.SERIAL, (scalar) -> {
        scalar.setValue(distribution.sample().floatValue());
    });

    K = initStd;
    topicVector = DenseVector.valueOf(numberOfTopics);
    function = new SoftMaxActivationFunction();

    // init theta and phi
    // TODO theta is actually documentFactors
    documentFactors = DenseMatrix.valueOf(numberOfDocuments, numberOfTopics);
    calculateTheta();
    // TODO phi is actually wordFactors
    wordFactors = DenseMatrix.valueOf(numberOfTopics, numberOfWords);
    wordFactors.iterateElement(MathCalculator.SERIAL, (scalar) -> {
        scalar.setValue(RandomUtility.randomFloat(0.01F));
    });

    logger.info("number of users : " + userSize);
    logger.info("number of Items : " + itemSize);
    logger.info("number of words : " + wordDictionaries.size());
}
Example 20
Source File: DefaultConsumerIndexManager.java From joyqueue with Apache License 2.0 | 4 votes |
protected Map<BrokerNode, Table<String, Short, List<CommitAckData>>> buildCommitAckParams(Map<String, List<ConsumeReply>> ackMap, String app) {
    Map<BrokerNode, Table<String, Short, List<CommitAckData>>> result = Maps.newHashMap();
    for (Map.Entry<String, List<ConsumeReply>> entry : ackMap.entrySet()) {
        String topic = entry.getKey();
        TopicMetadata topicMetadata = clusterManager.fetchTopicMetadata(topic, app);
        if (topicMetadata == null) {
            logger.warn("topic {} metadata is null", topic);
            continue;
        }
        for (ConsumeReply consumeReply : entry.getValue()) {
            PartitionMetadata partitionMetadata = topicMetadata.getPartition(consumeReply.getPartition());
            if (partitionMetadata == null) {
                partitionMetadata = topicMetadata.getPartitions().get(0);
            }
            BrokerNode leader = partitionMetadata.getLeader();
            if (leader == null) {
                logger.warn("topic {}, partition {}, leader is null", topic, consumeReply.getPartition());
                continue;
            }
            Table<String, Short, List<CommitAckData>> topicConsumeAckTable = result.get(leader);
            if (topicConsumeAckTable == null) {
                topicConsumeAckTable = HashBasedTable.create();
                result.put(leader, topicConsumeAckTable);
            }
            List<CommitAckData> commitAckList = topicConsumeAckTable.get(topic, consumeReply.getPartition());
            if (commitAckList == null) {
                commitAckList = Lists.newLinkedList();
                topicConsumeAckTable.put(topic, consumeReply.getPartition(), commitAckList);
            }
            commitAckList.add(new CommitAckData(consumeReply.getPartition(), consumeReply.getIndex(), consumeReply.getRetryType()));
        }
    }
    return result;
}