Java Code Examples for com.google.common.base.Optional#equals()
The following examples show how to use com.google.common.base.Optional#equals(). They are drawn from open-source projects; the source file and license for each example are listed above its code.
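Before the examples, it helps to recall the contract: Guava's Optional#equals() is value-based. Two Optionals are equal when both are absent, or when both are present and their contained instances compare equal via their own equals() method. A minimal sketch illustrating this behavior (the class name OptionalEqualsDemo is just for illustration):

import com.google.common.base.Optional;

public class OptionalEqualsDemo {
    public static void main(String[] args) {
        // Present Optionals compare their contained values with equals().
        System.out.println(Optional.of("a").equals(Optional.of("a")));   // true
        System.out.println(Optional.of("a").equals(Optional.of("b")));   // false

        // Two absent Optionals are always equal; no null checks needed.
        System.out.println(Optional.absent().equals(Optional.absent())); // true

        // A present Optional is never equal to an absent one.
        System.out.println(Optional.of("a").equals(Optional.absent()));  // false
    }
}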
Example 1
Source File: ProbabilityUtils.java From api-mining with GNU General Public License v3.0
/**
 * Compare if two conditional probabilities are equal in the
 * maximum-likelihood view.
 *
 * @param cpd1
 * @param cpd2
 * @return
 */
public static <A, B> boolean conditionalProbabiltiesEquivalentInML(
        final IDiscreteConditionalProbability<A, B> cpd1,
        final IDiscreteConditionalProbability<A, B> cpd2) {
    final Set<B> support1 = cpd1.getPossibleContexts();
    final Set<B> support2 = cpd2.getPossibleContexts();
    if (!support1.equals(support2)) {
        return false;
    }
    for (final B context : support1) {
        final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
        final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
        if (!ml1.equals(ml2)) {
            return false;
        }
    }
    return true;
}
Example 2
Source File: ProbabilityUtils.java From tassal with BSD 3-Clause "New" or "Revised" License
/**
 * Compare if two conditional probabilities are equal in the
 * maximum-likelihood view.
 *
 * @param cpd1
 * @param cpd2
 * @return
 */
public static <A, B> boolean conditionalProbabiltiesEquivalentInML(
        final IDiscreteConditionalProbability<A, B> cpd1,
        final IDiscreteConditionalProbability<A, B> cpd2) {
    final Set<B> support1 = cpd1.getPossibleContexts();
    final Set<B> support2 = cpd2.getPossibleContexts();
    if (!support1.equals(support2)) {
        return false;
    }
    for (final B context : support1) {
        final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
        final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
        if (!ml1.equals(ml2)) {
            return false;
        }
    }
    return true;
}
Example 3
Source File: PublicXmlResourceValue.java From bazel with Apache License 2.0
@Override
public XmlResourceValue combineWith(XmlResourceValue value) {
    if (!(value instanceof PublicXmlResourceValue)) {
        throw new IllegalArgumentException(value + " is not combinable with " + this);
    }
    PublicXmlResourceValue other = (PublicXmlResourceValue) value;
    Map<ResourceType, Optional<Integer>> combined = new EnumMap<>(ResourceType.class);
    combined.putAll(typeToId);
    for (Map.Entry<ResourceType, Optional<Integer>> entry : other.typeToId.entrySet()) {
        Optional<Integer> existing = combined.get(entry.getKey());
        if (existing != null && !existing.equals(entry.getValue())) {
            throw new IllegalArgumentException(
                String.format(
                    "Public resource of type %s assigned two different id values 0x%x and 0x%x",
                    entry.getKey(), existing.orNull(), entry.getValue().orNull()));
        }
        combined.put(entry.getKey(), entry.getValue());
    }
    return of(combined);
}
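The merge pattern above generalizes beyond Android resources: when combining two maps whose values are Optionals, a single Optional#equals() call detects conflicting assignments, with absent-vs-absent counting as agreement. A simplified sketch of the same idea; the mergeIds name, String keys, and Integer values are illustrative, not from the Bazel source:

import com.google.common.base.Optional;
import java.util.HashMap;
import java.util.Map;

public class OptionalMapMerge {
    // Merge two id maps, failing fast when the same key carries two different values.
    static Map<String, Optional<Integer>> mergeIds(
            Map<String, Optional<Integer>> left, Map<String, Optional<Integer>> right) {
        Map<String, Optional<Integer>> combined = new HashMap<>(left);
        for (Map.Entry<String, Optional<Integer>> entry : right.entrySet()) {
            Optional<Integer> existing = combined.get(entry.getKey());
            // Optional#equals covers all four presence combinations in one call.
            if (existing != null && !existing.equals(entry.getValue())) {
                throw new IllegalArgumentException(
                    "Key " + entry.getKey() + " assigned both " + existing.orNull()
                        + " and " + entry.getValue().orNull());
            }
            combined.put(entry.getKey(), entry.getValue());
        }
        return combined;
    }
}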
Example 4
Source File: ProbabilityUtils.java From api-mining with GNU General Public License v3.0
/**
 * Print the differences between two CPDs.
 *
 * @param cpd1
 * @param cpd2
 */
public static <A, B> void printClusterDifferences(
        final IDiscreteConditionalProbability<A, B> cpd1,
        final IDiscreteConditionalProbability<A, B> cpd2) {
    final Set<B> support1 = cpd1.getPossibleContexts();
    for (final B context : support1) {
        final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
        final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
        if (!ml1.equals(ml2)) {
            System.out.println("Context " + context + ": " + ml1.orNull() + " vs " + ml2.orNull());
        }
    }
}
Example 5
Source File: BindingGraph.java From dagger2-sample with Apache License 2.0
private Optional<RequestResolver> getOwningResolver(ProvisionBinding provisionBinding) {
    Optional<Equivalence.Wrapper<AnnotationMirror>> bindingScope =
        provisionBinding.wrappedScope();
    for (RequestResolver requestResolver : getResolverLineage()) {
        if (bindingScope.equals(requestResolver.targetScope)
            || requestResolver.explicitProvisionBindings.containsValue(provisionBinding)) {
            return Optional.of(requestResolver);
        }
    }
    return Optional.absent();
}
Example 6
Source File: ProbabilityUtils.java From tassal with BSD 3-Clause "New" or "Revised" License
/**
 * Print the differences between two CPDs.
 *
 * @param cpd1
 * @param cpd2
 */
public static <A, B> void printClusterDifferences(
        final IDiscreteConditionalProbability<A, B> cpd1,
        final IDiscreteConditionalProbability<A, B> cpd2) {
    final Set<B> support1 = cpd1.getPossibleContexts();
    for (final B context : support1) {
        final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
        final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
        if (!ml1.equals(ml2)) {
            System.out.println("Context " + context + ": " + ml1.orNull() + " vs " + ml2.orNull());
        }
    }
}
Example 7
Source File: KafkaDeserializerExtractor.java From incubator-gobblin with Apache License 2.0
@VisibleForTesting
KafkaDeserializerExtractor(WorkUnitState state, Optional<Deserializers> deserializerType,
        Deserializer<?> kafkaDeserializer, KafkaSchemaRegistry<?, ?> kafkaSchemaRegistry) {
    super(state);
    this.kafkaDeserializer = kafkaDeserializer;
    this.kafkaSchemaRegistry = kafkaSchemaRegistry;
    this.latestSchema =
        (deserializerType.equals(Optional.of(Deserializers.CONFLUENT_AVRO)))
            ? (Schema) getSchema() : null;
}
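Comparing an Optional directly against Optional.of(constant), as this constructor does, is a null-safe idiom: the result is false when the Optional is absent or holds a different value, so no isPresent() guard is needed. A minimal sketch of the idiom; the local Deserializers enum is a stand-in for Gobblin's type, not the real class:

import com.google.common.base.Optional;

public class OptionalOfComparison {
    enum Deserializers { CONFLUENT_AVRO, GSON }

    static boolean isConfluentAvro(Optional<Deserializers> type) {
        // Equivalent to type.isPresent() && type.get() == Deserializers.CONFLUENT_AVRO,
        // but expressed as a single equals() call with no presence check.
        return type.equals(Optional.of(Deserializers.CONFLUENT_AVRO));
    }

    public static void main(String[] args) {
        System.out.println(isConfluentAvro(Optional.of(Deserializers.CONFLUENT_AVRO))); // true
        System.out.println(isConfluentAvro(Optional.of(Deserializers.GSON)));           // false
        System.out.println(isConfluentAvro(Optional.<Deserializers>absent()));          // false
    }
}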
Example 8
Source File: IfEqualHelperSource.java From Baragon with Apache License 2.0
public static CharSequence ifOptionalEqual(
        Optional<String> v1, Optional<String> v2, Options options) throws IOException {
    if (v1.equals(v2)) {
        return options.fn();
    } else {
        return options.inverse();
    }
}
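Note one design consequence of the value-based contract here: because two absent Optionals compare equal, this helper renders the options.fn() branch when both operands are absent, not only when both hold equal strings.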
Example 9
Source File: BaseState.java From TakinRPC with Apache License 2.0
@VisibleForTesting
boolean shouldVoteFor(@Nonnull RaftLog log, @Nonnull RequestVote request) {
    Optional<Replica> lastVotedFor = log.lastVotedFor();
    Replica candidate = Replica.fromString(request.getCandidateId());

    boolean hasAtLeastTerm = request.getLastLogTerm() >= log.lastLogTerm();
    boolean hasAtLeastIndex = request.getLastLogIndex() >= log.lastLogIndex();
    boolean logAsComplete = (hasAtLeastTerm && hasAtLeastIndex);

    boolean alreadyVotedForCandidate = lastVotedFor.equals(Optional.of(candidate));
    boolean notYetVoted = !lastVotedFor.isPresent();

    return (alreadyVotedForCandidate && logAsComplete)
        || (notYetVoted && logAsComplete)
        || (request.getLastLogTerm() > log.lastLogTerm())
        || (hasAtLeastTerm && (request.getLastLogIndex() > log.lastLogIndex()))
        || logAsComplete;
}
Example 10
Source File: ScanWithHiveReader.java From dremio-oss with Apache License 2.0
private static RecordReader getRecordReader(HiveSplitXattr splitXattr, HiveTableXattr tableXattr,
        OperatorContext context, HiveConf hiveConf, SplitAndPartitionInfo split,
        CompositeReaderConfig compositeReader, HiveProxyingSubScan config,
        UserGroupInformation readerUgi) throws Exception {
    final JobConf baseJobConf = new JobConf(hiveConf);
    final Properties tableProperties = new Properties();
    addProperties(baseJobConf, tableProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

    final boolean isTransactional = AcidUtils.isTablePropertyTransactional(baseJobConf);
    final boolean isPartitioned = config.getPartitionColumns() != null && config.getPartitionColumns().size() > 0;
    final Optional<String> tableInputFormat = HiveReaderProtoUtil.getTableInputFormat(tableXattr);
    final JobConf jobConf = new JobConf(baseJobConf);

    final SerDe tableSerDe = createSerDe(jobConf,
        HiveReaderProtoUtil.getTableSerializationLib(tableXattr).get(), tableProperties);
    final StructObjectInspector tableOI = getStructOI(tableSerDe);
    final SerDe partitionSerDe;
    final StructObjectInspector partitionOI;

    boolean hasDeltas = false;
    if (isTransactional) {
        InputSplit inputSplit = HiveUtilities.deserializeInputSplit(splitXattr.getInputSplit());
        if (inputSplit instanceof OrcSplit) {
            hasDeltas = hasDeltas((OrcSplit) inputSplit);
        }
    }

    final Class<? extends HiveAbstractReader> tableReaderClass = getNativeReaderClass(
        tableInputFormat, context.getOptions(), hiveConf, false, isTransactional && hasDeltas);
    final Constructor<? extends HiveAbstractReader> tableReaderCtor = getNativeReaderCtor(tableReaderClass);

    Constructor<? extends HiveAbstractReader> readerCtor = tableReaderCtor;
    // It is possible for a partition to have a different input format than the table input format.
    if (isPartitioned) {
        final List<Prop> partitionPropertiesList;
        final Properties partitionProperties = new Properties();
        final Optional<String> partitionInputFormat;
        final Optional<String> partitionStorageHandlerName;
        // First add table properties and then add partition properties.
        // Partition properties override table properties.
        addProperties(jobConf, partitionProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

        // If partition properties are stored in DatasetMetadata (pre 3.2.0)
        if (HiveReaderProtoUtil.isPreDremioVersion3dot2dot0LegacyFormat(tableXattr)) {
            logger.debug("Reading partition properties from DatasetMetadata");
            partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, splitXattr.getPartitionId());
            addProperties(jobConf, partitionProperties, partitionPropertiesList);
            partitionSerDe = createSerDe(jobConf,
                HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, splitXattr.getPartitionId()).get(),
                partitionProperties);
            partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, splitXattr.getPartitionId());
            partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, splitXattr.getPartitionId());
        } else {
            logger.debug("Reading partition properties from PartitionChunk");
            final PartitionXattr partitionXattr = HiveReaderProtoUtil.getPartitionXattr(split);
            partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, partitionXattr);
            addProperties(jobConf, partitionProperties, partitionPropertiesList);
            partitionSerDe = createSerDe(jobConf,
                HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, partitionXattr),
                partitionProperties);
            partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, partitionXattr);
            partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr);
        }

        jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName));
        partitionOI = getStructOI(partitionSerDe);

        final boolean mixedSchema = !tableOI.equals(partitionOI);
        if (!partitionInputFormat.equals(tableInputFormat) || mixedSchema || isTransactional && hasDeltas) {
            final Class<? extends HiveAbstractReader> partitionReaderClass = getNativeReaderClass(
                partitionInputFormat, context.getOptions(), jobConf, mixedSchema, isTransactional);
            readerCtor = getNativeReaderCtor(partitionReaderClass);
        }
    } else {
        partitionSerDe = null;
        partitionOI = null;
        jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat,
            HiveReaderProtoUtil.getTableStorageHandler(tableXattr)));
    }

    return readerCtor.newInstance(tableXattr, split, compositeReader.getInnerColumns(), context,
        jobConf, tableSerDe, tableOI, partitionSerDe, partitionOI, config.getFilter(),
        config.getReferencedTables(), readerUgi);
}
Example 11
Source File: ScanWithHiveReader.java From dremio-oss with Apache License 2.0
private static RecordReader getRecordReader(HiveSplitXattr splitXattr, HiveTableXattr tableXattr,
        OperatorContext context, HiveConf hiveConf, SplitAndPartitionInfo split,
        CompositeReaderConfig compositeReader, HiveProxyingSubScan config,
        UserGroupInformation readerUgi) throws Exception {
    final JobConf baseJobConf = new JobConf(hiveConf);
    final Properties tableProperties = new Properties();
    addProperties(baseJobConf, tableProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

    final boolean isTransactional = AcidUtils.isTablePropertyTransactional(baseJobConf);
    final boolean isPartitioned = config.getPartitionColumns() != null && config.getPartitionColumns().size() > 0;
    final Optional<String> tableInputFormat = HiveReaderProtoUtil.getTableInputFormat(tableXattr);
    final JobConf jobConf = new JobConf(baseJobConf);

    final AbstractSerDe tableSerDe = createSerDe(jobConf,
        HiveReaderProtoUtil.getTableSerializationLib(tableXattr).get(), tableProperties);
    final StructObjectInspector tableOI = getStructOI(tableSerDe);
    final AbstractSerDe partitionSerDe;
    final StructObjectInspector partitionOI;

    boolean hasDeltas = false;
    if (isTransactional) {
        InputSplit inputSplit = HiveUtilities.deserializeInputSplit(splitXattr.getInputSplit());
        if (inputSplit instanceof OrcSplit) {
            hasDeltas = hasDeltas((OrcSplit) inputSplit);
        }
    }

    final Class<? extends HiveAbstractReader> tableReaderClass = getNativeReaderClass(
        tableInputFormat, context.getOptions(), hiveConf, false, isTransactional && hasDeltas);
    final Constructor<? extends HiveAbstractReader> tableReaderCtor = getNativeReaderCtor(tableReaderClass);

    Constructor<? extends HiveAbstractReader> readerCtor = tableReaderCtor;
    // It is possible for a partition to have a different input format than the table input format.
    if (isPartitioned) {
        final List<Prop> partitionPropertiesList;
        final Properties partitionProperties = new Properties();
        final Optional<String> partitionInputFormat;
        final Optional<String> partitionStorageHandlerName;
        // First add table properties and then add partition properties.
        // Partition properties override table properties.
        addProperties(jobConf, partitionProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

        // If partition properties are stored in DatasetMetadata (pre 3.2.0)
        if (HiveReaderProtoUtil.isPreDremioVersion3dot2dot0LegacyFormat(tableXattr)) {
            logger.debug("Reading partition properties from DatasetMetadata");
            partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, splitXattr.getPartitionId());
            addProperties(jobConf, partitionProperties, partitionPropertiesList);
            partitionSerDe = createSerDe(jobConf,
                HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, splitXattr.getPartitionId()).get(),
                partitionProperties);
            partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, splitXattr.getPartitionId());
            partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, splitXattr.getPartitionId());
        } else {
            logger.debug("Reading partition properties from PartitionChunk");
            final PartitionXattr partitionXattr = HiveReaderProtoUtil.getPartitionXattr(split);
            partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, partitionXattr);
            addProperties(jobConf, partitionProperties, partitionPropertiesList);
            partitionSerDe = createSerDe(jobConf,
                HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, partitionXattr),
                partitionProperties);
            partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, partitionXattr);
            partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr);
        }

        jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName));
        partitionOI = getStructOI(partitionSerDe);

        final boolean mixedSchema = !tableOI.equals(partitionOI);
        if (!partitionInputFormat.equals(tableInputFormat) || mixedSchema || isTransactional && hasDeltas) {
            final Class<? extends HiveAbstractReader> partitionReaderClass = getNativeReaderClass(
                partitionInputFormat, context.getOptions(), jobConf, mixedSchema, isTransactional);
            readerCtor = getNativeReaderCtor(partitionReaderClass);
        }
    } else {
        partitionSerDe = null;
        partitionOI = null;
        jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat,
            HiveReaderProtoUtil.getTableStorageHandler(tableXattr)));
    }

    return readerCtor.newInstance(tableXattr, split, compositeReader.getInnerColumns(), context,
        jobConf, tableSerDe, tableOI, partitionSerDe, partitionOI, config.getFilter(),
        config.getReferencedTables(), readerUgi);
}
Example 12
Source File: BindingGraphValidator.java From dagger2-sample with Apache License 2.0
/**
 * Validates that the scope (if any) of this component is compatible with the scopes of the
 * bindings available in this component.
 */
void validateComponentScope(final BindingGraph subject,
        final ValidationReport.Builder<BindingGraph> reportBuilder,
        ImmutableMap<BindingKey, ResolvedBindings> resolvedBindings) {
    Optional<Equivalence.Wrapper<AnnotationMirror>> componentScope =
        subject.componentDescriptor().wrappedScope();
    ImmutableSet.Builder<String> incompatiblyScopedMethodsBuilder = ImmutableSet.builder();
    for (ResolvedBindings bindings : resolvedBindings.values()) {
        if (bindings.bindingKey().kind().equals(BindingKey.Kind.CONTRIBUTION)) {
            for (ContributionBinding contributionBinding : bindings.ownedContributionBindings()) {
                if (contributionBinding instanceof ProvisionBinding) {
                    ProvisionBinding provisionBinding = (ProvisionBinding) contributionBinding;
                    if (provisionBinding.scope().isPresent()
                            && !componentScope.equals(provisionBinding.wrappedScope())) {
                        // Scoped components cannot reference bindings to @Provides methods or
                        // @Inject types decorated by a different scope annotation. Unscoped
                        // components cannot reference scoped @Provides methods or @Inject types
                        // decorated by any scope annotation.
                        switch (provisionBinding.bindingKind()) {
                            case PROVISION:
                                ExecutableElement provisionMethod =
                                    MoreElements.asExecutable(provisionBinding.bindingElement());
                                incompatiblyScopedMethodsBuilder.add(
                                    methodSignatureFormatter.format(provisionMethod));
                                break;
                            case INJECTION:
                                incompatiblyScopedMethodsBuilder.add(stripCommonTypePrefixes(
                                    provisionBinding.scope().get().toString()) + " class "
                                    + provisionBinding.bindingTypeElement().getQualifiedName());
                                break;
                            default:
                                throw new IllegalStateException();
                        }
                    }
                }
            }
        }
    }
    ImmutableSet<String> incompatiblyScopedMethods = incompatiblyScopedMethodsBuilder.build();
    if (!incompatiblyScopedMethods.isEmpty()) {
        TypeElement componentType = subject.componentDescriptor().componentDefinitionType();
        StringBuilder message = new StringBuilder(componentType.getQualifiedName());
        if (componentScope.isPresent()) {
            message.append(" scoped with ");
            message.append(stripCommonTypePrefixes(ErrorMessages.format(componentScope.get().get())));
            message.append(" may not reference bindings with different scopes:\n");
        } else {
            message.append(" (unscoped) may not reference scoped bindings:\n");
        }
        for (String method : incompatiblyScopedMethods) {
            message.append(ErrorMessages.INDENT).append(method).append("\n");
        }
        reportBuilder.addItem(message.toString(), componentType,
            subject.componentDescriptor().componentAnnotation());
    }
}