Java Code Examples for java.util.HashMap#putAll()
The following examples show how to use java.util.HashMap#putAll(). They are taken from open-source projects; the source file, project, and license are noted above each example.
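Before the project examples, here is a minimal, self-contained sketch of the core behavior of HashMap#putAll(): it copies every mapping from the argument map into the receiver, and for keys present in both maps the receiver's value is overwritten. The class name PutAllDemo and the keys used below are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class PutAllDemo {
    public static void main(String[] args) {
        Map<String, Integer> defaults = new HashMap<>();
        defaults.put("timeout", 30);
        defaults.put("retries", 3);

        Map<String, Integer> overrides = new HashMap<>();
        overrides.put("timeout", 60); // existing key: its value will be replaced
        overrides.put("verbose", 1);  // new key: the entry will be added

        // Copy all mappings from overrides into defaults;
        // keys present in both maps take the value from overrides.
        defaults.putAll(overrides);

        // Prints all three entries, e.g. {retries=3, timeout=60, verbose=1}
        // (HashMap iteration order is not guaranteed).
        System.out.println(defaults);
    }
}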
Example 1
Source File: XmlBeansUtil.java (from j-road, Apache License 2.0)

@SuppressWarnings("unchecked")
public static HashMap<String, XmlBeansXRoadMetadata> loadMetadata()
        throws IOException, ClassNotFoundException {
    HashMap<String, XmlBeansXRoadMetadata> metaMap = new HashMap<String, XmlBeansXRoadMetadata>();
    for (Enumeration<URL> metaUrls =
             Thread.currentThread().getContextClassLoader().getResources("xroad.metadata");
         metaUrls.hasMoreElements();) {
        URL metaUrl = metaUrls.nextElement();
        InputStream is = metaUrl.openStream();
        ObjectInputStream ois = new ObjectInputStream(is);
        metaMap.putAll((HashMap<String, XmlBeansXRoadMetadata>) ois.readObject());
        ois.close();
        is.close();
    }
    return metaMap;
}
Example 2
Source File: UDFStringSplitToMultimapTest.java (from hive-third-functions, Apache License 2.0)

@Test
public void testStringSplitToMultimap() throws Exception {
    UDFStringSplitToMultimap udf = new UDFStringSplitToMultimap();
    GenericUDF.DeferredObject string = new GenericUDF.DeferredJavaObject("a=123,b=0.4,a=124");
    GenericUDF.DeferredObject entryDelimiter = new GenericUDF.DeferredJavaObject(",");
    GenericUDF.DeferredObject keyValueDelimiter = new GenericUDF.DeferredJavaObject("=");
    GenericUDF.DeferredObject[] args = {string, entryDelimiter, keyValueDelimiter};
    HashMap<String, List<String>> output = (HashMap<String, List<String>>) udf.evaluate(args);
    HashMap<String, List<String>> expect = Maps.newHashMap();
    expect.putAll(ImmutableMap.<String, List<String>>of(
            "a", ImmutableList.<String>of("123", "124"),
            "b", ImmutableList.<String>of("0.4")));
    Assert.assertEquals("split_to_multimap() test", true, MapUtils.mapEquals(output, expect));
}
Example 3
Source File: ProbLocalStateGraph.java (from iBioSim, Apache License 2.0)

/**
 * Add all transitions and their corresponding rates for the nextState to the nextTranRateMap.
 * @param nextState
 * @param nextStateTranRateMap
 */
public void addTranRate(State nextState, HashMap<Transition, Double> nextStateTranRateMap) {
    HashMap<Transition, Double> innerMap = this.nextTranRateMap.get(nextState);
    if (innerMap == null) {
        // innerMap = new HashMap<Transition, Double>();
        // innerMap.putAll(nextStateTranRateMap);
        // this.nextTranRateMap.put(nextState, innerMap);
        this.nextTranRateMap.put(nextState, nextStateTranRateMap);
    } else {
        // TODO: Need to remove this step.
        innerMap.putAll(nextStateTranRateMap);
    }
    if (Options.getDebugMode())
        printNextProbLocalTranRateMapForGivenState(nextState,
                "ProbLocalStateGraph.java -> (public) addTranRate(). Adding state "
                        + nextState.getFullLabel() + " to the map.");
}
Example 4
Source File: ElasticJoinExecutor.java (from elasticsearch-sql, Apache License 2.0)

protected void onlyReturnedFields(Map<String, Object> fieldsMap, List<Field> required, boolean allRequired) {
    HashMap<String, Object> filteredMap = new HashMap<>();
    if (allFieldsReturn || allRequired) {
        filteredMap.putAll(fieldsMap);
        return;
    }
    for (Field field : required) {
        String name = field.getName();
        String returnName = name;
        String alias = field.getAlias();
        if (alias != null && alias != "") {
            returnName = alias;
            aliasesOnReturn.add(alias);
        }
        filteredMap.put(returnName, deepSearchInMap(fieldsMap, name));
    }
    fieldsMap.clear();
    fieldsMap.putAll(filteredMap);
}
Example 5
Source File: VariableScopeImpl.java (from activiti6-boot2, Apache License 2.0)

protected Map<String, Object> collectVariables(HashMap<String, Object> variables) {
    ensureVariableInstancesInitialized();
    VariableScopeImpl parentScope = getParentVariableScope();
    if (parentScope != null) {
        variables.putAll(parentScope.collectVariables(variables));
    }
    for (VariableInstanceEntity variableInstance : variableInstances.values()) {
        variables.put(variableInstance.getName(), variableInstance.getValue());
    }
    for (String variableName : usedVariablesCache.keySet()) {
        variables.put(variableName, usedVariablesCache.get(variableName).getValue());
    }
    return variables;
}
Example 6
Source File: XML11DTDDVFactoryImpl.java (from openjdk-jdk8u-backup, GNU General Public License v2.0)

/**
 * get all built-in DVs, which are stored in a Map keyed by the name
 * New XML 1.1 datatypes are inserted.
 *
 * @return a Map which contains all datatypes
 */
@Override
public Map<String, DatatypeValidator> getBuiltInTypes() {
    final HashMap<String, DatatypeValidator> toReturn = new HashMap<>(fBuiltInTypes);
    toReturn.putAll(XML11BUILTINTYPES);
    return toReturn;
}
Example 7
Source File: SignificanceHeuristicStreams.java (from Elasticsearch, Apache License 2.0)

/**
 * Registers the given stream and associate it with the given types.
 *
 * @param stream The stream to register
 */
public static synchronized void registerStream(Stream stream) {
    if (STREAMS.containsKey(stream.getName())) {
        throw new IllegalArgumentException(
                "Can't register stream with name [" + stream.getName() + "] more than once");
    }
    HashMap<String, Stream> map = new HashMap<>();
    map.putAll(STREAMS);
    map.put(stream.getName(), stream);
    STREAMS = Collections.unmodifiableMap(map);
}
Example 8
Source File: Decoder.java (from openjdk-8-source, GNU General Public License v2.0)

/**
 * {@inheritDoc}
 */
public void setExternalVocabularies(Map referencedVocabualries) {
    if (referencedVocabualries != null) {
        // Clone the input map
        _externalVocabularies = new HashMap();
        _externalVocabularies.putAll(referencedVocabualries);
    } else {
        _externalVocabularies = null;
    }
}
Example 9
Source File: Decoder.java (from openjdk-jdk8u, GNU General Public License v2.0)

/**
 * {@inheritDoc}
 */
public void setExternalVocabularies(Map referencedVocabualries) {
    if (referencedVocabualries != null) {
        // Clone the input map
        _externalVocabularies = new HashMap();
        _externalVocabularies.putAll(referencedVocabualries);
    } else {
        _externalVocabularies = null;
    }
}
Example 10
Source File: CopyOnWriteMap.java (from BiglyBT, GNU General Public License v2.0)

public void setAll(Map<K, V> m) {
    synchronized (this) {
        HashMap<K, V> new_map = new HashMap<>();
        new_map.putAll(m);
        this.map = new_map;
    }
}
Example 11
Source File: RegionWithHDFSBasicDUnitTest.java (from gemfirexd-oss, Apache License 2.0)

protected void verifyTwoHDFSFilesWithTwoEntries(VM vm, String uniqueName, String value) throws Exception {
    HashMap<String, HashMap<String, String>> filesToEntriesMap =
            createFilesAndEntriesMap(vm, uniqueName, uniqueName);
    assertTrue("there should be exactly two files, but there are " + filesToEntriesMap.size(),
            filesToEntriesMap.size() == 2);
    HashMap<String, String> entriesMap = new HashMap<String, String>();
    for (HashMap<String, String> v : filesToEntriesMap.values()) {
        entriesMap.putAll(v);
    }
    assertTrue("Expected key K1 received " + entriesMap.get(value + "1vm0"),
            entriesMap.get(value + "1vm0").equals("K1"));
    assertTrue("Expected key K2 received " + entriesMap.get(value + "2vm0"),
            entriesMap.get(value + "2vm0").equals("K2"));
    assertTrue("Expected key K2 received " + entriesMap.get(value + "2vm1"),
            entriesMap.get(value + "2vm1").equals("K2"));
    assertTrue("Expected key K3 received " + entriesMap.get(value + "3vm1"),
            entriesMap.get(value + "3vm1").equals("K3"));
}
Example 12
Source File: UsersTest.java (from hugegraph, Apache License 2.0)

@Test
public void testCreateTarget() {
    HugeGraph graph = graph();
    UserManager userManager = graph.userManager();

    HugeTarget target = makeTarget("graph1", "127.0.0.1:8080");
    target.creator("admin");
    Id id = userManager.createTarget(target);

    target = userManager.getTarget(id);
    Assert.assertEquals("graph1", target.name());
    Assert.assertEquals("127.0.0.1:8080", target.url());
    Assert.assertEquals(target.create(), target.update());

    HashMap<String, Object> expected = new HashMap<>();
    expected.putAll(ImmutableMap.of("target_name", "graph1",
                                    "target_graph", "graph1",
                                    "target_url", "127.0.0.1:8080",
                                    "target_creator", "admin"));
    expected.putAll(ImmutableMap.of("target_create", target.create(),
                                    "target_update", target.update(),
                                    "id", target.id()));
    Assert.assertEquals(expected, target.asMap());
}
Example 13
Source File: OnekeyShare.java (from enjoyshop, Apache License 2.0)

@SuppressWarnings("unchecked")
public void show(Context context) {
    HashMap<String, Object> shareParamsMap = new HashMap<String, Object>();
    shareParamsMap.putAll(params);
    if (!(context instanceof MobApplication)) {
        MobSDK.init(context.getApplicationContext());
    }

    // Record statistics for opening the share menu.
    ShareSDK.logDemoEvent(1, null);

    int iTheme = 0;
    try {
        iTheme = ResHelper.parseInt(String.valueOf(shareParamsMap.remove("theme")));
    } catch (Throwable t) {
    }
    OnekeyShareTheme theme = OnekeyShareTheme.fromValue(iTheme);
    OnekeyShareThemeImpl themeImpl = theme.getImpl();

    themeImpl.setShareParamsMap(shareParamsMap);
    themeImpl.setDialogMode(shareParamsMap.containsKey("dialogMode")
            ? ((Boolean) shareParamsMap.remove("dialogMode")) : false);
    themeImpl.setSilent(shareParamsMap.containsKey("silent")
            ? ((Boolean) shareParamsMap.remove("silent")) : false);
    themeImpl.setCustomerLogos((ArrayList<CustomerLogo>) shareParamsMap.remove("customers"));
    themeImpl.setHiddenPlatforms((HashMap<String, String>) shareParamsMap.remove("hiddenPlatforms"));
    themeImpl.setPlatformActionListener((PlatformActionListener) shareParamsMap.remove("callback"));
    themeImpl.setShareContentCustomizeCallback(
            (ShareContentCustomizeCallback) shareParamsMap.remove("customizeCallback"));
    if (shareParamsMap.containsKey("disableSSO")
            ? ((Boolean) shareParamsMap.remove("disableSSO")) : false) {
        themeImpl.disableSSO();
    }
    themeImpl.show(context.getApplicationContext());
}
Example 14
Source File: WSDLCustomParser.java (from zap-extensions, Apache License 2.0)

private void parseWSDL(Definitions wsdl, boolean sendMessages) {
    StringBuilder sb = new StringBuilder();
    List<Service> services = wsdl.getServices();
    keyIndex++;
    /* Endpoint identification. */
    for (Service service : services) {
        for (Port port : service.getPorts()) {
            Binding binding = port.getBinding();
            AbstractBinding innerBinding = binding.getBinding();
            String soapPrefix = innerBinding.getPrefix();
            // SOAP 1.X, where X is represented by this variable.
            int soapVersion = detectSoapVersion(wsdl, soapPrefix);
            /* If the binding is not a SOAP binding, it is ignored. */
            String style = detectStyle(innerBinding);
            if (style != null && (style.equals("document") || style.equals("rpc"))) {
                List<BindingOperation> operations = binding.getOperations();
                String endpointLocation = port.getAddress().getLocation().toString();
                sb.append("\n|-- Port detected: " + port.getName() + " (" + endpointLocation + ")\n");
                /* Identifies operations for each endpoint. */
                for (BindingOperation bindOp : operations) {
                    String opDisplayName = "/" + bindOp.getName() + " (v1." + soapVersion + ")";
                    sb.append("|\t|-- SOAP 1." + soapVersion + " Operation: " + bindOp.getName());
                    /* Adds this operation to the global operations chart. */
                    recordOperation(keyIndex, bindOp);
                    /* Identifies operation's parameters. */
                    List<Part> requestParts = detectParameters(wsdl, bindOp);
                    /* Set values to parameters. */
                    HashMap<String, String> formParams = new HashMap<String, String>();
                    for (Part part : requestParts) {
                        Element element = part.getElement();
                        if (element != null) {
                            formParams.putAll(fillParameters(element, null));
                        }
                    }
                    /* Connection test for each operation. */
                    /* Basic message creation. */
                    SOAPMsgConfig soapConfig = new SOAPMsgConfig(wsdl, soapVersion, formParams, port, bindOp);
                    lastConfig = soapConfig;
                    HttpMessage requestMessage = createSoapRequest(soapConfig);
                    if (sendMessages) sendSoapRequest(keyIndex, requestMessage, opDisplayName, sb);
                } // bindingOperations loop
            } // Binding check if
        } // Ports loop
    }
    printOutput(sb);
}
Example 15
Source File: TradeSecuritiesDMLDistTxStmtJson.java (from gemfirexd-oss, Apache License 2.0)

@SuppressWarnings("unchecked")
@Override
public boolean insertGfxd(Connection gConn, boolean withDerby) {
    if (!withDerby) {
        return insertGfxdOnly(gConn);
    }
    int size = 1;
    int[] sec_id = new int[size];
    String[] symbol = new String[size];
    String[] exchange = new String[size];
    BigDecimal[] price = new BigDecimal[size];
    int[] updateCount = new int[size];
    SQLException gfxdse = null;

    getDataForInsert(sec_id, symbol, exchange, price, size); // get the data

    HashMap<String, Integer> modifiedKeysByOp = new HashMap<String, Integer>();
    modifiedKeysByOp.put(getTableName() + "_" + sec_id[0],
            (Integer) SQLDistTxTest.curTxId.get());
    // when two tx insert/update could lead to unique key constraint violation,
    // only one tx could hold the unique key lock
    // if unique key already exists, committed prior to this round, it should get
    // unique key constraint violation, not conflict or lock not held exception
    modifiedKeysByOp.put(getTableName() + "_unique_" + symbol[0] + "_" + exchange[0],
            (Integer) SQLDistTxTest.curTxId.get());
    Log.getLogWriter().info("gemfirexd - TXID:" + (Integer) SQLDistTxTest.curTxId.get()
            + " need to hold the unique key "
            + getTableName() + "_unique_" + symbol[0] + "_" + exchange[0]);

    HashMap<String, Integer> modifiedKeysByTx =
            (HashMap<String, Integer>) SQLDistTxTest.curTxModifiedKeys.get();

    // no need to check fk, as securities is a parent table

    try {
        insertToGfxdTable(gConn, sec_id, symbol, exchange, price, updateCount, size);
    } catch (SQLException se) {
        SQLHelper.printSQLException(se);
        if (se.getSQLState().equalsIgnoreCase("X0Z02")) {
            if (!batchingWithSecondaryData)
                verifyConflict(modifiedKeysByOp, modifiedKeysByTx, se, true);
            else
                verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, se, hasSecondary, true);
            return false;
        } else if (gfxdtxHANotReady && isHATest && SQLHelper.gotTXNodeFailureException(se)) {
            SQLHelper.printSQLException(se);
            Log.getLogWriter().info("gemfirexd - TXID:" + (Integer) SQLDistTxTest.curTxId.get()
                    + " got node failure exception during Tx with HA support, continue testing");
            return false;
        } else {
            gfxdse = se;
        }
    }

    if (!batchingWithSecondaryData)
        verifyConflict(modifiedKeysByOp, modifiedKeysByTx, gfxdse, false);
    else
        verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, gfxdse, hasSecondary, false);

    // add this operation for derby
    addInsertToDerbyTx(sec_id, symbol, exchange, price, updateCount, gfxdse);

    modifiedKeysByTx.putAll(modifiedKeysByOp);
    SQLDistTxTest.curTxModifiedKeys.set(modifiedKeysByTx);
    return true;
}
Example 16
Source File: OptimizerRuleBased.java (from systemds, Apache License 2.0)

protected boolean rewriteSetDataPartitioner(OptNode n, LocalVariableMap vars,
        HashMap<String, PartitionFormat> partitionedMatrices, double thetaM, boolean constrained) {
    if (n.getNodeType() != NodeType.PARFOR)
        LOG.warn(getOptMode() + " OPT: Data partitioner can only be set for a ParFor node.");

    boolean blockwise = false;

    // preparations
    long id = n.getID();
    Object[] o = OptTreeConverter.getAbstractPlanMapping().getMappedProg(id);
    ParForStatementBlock pfsb = (ParForStatementBlock) o[0];
    ParForProgramBlock pfpb = (ParForProgramBlock) o[1];

    // search for candidates
    boolean apply = false;
    if (OptimizerUtils.isHybridExecutionMode() // only if we are allowed to recompile
            && (_N >= PROB_SIZE_THRESHOLD_PARTITIONING
                || _Nmax >= PROB_SIZE_THRESHOLD_PARTITIONING)) // only if beneficial wrt problem size
    {
        HashMap<String, PartitionFormat> cand2 = new HashMap<>();
        for (String c : pfsb.getReadOnlyParentMatrixVars()) {
            PartitionFormat dpf = pfsb.determineDataPartitionFormat(c);
            double mem = getMemoryEstimate(c, vars);
            if (dpf != PartitionFormat.NONE
                    && dpf._dpf != PDataPartitionFormat.BLOCK_WISE_M_N
                    && (constrained || (mem > _lm / 2 && mem > _rm / 2))
                    && vars.get(c) != null // robustness non-existing vars
                    && !vars.get(c).getDataType().isList()) {
                cand2.put(c, dpf);
            }
        }
        apply = rFindDataPartitioningCandidates(n, cand2, vars, thetaM);
        if (apply)
            partitionedMatrices.putAll(cand2);
    }

    PDataPartitioner REMOTE = PDataPartitioner.REMOTE_SPARK;
    PDataPartitioner pdp = (apply) ? REMOTE : PDataPartitioner.NONE;
    // NOTE: since partitioning is only applied in case of MR index access, we assume a large
    //   matrix and hence always apply REMOTE_MR (the benefit for large matrices outweigths
    //   potentially unnecessary MR jobs for smaller matrices)

    // modify rtprog
    pfpb.setDataPartitioner(pdp);
    // modify plan
    n.addParam(ParamType.DATA_PARTITIONER, pdp.toString());

    _numEvaluatedPlans++;
    LOG.debug(getOptMode() + " OPT: rewrite 'set data partitioner' - result=" + pdp.toString()
            + " (" + Arrays.toString(partitionedMatrices.keySet().toArray()) + ")");

    return blockwise;
}
Example 17
Source File: DataExtractionTaskV1.java (from birt, Eclipse Public License 1.0)

private IDataExtractionOption setupExtractOption(IDataExtractionOption options) {
    // setup the data extraction options from:
    HashMap allOptions = new HashMap();

    // try to get the default render option from the engine config.
    HashMap configs = engine.getConfig().getEmitterConfigs();
    // get the default format of the emitters, the default format key is
    // IRenderOption.OUTPUT_FORMAT;
    IRenderOption defaultOptions = (IRenderOption) configs.get(IEngineConfig.DEFAULT_RENDER_OPTION);
    if (defaultOptions != null) {
        allOptions.putAll(defaultOptions.getOptions());
    }

    // try to get the render options by the format
    IRenderOption defaultHtmlOptions = (IRenderOption) configs.get(IRenderOption.OUTPUT_FORMAT_HTML);
    if (defaultHtmlOptions != null) {
        allOptions.putAll(defaultHtmlOptions.getOptions());
    }

    // merge the user's setting
    allOptions.putAll(options.getOptions());

    // copy the new setting to old APIs
    Map appContext = executionContext.getAppContext();
    Object renderContext = appContext.get(EngineConstants.APPCONTEXT_HTML_RENDER_CONTEXT);
    if (renderContext == null) {
        HTMLRenderContext htmlContext = new HTMLRenderContext();
        HTMLRenderOption htmlOptions = new HTMLRenderOption(allOptions);
        htmlContext.setBaseImageURL(htmlOptions.getBaseImageURL());
        htmlContext.setBaseURL(htmlOptions.getBaseURL());
        htmlContext.setImageDirectory(htmlOptions.getImageDirectory());
        htmlContext.setSupportedImageFormats(htmlOptions.getSupportedImageFormats());
        htmlContext.setRenderOption(htmlOptions);
        appContext.put(EngineConstants.APPCONTEXT_HTML_RENDER_CONTEXT, htmlContext);
    }

    if (options instanceof CubeDataExtractionOption) {
        CubeDataExtractionOption cubeOption = (CubeDataExtractionOption) options;
        this.cubeName = cubeOption.getCubeName();
        return options;
    } else {
        this.cubeName = null;
    }

    // setup the instance id which comes from the task.setInstanceId
    IDataExtractionOption extractOption = new DataExtractionOption(allOptions);
    if (extractOption.getInstanceID() == null) {
        if (instanceId != null) {
            extractOption.setInstanceID(instanceId);
        }
    }
    return extractOption;
}
Example 18
Source File: TypeCheck.java (from astor, GNU General Public License v2.0)

/**
 * Visits a {@link Token#FUNCTION} node.
 *
 * @param t The node traversal object that supplies context, such as the
 *     scope chain to use in name lookups as well as error reporting.
 * @param n The node being visited.
 */
private void visitFunction(NodeTraversal t, Node n) {
    FunctionType functionType = JSType.toMaybeFunctionType(n.getJSType());
    String functionPrivateName = n.getFirstChild().getString();
    if (functionType.isConstructor()) {
        FunctionType baseConstructor = functionType.getSuperClassConstructor();
        if (baseConstructor != getNativeType(OBJECT_FUNCTION_TYPE)
                && baseConstructor != null
                && baseConstructor.isInterface()) {
            compiler.report(
                    t.makeError(n, CONFLICTING_EXTENDED_TYPE, "constructor", functionPrivateName));
        } else {
            if (baseConstructor != getNativeType(OBJECT_FUNCTION_TYPE)) {
                ObjectType proto = functionType.getPrototype();
                if (functionType.makesStructs() && !proto.isStruct()) {
                    compiler.report(t.makeError(n, CONFLICTING_EXTENDED_TYPE,
                            "struct", functionPrivateName));
                } else if (functionType.makesDicts() && !proto.isDict()) {
                    compiler.report(t.makeError(n, CONFLICTING_EXTENDED_TYPE,
                            "dict", functionPrivateName));
                }
            }
            // All interfaces are properly implemented by a class
            for (JSType baseInterface : functionType.getImplementedInterfaces()) {
                boolean badImplementedType = false;
                ObjectType baseInterfaceObj = ObjectType.cast(baseInterface);
                if (baseInterfaceObj != null) {
                    FunctionType interfaceConstructor = baseInterfaceObj.getConstructor();
                    if (interfaceConstructor != null && !interfaceConstructor.isInterface()) {
                        badImplementedType = true;
                    }
                } else {
                    badImplementedType = true;
                }
                if (badImplementedType) {
                    report(t, n, BAD_IMPLEMENTED_TYPE, functionPrivateName);
                }
            }
            // check properties
            validator.expectAllInterfaceProperties(t, n, functionType);
        }
    } else if (functionType.isInterface()) {
        // Interface must extend only interfaces
        for (ObjectType extInterface : functionType.getExtendedInterfaces()) {
            if (extInterface.getConstructor() != null
                    && !extInterface.getConstructor().isInterface()) {
                compiler.report(
                        t.makeError(n, CONFLICTING_EXTENDED_TYPE, "interface", functionPrivateName));
            }
        }
        // Check whether the extended interfaces have any conflicts
        if (functionType.getExtendedInterfacesCount() > 1) {
            // Only check when extending more than one interfaces
            HashMap<String, ObjectType> properties = new HashMap<String, ObjectType>();
            HashMap<String, ObjectType> currentProperties = new HashMap<String, ObjectType>();
            for (ObjectType interfaceType : functionType.getExtendedInterfaces()) {
                currentProperties.clear();
                checkInterfaceConflictProperties(t, n, functionPrivateName,
                        properties, currentProperties, interfaceType);
                properties.putAll(currentProperties);
            }
        }
    }
}
Example 19
Source File: HurlStack.java (from jus, Apache License 2.0)

@Override
public HttpResponse performRequest(Request<?> request, Map<String, String> additionalHeaders)
        throws IOException, AuthFailureError {
    String url = request.getUrl();
    HashMap<String, String> map = new HashMap<String, String>();
    map.putAll(request.getHeaders());
    map.putAll(additionalHeaders);
    if (mUrlRewriter != null) {
        String rewritten = mUrlRewriter.rewriteUrl(url);
        if (rewritten == null) {
            throw new IOException("URL blocked by rewriter: " + url);
        }
        url = rewritten;
    }
    URL parsedUrl = new URL(url);
    HttpURLConnection connection = openConnection(parsedUrl, request);
    for (String headerName : map.keySet()) {
        connection.addRequestProperty(headerName, map.get(headerName));
    }
    setConnectionParametersForRequest(connection, request);
    // Initialize HttpResponse with data from the HttpURLConnection.
    ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1);
    int responseCode = connection.getResponseCode();
    if (responseCode == -1) {
        // -1 is returned by getResponseCode() if the response code could not be retrieved.
        // Signal to the caller that something was wrong with the connection.
        throw new IOException("Could not retrieve response code from HttpUrlConnection.");
    }
    StatusLine responseStatus = new BasicStatusLine(protocolVersion,
            connection.getResponseCode(), connection.getResponseMessage());
    BasicHttpResponse response = new BasicHttpResponse(responseStatus);
    if (hasResponseBody(request.getMethod(), responseStatus.getStatusCode())) {
        response.setEntity(entityFromConnection(connection));
    }
    for (Entry<String, List<String>> header : connection.getHeaderFields().entrySet()) {
        if (header.getKey() != null) {
            Header h = new BasicHeader(header.getKey(), header.getValue().get(0));
            response.addHeader(h);
        }
    }
    return response;
}
Example 20
Source File: Closure_69_TypeCheck_t.java (from coming, MIT License)

/**
 * Visits a {@link Token#FUNCTION} node.
 *
 * @param t The node traversal object that supplies context, such as the
 *     scope chain to use in name lookups as well as error reporting.
 * @param n The node being visited.
 */
private void visitFunction(NodeTraversal t, Node n) {
    FunctionType functionType = (FunctionType) n.getJSType();
    String functionPrivateName = n.getFirstChild().getString();
    if (functionType.isConstructor()) {
        FunctionType baseConstructor =
                functionType.getPrototype().getImplicitPrototype().getConstructor();
        if (baseConstructor != null
                && baseConstructor != getNativeType(OBJECT_FUNCTION_TYPE)
                && (baseConstructor.isInterface() && functionType.isConstructor())) {
            compiler.report(
                    t.makeError(n, CONFLICTING_EXTENDED_TYPE, functionPrivateName));
        } else {
            // All interfaces are properly implemented by a class
            for (JSType baseInterface : functionType.getImplementedInterfaces()) {
                boolean badImplementedType = false;
                ObjectType baseInterfaceObj = ObjectType.cast(baseInterface);
                if (baseInterfaceObj != null) {
                    FunctionType interfaceConstructor = baseInterfaceObj.getConstructor();
                    if (interfaceConstructor != null && !interfaceConstructor.isInterface()) {
                        badImplementedType = true;
                    }
                } else {
                    badImplementedType = true;
                }
                if (badImplementedType) {
                    report(t, n, BAD_IMPLEMENTED_TYPE, functionPrivateName);
                }
            }
            // check properties
            validator.expectAllInterfaceProperties(t, n, functionType);
        }
    } else if (functionType.isInterface()) {
        // Interface must extend only interfaces
        for (ObjectType extInterface : functionType.getExtendedInterfaces()) {
            if (extInterface.getConstructor() != null
                    && !extInterface.getConstructor().isInterface()) {
                compiler.report(
                        t.makeError(n, CONFLICTING_EXTENDED_TYPE, functionPrivateName));
            }
        }
        // Interface cannot implement any interfaces
        if (functionType.hasImplementedInterfaces()) {
            compiler.report(t.makeError(n, CONFLICTING_IMPLEMENTED_TYPE, functionPrivateName));
        }
        // Check whether the extended interfaces have any conflicts
        if (functionType.getExtendedInterfacesCount() > 1) {
            // Only check when extending more than one interfaces
            HashMap<String, ObjectType> properties = new HashMap<String, ObjectType>();
            HashMap<String, ObjectType> currentProperties = new HashMap<String, ObjectType>();
            for (ObjectType interfaceType : functionType.getExtendedInterfaces()) {
                currentProperties.clear();
                checkInterfaceConflictProperties(t, n, functionPrivateName,
                        properties, currentProperties, interfaceType);
                properties.putAll(currentProperties);
            }
        }
    }
}