Java Code Examples for org.bson.Document#keySet()
The following examples show how to use org.bson.Document#keySet().
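Before the project examples, a minimal self-contained sketch of the method itself may help: keySet() returns the set of top-level field names of an org.bson.Document, which can be iterated to inspect or transform each field. The class name, field names, and values below are invented for illustration.

import org.bson.Document;

public class KeySetDemo {
    public static void main(String[] args) {
        // Build a sample document (field names and values are illustrative only).
        Document doc = new Document("name", "Ada")
                .append("age", 36)
                .append("address", new Document("city", "London"));

        // keySet() exposes the top-level field names (typically in insertion order).
        for (String key : doc.keySet()) {
            Object value = doc.get(key);
            System.out.println(key + " -> " + value + " (" + value.getClass().getSimpleName() + ")");
        }
    }
}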
Example 1
Source File: MongoIndex.java From presto with Apache License 2.0 | 6 votes |
private static List<MongodbIndexKey> parseKey(Document key)
{
    ImmutableList.Builder<MongodbIndexKey> builder = ImmutableList.builder();
    for (String name : key.keySet()) {
        Object value = key.get(name);
        if (value instanceof Number) {
            int order = ((Number) value).intValue();
            checkState(order == 1 || order == -1, "Unknown index sort order");
            builder.add(new MongodbIndexKey(name, order == 1 ? SortOrder.ASC_NULLS_LAST : SortOrder.DESC_NULLS_LAST));
        }
        else if (value instanceof String) {
            builder.add(new MongodbIndexKey(name, (String) value));
        }
        else {
            throw new UnsupportedOperationException("Unknown index type: " + value.toString());
        }
    }
    return builder.build();
}
Example 2
Source File: MongoRecords.java From Prism with MIT License | 6 votes |
/**
 * Convert a mongo Document to a DataContainer.
 *
 * @param document Mongo document.
 * @return Data container.
 */
private DataContainer documentToDataContainer(Document document) {
    DataContainer result = DataContainer.createNew();

    for (String key : document.keySet()) {
        DataQuery query = DataUtil.unescapeQuery(key);
        Object value = document.get(key);
        if (value instanceof Document) {
            PrimitiveArray primitiveArray = PrimitiveArray.of((Document) value);
            if (primitiveArray != null) {
                result.set(query, primitiveArray.getArray());
                continue;
            }

            result.set(query, documentToDataContainer((Document) value));
        } else {
            result.set(query, value);
        }
    }

    return result;
}
Example 3
Source File: GenericResourceRepositoryImpl.java From microcks with Apache License 2.0 | 6 votes |
@Override
public List<GenericResource> findByServiceIdAndJSONQuery(String serviceId, String jsonQuery) {
    // First parse the query document and prepare a list of keys to rename and then remove.
    Document query = Document.parse(jsonQuery);
    ArrayList<String> keysToRemove = new ArrayList<>();

    // Collect the keys of the document that should be updated.
    for (String key : query.keySet()) {
        keysToRemove.add(key);
    }

    // Prefix all keys with "payload." (the nested document where the resource is stored)
    // and remove all modified keys.
    for (String keyToRemove : keysToRemove) {
        query.append("payload." + keyToRemove, query.get(keyToRemove));
        query.remove(keyToRemove);
    }

    // Finally, append the serviceId criterion before launching the selection.
    query.append("serviceId", serviceId);
    return template.find(new BasicQuery(query.toJson()), GenericResource.class);
}
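A detail worth calling out in Example 3: the key set is copied into a separate list before the keys are renamed, because appending and removing entries while iterating the live key set would throw a ConcurrentModificationException. Below is a minimal standalone sketch of the same prefixing idiom; the query content and class name are invented.

import java.util.ArrayList;
import java.util.List;
import org.bson.Document;

public class PrefixKeysSketch {
    public static void main(String[] args) {
        Document query = Document.parse("{\"name\": \"laptop\", \"price\": {\"$lt\": 500}}");

        // Snapshot the key set first; mutating the document while iterating keySet() directly would fail.
        List<String> keys = new ArrayList<>(query.keySet());
        for (String key : keys) {
            query.append("payload." + key, query.get(key));
            query.remove(key);
        }

        // e.g. {"payload.name": "laptop", "payload.price": {"$lt": 500}}
        System.out.println(query.toJson());
    }
}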
Example 4
Source File: RefreshOperation.java From jpa-unit with Apache License 2.0 | 6 votes |
@Override
public void execute(final MongoDatabase connection, final Document data) {
    for (final String collectionName : data.keySet()) {
        final MongoCollection<Document> collection = connection.getCollection(collectionName);

        @SuppressWarnings("unchecked")
        final List<Document> documents = data.get(collectionName, List.class);
        for (final Document doc : documents) {
            final UpdateResult result = collection.replaceOne(Filters.eq(doc.get("_id")), doc);
            if (result.getMatchedCount() == 0) {
                collection.insertOne(doc);
            }
        }
    }
}
Example 5
Source File: DataSetLoaderProviderTest.java From jpa-unit with Apache License 2.0 | 6 votes |
@Test
public void testJsonLoaderLoadUsingProperResource() throws Exception {
    // WHEN
    final DataSetLoader<Document> loader = LOADER_PROVIDER.jsonLoader();

    // THEN
    assertThat(loader, notNullValue());

    // WHEN
    final Document document = loader.load(getFile("test-data.json"));

    // THEN
    assertThat(document, notNullValue());

    final Set<String> tableNames = document.keySet();
    assertThat(tableNames.size(), equalTo(2));
    assertThat(tableNames, hasItems("JSON_COLLECTION_1", "JSON_COLLECTION_2"));

    final List<Document> collection1 = document.get("JSON_COLLECTION_1", List.class);
    assertThat(collection1.size(), equalTo(3));

    final List<Document> collection2 = document.get("JSON_COLLECTION_2", List.class);
    assertThat(collection2.size(), equalTo(1));
}
Example 6
Source File: DeleteAllOperation.java From jpa-unit with Apache License 2.0 | 5 votes |
@Override
public void execute(final MongoDatabase connection, final Document data) {
    for (final String collectionName : data.keySet()) {
        final MongoCollection<Document> collection = connection.getCollection(collectionName);
        collection.deleteMany(new Document());
    }
}
Example 7
Source File: TestSerializedFormat.java From morphia with Apache License 2.0 | 5 votes |
private void verifyCoverage(final Document document) {
    for (MappedField field : getMapper().getMappedClass(ReferenceType.class).getFields()) {
        String name = field.getMappedFieldName();
        boolean found = document.containsKey(name);
        if (!found) {
            for (String s : document.keySet()) {
                found |= s.startsWith(name + ".");
            }
        }
        assertTrue("Not found in document: " + name, found);
    }
}
Example 8
Source File: MapKeyDotReplacer.java From javers with Apache License 2.0 | 5 votes |
private Document replaceInPropertyMaps(Document snapshot, String regexFrom, String from, String to) {
    Document state = getState(snapshot);

    for (String pName : state.keySet()) {
        if (state.get(pName) instanceof Document) {
            Document mapProperty = (Document) state.get(pName);
            state.put(pName, replaceInMapKeys(mapProperty, regexFrom, from, to));
        }
    }
    return snapshot;
}
Example 9
Source File: AggregationPipelineQueryNode.java From rya with Apache License 2.0 | 5 votes |
/**
 * Given a StatementPattern, generate an object representing the arguments
 * to a "$match" command that will find matching triples.
 * @param sp The StatementPattern to search for
 * @param path If given, specify the field that should be matched against
 *  the statement pattern, using an ordered list of field names for a nested
 *  field. E.g. to match records { "x": { "y": <statement pattern> } }, pass
 *  "x" followed by "y".
 * @return The argument of a "$match" query
 */
private static Document getMatchExpression(final StatementPattern sp, final String... path) {
    final Var subjVar = sp.getSubjectVar();
    final Var predVar = sp.getPredicateVar();
    final Var objVar = sp.getObjectVar();
    final Var contextVar = sp.getContextVar();
    RyaIRI s = null;
    RyaIRI p = null;
    RyaType o = null;
    RyaIRI c = null;
    if (subjVar != null && subjVar.getValue() instanceof Resource) {
        s = RdfToRyaConversions.convertResource((Resource) subjVar.getValue());
    }
    if (predVar != null && predVar.getValue() instanceof IRI) {
        p = RdfToRyaConversions.convertIRI((IRI) predVar.getValue());
    }
    if (objVar != null && objVar.getValue() != null) {
        o = RdfToRyaConversions.convertValue(objVar.getValue());
    }
    if (contextVar != null && contextVar.getValue() instanceof IRI) {
        c = RdfToRyaConversions.convertIRI((IRI) contextVar.getValue());
    }
    final RyaStatement rs = new RyaStatement(s, p, o, c);
    final Document obj = strategy.getQuery(rs);
    // Add path prefix, if given
    if (path.length > 0) {
        final StringBuilder sb = new StringBuilder();
        for (final String str : path) {
            sb.append(str).append(".");
        }
        final String prefix = sb.toString();
        final Set<String> originalKeys = new HashSet<>(obj.keySet());
        originalKeys.forEach(key -> {
            final Object value = obj.remove(key);
            obj.put(prefix + key, value);
        });
    }
    return obj;
}
Example 10
Source File: MongoMetadataDaoImpl.java From eagle with Apache License 2.0 | 5 votes |
/**
 * Because some field names in SpoutSpec contain a dot (.), which is not a valid Mongo field name,
 * we need to transform the format before storing it in Mongo.
 * @return OpResult
 */
private <T> OpResult addOneSpoutSpec(T t) {
    OpResult result = new OpResult();
    String json = "";
    try {
        json = mapper.writeValueAsString(t);
        Document doc = Document.parse(json);

        String[] metadataMapArrays = {"kafka2TupleMetadataMap", "tuple2StreamMetadataMap", "streamRepartitionMetadataMap"};
        for (String metadataMapName : metadataMapArrays) {
            Document _metadataMapDoc = (Document) doc.get(metadataMapName);
            doc.remove(metadataMapName);

            ArrayList<Document> _metadataMapArray = new ArrayList<>();
            for (String key : _metadataMapDoc.keySet()) {
                Document _subDoc = new Document();
                _subDoc.put("topicName", key);
                _subDoc.put(metadataMapName, _metadataMapDoc.get(key));
                _metadataMapArray.add(_subDoc);
            }
            doc.append(metadataMapName, _metadataMapArray);
        }

        spoutSpecs.insertOne(doc);
        result.code = 200;
        result.message = String.format("add one document [%s] to collection [%s] succeed!", doc.toJson(), spoutSpecs.getNamespace());
        LOG.info(result.message);
    } catch (Exception e) {
        result.code = 400;
        result.message = e.getMessage();
        LOG.error(String.format("Add one document [%s] to collection [%s] failed!", json, spoutSpecs.getNamespace()), e);
    }
    return result;
}
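The reshaping done in Example 10 is easier to see in isolation: a nested document whose keys are topic names (which may contain dots, an illegal character in Mongo field names) is replaced by an array of sub-documents that carry the original key under a safe "topicName" field. The sketch below reproduces just that step; the data and class name are invented.

import java.util.ArrayList;
import java.util.List;
import org.bson.Document;

public class MapToArraySketch {
    public static void main(String[] args) {
        // Keys with dots cannot be stored as Mongo field names, so the map is flattened into an array.
        Document doc = Document.parse(
                "{\"kafka2TupleMetadataMap\": {\"topic.a\": {\"schema\": 1}, \"topic.b\": {\"schema\": 2}}}");

        Document map = (Document) doc.remove("kafka2TupleMetadataMap");
        List<Document> entries = new ArrayList<>();
        for (String topic : map.keySet()) {
            entries.add(new Document("topicName", topic)
                    .append("kafka2TupleMetadataMap", map.get(topic)));
        }
        doc.append("kafka2TupleMetadataMap", entries);

        // {"kafka2TupleMetadataMap": [{"topicName": "topic.a", ...}, {"topicName": "topic.b", ...}]}
        System.out.println(doc.toJson());
    }
}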
Example 11
Source File: MongoResultsWriter.java From spring-data-dev-tools with Apache License 2.0 | 5 votes |
/**
 * Replace {@code .} by {@code ,}.
 *
 * @param doc
 * @return
 */
private static Document fixDocumentKeys(Document doc) {

    Document sanitized = new Document();

    for (Object key : doc.keySet()) {

        Object value = doc.get(key);
        if (value instanceof Document) {
            value = fixDocumentKeys((Document) value);
        } else if (value instanceof Map) {
            value = fixDocumentKeys(new Document((Map<String, Object>) value));
        }

        if (key instanceof String) {

            String newKey = (String) key;
            if (newKey.contains(".")) {
                newKey = newKey.replace('.', ',');
            }

            sanitized.put(newKey, value);
        } else {
            sanitized.put(ObjectUtils.nullSafeToString(key).replace('.', ','), value);
        }
    }

    return sanitized;
}
Example 12
Source File: MapKeyDotReplacer.java From javers with Apache License 2.0 | 5 votes |
private Document replaceInMapKeys(Document map, String regexFrom, String from, String to) {
    for (String key : new HashSet<>(map.keySet())) {
        if (key.contains(from)) {
            String escaped = key.replaceAll(regexFrom, to);

            Object val = map.get(key);
            map.remove(key);
            map.put(escaped, val);
        }
    }
    return map;
}
Example 13
Source File: UpdateOperation.java From jpa-unit with Apache License 2.0 | 5 votes |
@Override
public void execute(final MongoDatabase connection, final Document data) {
    for (final String collectionName : data.keySet()) {
        @SuppressWarnings("unchecked")
        final List<Document> documents = data.get(collectionName, List.class);
        final MongoCollection<Document> collection = connection.getCollection(collectionName);
        documents.forEach(d -> collection.replaceOne(Filters.eq(d.get("_id")), d));
    }
}
Example 14
Source File: TestDocumentValidation.java From morphia with Apache License 2.0 | 5 votes |
private void checkValidation(final Document validator, final MappedClass mappedClass, final ValidationLevel level,
                             final ValidationAction action) {
    updateValidation(mappedClass, level, action);
    Document expected = new Document("validator", validator)
            .append("validationLevel", level.getValue())
            .append("validationAction", action.getValue());

    Document validation = getValidation();
    for (String key : expected.keySet()) {
        assertEquals(expected.get(key), validation.get(key));
    }
}
Example 15
Source File: MongoDocumentStorage.java From lumongo with Apache License 2.0 | 5 votes |
private AssociatedDocument loadGridFSToAssociatedDocument(GridFSBucket gridFS, GridFSFile file, FetchType fetchType)
        throws IOException {
    AssociatedDocument.Builder aBuilder = AssociatedDocument.newBuilder();
    aBuilder.setFilename(file.getFilename());
    Document metadata = file.getMetadata();

    boolean compressed = false;
    if (metadata.containsKey(COMPRESSED_FLAG)) {
        compressed = (boolean) metadata.remove(COMPRESSED_FLAG);
    }

    long timestamp = (long) metadata.remove(TIMESTAMP);

    aBuilder.setCompressed(compressed);
    aBuilder.setTimestamp(timestamp);

    aBuilder.setDocumentUniqueId((String) metadata.remove(DOCUMENT_UNIQUE_ID_KEY));
    for (String field : metadata.keySet()) {
        aBuilder.addMetadata(Metadata.newBuilder().setKey(field).setValue((String) metadata.get(field)));
    }

    if (FetchType.FULL.equals(fetchType)) {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        gridFS.downloadToStream(file.getObjectId(), byteArrayOutputStream);

        byte[] bytes = byteArrayOutputStream.toByteArray();
        if (null != bytes) {
            if (compressed) {
                bytes = CommonCompression.uncompressZlib(bytes);
            }
            aBuilder.setDocument(ByteString.copyFrom(bytes));
        }
    }
    aBuilder.setIndexName(indexName);
    return aBuilder.build();
}
Example 16
Source File: ExampleSlowOpsCache.java From mongodb-slow-operations-profiler with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Replace illegal characters in field names, such as $ and .,
 * by their HTML entities &#36; and &#46; respectively,
 * because the doc will be shown only in browsers.
 * https://docs.mongodb.com/manual/reference/limits/#Restrictions-on-Field-Names
 *
 * Side effect: since all nodes of the document will be traversed in order to replace the illegal chars in their keys,
 * values that are shown in the mongo shell as an Object will be cut down into their parts, e.g.:
 * ISODate("2020-02-29T20:42:23.979Z")
 * becomes:
 * {"$date": "2020-02-29T20:42:23.979Z"}
 * or:
 * {"id" : BinData(3,"zU1B4cnuCK9/ntQZkFU4gA==")}
 * becomes:
 * {"id": {
 *     "$binary": {
 *         "base64": "zU1B4cnuCK9/ntQZkFU4gA==",
 *         "subType": "03"
 *     }
 *   }
 * }
 *
 * @param
 */
private Document replaceIllegalChars(Object input, Document output) {
    if (input instanceof Document) {
        Document dbObj = (Document) input;

        for (String key : dbObj.keySet()) {
            Object obj = dbObj.get(key);
            key = key.replace("$", "&#36;").replace(".", "&#46;");

            if (obj instanceof Collection) {
                final List<Object> list = Lists.newLinkedList();
                for (Object subObj : (Collection<Object>) obj) {
                    if (!(subObj instanceof Document || subObj instanceof Collection)) {
                        list.add(subObj); // add scalar value
                    } else {
                        list.add(replaceIllegalChars(subObj, new Document())); // add structured value
                    }
                }
                output.append(key, list);
            } else if (obj instanceof Document) {
                final Document cleanDoc = replaceIllegalChars(obj, new Document());
                output.append(key, cleanDoc);
            } else {
                output.append(key, obj);
            }
        }
    }
    return output;
}
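Since MongoDB restricts "$" and "." in field names (see the manual page linked above), Example 16 rewrites every key before the document is rendered in a browser. The following is a reduced, standalone sketch of the same recursive key escaping; it handles only nested documents (not collections), and the field names are made up.

import org.bson.Document;

public class EscapeKeysSketch {

    // Recursively copy a document, escaping "$" and "." in keys as HTML entities.
    static Document escapeKeys(Document input) {
        Document output = new Document();
        for (String key : input.keySet()) {
            Object value = input.get(key);
            String safeKey = key.replace("$", "&#36;").replace(".", "&#46;");
            output.append(safeKey, value instanceof Document ? escapeKeys((Document) value) : value);
        }
        return output;
    }

    public static void main(String[] args) {
        Document op = new Document("$set", new Document("user.name", "bob"));
        // {"&#36;set": {"user&#46;name": "bob"}}
        System.out.println(escapeKeys(op).toJson());
    }
}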
Example 17
Source File: SchemaUtils.java From aws-athena-query-federation with Apache License 2.0 | 4 votes |
/**
 * This method will produce an Apache Arrow Schema for the given TableName and DocumentDB connection
 * by scanning up to the requested number of rows and using basic schema inference to determine
 * data types.
 *
 * @param client The DocumentDB connection to use for the scan operation.
 * @param table The DocumentDB TableName for which to produce an Apache Arrow Schema.
 * @param numObjToSample The number of records to scan as part of producing the Schema.
 * @return An Apache Arrow Schema representing the schema of the scanned DocumentDB collection.
 * @note The resulting schema is a union of the schema of every row that is scanned. Presently the code does not
 * attempt to resolve conflicts if a unique field has different types across documents. It is recommended that you
 * use AWS Glue to define a schema for tables which may have such conflicts. In the future we may enhance this method
 * to use a reasonable default (like String) and coerce heterogeneous fields to avoid query failure, but forcing
 * explicit handling by defining the Schema in AWS Glue is likely a better approach.
 */
public static Schema inferSchema(MongoClient client, TableName table, int numObjToSample) {
    MongoDatabase db = client.getDatabase(table.getSchemaName());
    int docCount = 0;
    int fieldCount = 0;
    try (MongoCursor<Document> docs = db.getCollection(table.getTableName()).find().batchSize(numObjToSample)
            .maxScan(numObjToSample).limit(numObjToSample).iterator()) {
        if (!docs.hasNext()) {
            return SchemaBuilder.newBuilder().build();
        }
        SchemaBuilder schemaBuilder = SchemaBuilder.newBuilder();

        while (docs.hasNext()) {
            docCount++;
            Document doc = docs.next();
            for (String key : doc.keySet()) {
                fieldCount++;
                Field newField = getArrowField(key, doc.get(key));
                Types.MinorType newType = Types.getMinorTypeForArrowType(newField.getType());
                Field curField = schemaBuilder.getField(key);
                Types.MinorType curType = (curField != null) ? Types.getMinorTypeForArrowType(curField.getType()) : null;

                if (curField == null) {
                    schemaBuilder.addField(newField);
                }
                else if (newType != curType) {
                    //TODO: currently we resolve fields with mixed types by defaulting to VARCHAR. This is _not_ ideal
                    logger.warn("inferSchema: Encountered a mixed-type field[{}] {} vs {}, defaulting to String.",
                            key, curType, newType);
                    schemaBuilder.addStringField(key);
                }
                else if (curType == Types.MinorType.LIST) {
                    schemaBuilder.addField(mergeListField(key, curField, newField));
                }
                else if (curType == Types.MinorType.STRUCT) {
                    schemaBuilder.addField(mergeStructField(key, curField, newField));
                }
            }
        }

        Schema schema = schemaBuilder.build();
        if (schema.getFields().isEmpty()) {
            throw new RuntimeException("No columns found after scanning " + fieldCount + " values across " +
                    docCount + " documents. Please ensure the collection is not empty and contains at least 1 supported column type.");
        }
        return schema;
    }
    finally {
        logger.info("inferSchema: Evaluated {} field values across {} documents.", fieldCount, docCount);
    }
}
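The @note in Example 17 captures the key design decision: the inferred schema is the union over all sampled documents, and a field observed with two different types collapses to a string type. The toy sketch below illustrates that merge rule with plain Java type names instead of Arrow fields; the class name, data, and "varchar" sentinel are invented.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.bson.Document;

public class UnionSchemaSketch {

    // Merge field -> type over all sampled documents; conflicting types collapse to a string-like default.
    static Map<String, String> unionSchema(List<Document> sample) {
        Map<String, String> schema = new LinkedHashMap<>();
        for (Document doc : sample) {
            for (String key : doc.keySet()) {
                String newType = doc.get(key) == null ? "null" : doc.get(key).getClass().getSimpleName();
                String curType = schema.get(key);
                if (curType == null) {
                    schema.put(key, newType);
                } else if (!curType.equals(newType)) {
                    schema.put(key, "varchar"); // mixed types default to VARCHAR, mirroring the warning above
                }
            }
        }
        return schema;
    }

    public static void main(String[] args) {
        List<Document> docs = List.of(
                new Document("id", 1).append("price", 9.99),
                new Document("id", "two").append("name", "widget"));
        System.out.println(unionSchema(docs)); // {id=varchar, price=Double, name=String}
    }
}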
Example 18
Source File: GameServer.java From Much-Assembly-Required with GNU General Public License v3.0 | 4 votes |
void load() {
    LogManager.LOGGER.info("Loading all data from MongoDB");

    MongoDatabase db = mongo.getDatabase(config.getString("mongo_dbname"));

    MongoCollection<Document> worlds = db.getCollection("world");
    MongoCollection<Document> server = db.getCollection("server");

    Document whereQuery = new Document();
    whereQuery.put("shouldUpdate", true);
    MongoCursor<Document> cursor = worlds.find(whereQuery).iterator();
    GameUniverse universe = GameServer.INSTANCE.getGameUniverse();
    while (cursor.hasNext()) {
        World w = World.deserialize(cursor.next());
        universe.addWorld(w);
    }

    // Load users
    ArrayList<User> userList = userManager.getUsers();
    for (User user : userList) {
        universe.addUser(user);
    }

    // Load server & plugin data
    cursor = server.find().iterator();
    if (cursor.hasNext()) {
        Document serverObj = cursor.next();
        gameUniverse.setTime((long) serverObj.get("time"));
        Document plugins = (Document) serverObj.get("plugins");

        for (String pluginName : plugins.keySet()) {
            ServerPlugin plugin = pluginManager.getPluginByName(pluginName);
            plugin.load((Document) plugins.get(pluginName));
        }
    }

    LogManager.LOGGER.info("Done loading! W:" + GameServer.INSTANCE.getGameUniverse().getWorldCount() +
            " | U:" + GameServer.INSTANCE.getGameUniverse().getUserCount());
}
Example 19
Source File: EntityDocumentConverter.java From rya with Apache License 2.0 | 4 votes |
@Override
public Entity fromDocument(final Document document) throws DocumentConverterException {
    requireNonNull(document);

    // Preconditions.
    if (!document.containsKey(SUBJECT)) {
        throw new DocumentConverterException("Could not convert document '" + document +
                "' because its '" + SUBJECT + "' field is missing.");
    }
    if (!document.containsKey(EXPLICIT_TYPE_IDS)) {
        throw new DocumentConverterException("Could not convert document '" + document +
                "' because its '" + EXPLICIT_TYPE_IDS + "' field is missing.");
    }
    if (!document.containsKey(PROPERTIES)) {
        throw new DocumentConverterException("Could not convert document '" + document +
                "' because its '" + PROPERTIES + "' field is missing.");
    }
    if (!document.containsKey(VERSION)) {
        throw new DocumentConverterException("Could not convert document '" + document +
                "' because its '" + VERSION + "' field is missing.");
    }
    if (!document.containsKey(SMART_URI)) {
        throw new DocumentConverterException("Could not convert document '" + document +
                "' because its '" + SMART_URI + "' field is missing.");
    }

    // Perform the conversion.
    final Entity.Builder builder = Entity.builder()
            .setSubject(new RyaIRI(document.getString(SUBJECT)));

    final List<String> explicitTypeIds = document.getList(EXPLICIT_TYPE_IDS, String.class);
    explicitTypeIds.stream()
            .forEach(explicitTypeId -> builder.setExplicitType(new RyaIRI(explicitTypeId)));

    final Document propertiesDoc = (Document) document.get(PROPERTIES);
    for (final String typeId : propertiesDoc.keySet()) {
        final Document typePropertiesDoc = (Document) propertiesDoc.get(typeId);
        for (final String propertyName : typePropertiesDoc.keySet()) {
            final String decodedPropertyName = MongoDbSafeKey.decodeKey(propertyName);
            final Document value = (Document) typePropertiesDoc.get(propertyName);
            final RyaType propertyValue = ryaTypeConverter.fromDocument(value);
            builder.setProperty(new RyaIRI(typeId), new Property(new RyaIRI(decodedPropertyName), propertyValue));
        }
    }

    builder.setVersion(document.getInteger(VERSION));
    builder.setSmartUri(SimpleValueFactory.getInstance().createIRI(document.getString(SMART_URI)));

    return builder.build();
}
Example 20
Source File: MongoDBFactory.java From database-transform-tool with Apache License 2.0 | 4 votes |
/**
 * @description Query the field names of a table (key: field name, value: field type name).
 * @author yi.zhang
 * @time 2017-06-30 14:16:02
 * @param table the table name
 * @return
 */
public Map<String, String> queryColumns(String table) {
    try {
        if (session == null) {
            init(servers, database, schema, username, password);
        }
        MongoCollection<Document> collection = session.getCollection(table);
        if (collection == null) {
            return null;
        }
        Map<String, String> reflect = new HashMap<String, String>();
        FindIterable<Document> documents = collection.find();
        Document document = documents.first();
        if (document == null) {
            return reflect;
        }
        for (String column : document.keySet()) {
            Object value = document.get(column);
            String type = "string";
            if (value instanceof Integer) {
                type = "int";
            }
            if (value instanceof Long) {
                type = "long";
            }
            if (value instanceof Double) {
                type = "double";
            }
            if (value instanceof Boolean) {
                type = "boolean";
            }
            if (value instanceof Date) {
                type = "date";
            }
            reflect.put(column, type);
        }
        return reflect;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
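Example 20 infers column types from the first document only, using a chain of instanceof checks with "string" as the fallback, so fields absent from that first document are never reported. Here is a compact standalone sketch of the same inference over a single document; the sample data and class name are invented.

import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.bson.Document;

public class InferColumnTypesSketch {

    static Map<String, String> inferTypes(Document document) {
        Map<String, String> columns = new HashMap<>();
        for (String column : document.keySet()) {
            Object value = document.get(column);
            String type = "string"; // default when no rule matches
            if (value instanceof Integer) { type = "int"; }
            else if (value instanceof Long) { type = "long"; }
            else if (value instanceof Double) { type = "double"; }
            else if (value instanceof Boolean) { type = "boolean"; }
            else if (value instanceof Date) { type = "date"; }
            columns.put(column, type);
        }
        return columns;
    }

    public static void main(String[] args) {
        Document sample = new Document("name", "widget")
                .append("count", 3)
                .append("price", 9.99)
                .append("createdAt", new Date());
        // e.g. {name=string, count=int, price=double, createdAt=date} (map order may vary)
        System.out.println(inferTypes(sample));
    }
}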