com.mongodb.reactivestreams.client.MongoCollection Java Examples
The following examples show how to use
com.mongodb.reactivestreams.client.MongoCollection.
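Before the project-specific examples, here is a minimal, self-contained sketch of the usage pattern they all share: obtain a MongoCollection from a MongoClient/MongoDatabase and subscribe to the Publisher returned by a collection method. The connection target, the database/collection names ("mydb"/"test"), and the inline Subscriber are illustrative assumptions, not taken from any project below; the sketch assumes the 1.x reactive streams driver and a locally running mongod.

import com.mongodb.reactivestreams.client.MongoClient;
import com.mongodb.reactivestreams.client.MongoClients;
import com.mongodb.reactivestreams.client.MongoCollection;
import org.bson.Document;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

import java.util.concurrent.CountDownLatch;

public class MongoCollectionQuickSketch {

    public static void main(String[] args) throws InterruptedException {
        // assumption: a mongod instance is reachable on the default localhost port
        MongoClient client = MongoClients.create();
        MongoCollection<Document> collection =
                client.getDatabase("mydb").getCollection("test"); // hypothetical names

        CountDownLatch done = new CountDownLatch(1);

        // find() returns a cold Publisher<Document>; nothing executes until a
        // Subscriber subscribes and requests items.
        collection.find().subscribe(new Subscriber<Document>() {
            @Override public void onSubscribe(Subscription s) { s.request(Long.MAX_VALUE); }
            @Override public void onNext(Document doc)        { System.out.println(doc.toJson()); }
            @Override public void onError(Throwable t)        { t.printStackTrace(); done.countDown(); }
            @Override public void onComplete()                { done.countDown(); }
        });

        done.await();   // block this demo's main thread until the stream terminates
        client.close();
    }
}

Most of the examples below wrap the same Publishers in RxJava (Flowable/Observable), Reactor (Flux/Mono), or Akka Streams Sources instead of subscribing by hand.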
Example #1
Source File: NewsServicePublisherTest.java From Hands-On-Reactive-Programming-in-Spring-5 with MIT License

@Override
public Publisher<NewsLetter> createPublisher(long elements) {
    MongoCollection<News> collection = mongoClient().getDatabase("news")
                                                    .getCollection("news", News.class);
    int period = elements > 0 ? (int) (1000 / elements) : 1;

    prepareItemsInDatabase(elements);

    Publisher<NewsLetter> newsServicePublisher = new NewsServicePublisher(smp ->
            new ScheduledPublisher<>(
                    () -> new NewsPreparationOperator(new DBPublisher(collection, "tech"), "Some Digest"),
                    period == 0 ? 1 : period, TimeUnit.MILLISECONDS
            ).subscribe(smp)
    );

    newsServicePublisher = Flowable.fromPublisher(newsServicePublisher)
                                   .take(elements);

    return newsServicePublisher;
}
Example #2
Source File: NewsServicePublisherTest.java From Hands-On-Reactive-Programming-in-Spring-5 with MIT License

@Override
public Publisher<NewsLetter> createFailedPublisher() {
    MongoCollection<News> collection = mongoClient().getDatabase("news")
                                                    .getCollection("news", News.class);
    WithEmbeddedMongo.tearDownMongo();

    return new NewsServicePublisher(smp ->
            new ScheduledPublisher<>(
                    () -> new NewsPreparationOperator(new DBPublisher(collection, "tech"), "Some Digest"),
                    1, TimeUnit.MILLISECONDS
            ).subscribe(smp)
    );
}
Example #3
Source File: NewsServicePublisherTest.java From Hands-On-Reactive-Programming-in-Spring-5 with MIT License

private void prepareItemsInDatabase(long elements) {
    if (elements <= 0) {
        return;
    }

    MongoCollection<News> collection = mongoClient().getDatabase("news")
                                                    .getCollection("news", News.class);
    Flowable<Success> successFlowable = Flowable.fromPublisher(collection.drop())
            .ignoreElements()
            .andThen(Flowable.rangeLong(0L, elements)
                             .map(l -> NewsHarness.generate())
                             .buffer(500, TimeUnit.MILLISECONDS)
                             .flatMap(collection::insertMany));

    if (elements == Long.MAX_VALUE || elements == Integer.MAX_VALUE) {
        successFlowable.subscribe();
    } else {
        successFlowable.blockingSubscribe();
    }
}
Example #4
Source File: IdAttributeTest.java From immutables with Apache License 2.0

/**
 * Test that {@code _id} attribute is persisted instead of {@code id}
 */
@Test
void idAttribute() {
    StringHolderRepository repository = new StringHolderRepository(resource.backend());
    ImmutableStringHolder holder = TypeHolder.StringHolder.generator().get().withId("id1");
    repository.insertAll(Arrays.asList(holder, holder.withId("id2")));

    MongoCollection<BsonDocument> collection = resource.collection(TypeHolder.StringHolder.class)
            .withDocumentClass(BsonDocument.class);

    List<BsonDocument> docs = Flowable.fromPublisher(collection.find()).toList().blockingGet();
    Checkers.check(docs).hasSize(2);

    // has _id attribute
    Checkers.check(docs.stream().map(BsonDocument::keySet).flatMap(Collection::stream).collect(Collectors.toSet())).has("_id");
    // does not have 'id' attribute only '_id' (with underscore which is mongo specific) in collected documents
    Checkers.check(docs.stream().map(BsonDocument::keySet).flatMap(Collection::stream).collect(Collectors.toSet())).not().has("id");

    Checkers.check(docs.stream().map(d -> d.get("_id").asString().getValue()).collect(Collectors.toList())).hasContentInAnyOrder("id1", "id2");

    // using repository
    Checkers.check(repository.findAll().fetch().stream().map(TypeHolder.StringHolder::id).collect(Collectors.toList())).hasContentInAnyOrder("id1", "id2");
}
Example #5
Source File: MongoTimestampPersistence.java From ditto with Eclipse Public License 2.0

/**
 * Creates the capped collection {@code collectionName} using {@code clientWrapper} if it doesn't exist yet.
 *
 * @param database The database to use.
 * @param collectionName The name of the capped collection that should be created.
 * @param cappedCollectionSizeInBytes The size in bytes of the collection that should be created.
 * @param materializer The actor materializer to pre-materialize the restart source.
 * @return Returns the created or retrieved collection.
 */
private static Source<MongoCollection, NotUsed> createOrGetCappedCollection(
        final MongoDatabase database,
        final String collectionName,
        final long cappedCollectionSizeInBytes,
        final ActorMaterializer materializer) {

    final Source<Success, NotUsed> createCollectionSource =
            repeatableCreateCappedCollectionSource(database, collectionName, cappedCollectionSizeInBytes);

    final Source<MongoCollection, NotUsed> infiniteCollectionSource =
            createCollectionSource.map(success -> database.getCollection(collectionName))
                    .flatMapConcat(Source::repeat);

    final Source<MongoCollection, NotUsed> restartSource =
            RestartSource.withBackoff(BACKOFF_MIN, BACKOFF_MAX, 1.0, () -> infiniteCollectionSource);

    // pre-materialize source with BroadcastHub so that a successfully obtained capped collection is reused
    // until the stream fails, whereupon it gets recreated with backoff.
    return restartSource.runWith(BroadcastHub.of(MongoCollection.class, 1), materializer);
}
Example #6
Source File: MongoOpsUtil.java From ditto with Eclipse Public License 2.0

private static Source<Optional<Throwable>, NotUsed> doDeleteByFilter(final MongoCollection<Document> collection,
        final Bson filter) {
    // https://stackoverflow.com/a/33164008
    // claims unordered bulk ops halve MongoDB load
    final List<WriteModel<Document>> writeModel =
            Collections.singletonList(new DeleteManyModel<>(filter));
    final BulkWriteOptions options = new BulkWriteOptions().ordered(false);
    return Source.fromPublisher(collection.bulkWrite(writeModel, options))
            .map(result -> {
                if (LOGGER.isDebugEnabled()) {
                    // in contrast to Bson, BsonDocument has meaningful toString()
                    final BsonDocument filterBsonDoc = BsonUtil.toBsonDocument(filter);
                    LOGGER.debug("Deleted <{}> documents from collection <{}>. Filter was <{}>.",
                            result.getDeletedCount(), collection.getNamespace(), filterBsonDoc);
                }
                return Optional.<Throwable>empty();
            })
            .recoverWithRetries(RETRY_ATTEMPTS, new PFBuilder<Throwable, Source<Optional<Throwable>, NotUsed>>()
                    .matchAny(throwable -> Source.single(Optional.of(throwable)))
                    .build());
}
Example #7
Source File: ReactivePersonRepositoryIntegrationTest.java From spring-data-examples with Apache License 2.0

@Before
public void setUp() {

    Mono<MongoCollection<Document>> recreateCollection = operations.collectionExists(Person.class) //
            .flatMap(exists -> exists ? operations.dropCollection(Person.class) : Mono.just(exists)) //
            .then(operations.createCollection(Person.class, CollectionOptions.empty() //
                    .size(1024 * 1024) //
                    .maxDocuments(100) //
                    .capped()));

    StepVerifier.create(recreateCollection).expectNextCount(1).verifyComplete();

    Flux<Person> insertAll = operations.insertAll(Flux.just(new Person("Walter", "White", 50), //
            new Person("Skyler", "White", 45), //
            new Person("Saul", "Goodman", 42), //
            new Person("Jesse", "Pinkman", 27)).collectList());

    StepVerifier.create(insertAll).expectNextCount(4).verifyComplete();
}
Example #8
Source File: RxJava2PersonRepositoryIntegrationTest.java From spring-data-examples with Apache License 2.0

@Before
public void setUp() {

    Mono<MongoCollection<Document>> recreateCollection = operations.collectionExists(Person.class) //
            .flatMap(exists -> exists ? operations.dropCollection(Person.class) : Mono.just(exists)) //
            .then(operations.createCollection(Person.class, CollectionOptions.empty() //
                    .size(1024 * 1024) //
                    .maxDocuments(100) //
                    .capped()));

    StepVerifier.create(recreateCollection).expectNextCount(1).verifyComplete();

    repository.saveAll(Flowable.just(new Person("Walter", "White", 50), //
            new Person("Skyler", "White", 45), //
            new Person("Saul", "Goodman", 42), //
            new Person("Jesse", "Pinkman", 27))) //
            .test() //
            .awaitCount(4) //
            .assertNoErrors() //
            .awaitTerminalEvent();
}
Example #9
Source File: MongoSourceTest.java From pulsar with Apache License 2.0

@BeforeMethod
public void setUp() {

    map = TestHelper.createMap(true);

    mockSourceContext = mock(SourceContext.class);
    mockMongoClient = mock(MongoClient.class);
    mockMongoDb = mock(MongoDatabase.class);
    mockMongoColl = mock(MongoCollection.class);
    mockPublisher = mock(ChangeStreamPublisher.class);

    source = new MongoSource(() -> mockMongoClient);

    when(mockMongoClient.getDatabase(anyString())).thenReturn(mockMongoDb);
    when(mockMongoDb.getCollection(anyString())).thenReturn(mockMongoColl);
    when(mockMongoColl.watch()).thenReturn(mockPublisher);
    when(mockPublisher.batchSize(anyInt())).thenReturn(mockPublisher);
    when(mockPublisher.fullDocument(any())).thenReturn(mockPublisher);

    doAnswer((invocation) -> {
        subscriber = invocation.getArgument(0, Subscriber.class);
        return null;
    }).when(mockPublisher).subscribe(any());
}
Example #10
Source File: MongoSinkTest.java From pulsar with Apache License 2.0

@BeforeMethod
public void setUp() {

    map = TestHelper.createMap(true);

    mockRecord = mock(Record.class);
    mockSinkContext = mock(SinkContext.class);
    mockMongoClient = mock(MongoClient.class);
    mockMongoDb = mock(MongoDatabase.class);
    mockMongoColl = mock(MongoCollection.class);
    mockPublisher = mock(Publisher.class);

    sink = new MongoSink(() -> mockMongoClient);

    when(mockMongoClient.getDatabase(anyString())).thenReturn(mockMongoDb);
    when(mockMongoDb.getCollection(anyString())).thenReturn(mockMongoColl);
    when(mockMongoDb.getCollection(anyString()).insertMany(any())).thenReturn(mockPublisher);
}
Example #11
Source File: ReactiveAggregateMergeTest.java From mongodb-aggregate-query-support with Apache License 2.0

@Test
public void mustNotOverwriteExistingDocuments() throws JsonProcessingException {
    String employeeCollection = RandomStringUtils.randomAlphabetic(10);
    String orgArchiveColl = RandomStringUtils.randomAlphabetic(10);

    List<Employee> originalEmployees = addEmployeeDocuments(employeeCollection, EMPLOYEE_DOCS);
    MongoCollection<Document> employeeColl = mongoTemplate.getCollection(employeeCollection);
    validateCount(employeeCollection, originalEmployees.size());

    List<OrgArchiveEntry> orgArchiveEntries = addOrgArchiveEntries(orgArchiveColl, ORG_ARCHIVE_DOCS);
    MongoCollection<Document> orgArchiveCollection = mongoTemplate.getCollection(orgArchiveColl);
    validateCount(orgArchiveColl, orgArchiveEntries.size());

    Document index = Document.parse("{'fiscalYear': 1, 'dept': 1}");
    IndexDefinition def = new CompoundIndexDefinition(index).unique();
    mongoTemplate.indexOps(orgArchiveColl).ensureIndex(def).block();

    zooEmployeeRepository.updateOrgArchiveInsertOnly(employeeCollection, orgArchiveColl).block();
    validateCount(orgArchiveColl, orgArchiveEntries.size() + 2);

    Query query = new Query(Criteria.where("fiscalYear").is(2019));
    List<OrgArchiveEntry> newArchiveEntries = mongoTemplate.find(query, OrgArchiveEntry.class, orgArchiveColl)
            .collectList().block();
    assertNotNull(newArchiveEntries);
    assertEquals(newArchiveEntries.size(), 2);
}
Example #12
Source File: MongoTimestampPersistenceIT.java From ditto with Eclipse Public License 2.0

@Test
public void ensureCollectionIsCapped() throws Exception {
    final MongoCollection<Document> collection =
            syncPersistence.getCollection().runWith(Sink.head(), materializer).toCompletableFuture().get();

    runBlocking(syncPersistence.setTimestamp(Instant.now()));
    runBlocking(syncPersistence.setTimestamp(Instant.now()));

    assertThat(runBlocking(Source.fromPublisher(collection.count()))).containsExactly(1L);
}
Example #13
Source File: BackendResource.java From immutables with Apache License 2.0

@Override
public MongoCollection<?> resolve(Class<?> entityClass) {
    final String name = ContainerNaming.DEFAULT.name(entityClass);
    final MongoCollection<?> collection;

    // already exists?
    if (!Flowable.fromPublisher(database.listCollectionNames()).toList().blockingGet().contains(name)) {
        Success success = Flowable.fromPublisher(database.createCollection(name)).blockingFirst();
    }

    return database.getCollection(name).withDocumentClass(entityClass).withCodecRegistry(registry);
}
Example #14
Source File: MongoReadJournal.java From ditto with Eclipse Public License 2.0

private Source<List<Document>, NotUsed> listNewestSnapshots(final MongoCollection<Document> snapshotStore,
        final String lowerBound,
        final int batchSize,
        final ActorMaterializer mat,
        final String... snapshotFields) {

    return this.unfoldBatchedSource(lowerBound,
            mat,
            SnapshotBatch::getMaxPid,
            actualStart -> listNewestActiveSnapshotsByBatch(snapshotStore, actualStart, batchSize, snapshotFields))
            .mapConcat(x -> x)
            .map(SnapshotBatch::getItems);
}
Example #15
Source File: MongoNamespacePersistenceOperations.java From ditto with Eclipse Public License 2.0

private Source<List<Throwable>, NotUsed> purge(final MongoPersistenceOperationsSelection selection) {
    final MongoCollection<Document> collection = db.getCollection(selection.getCollectionName());
    if (selection.isEntireCollection()) {
        return MongoOpsUtil.drop(collection);
    } else {
        return MongoOpsUtil.deleteByFilter(collection, selection.getFilter());
    }
}
Example #16
Source File: MongoOpsUtil.java From ditto with Eclipse Public License 2.0

private static Source<Optional<Throwable>, NotUsed> doDrop(final MongoCollection<Document> collection) {
    return Source.fromPublisher(collection.drop())
            .map(result -> {
                LOGGER.debug("Successfully dropped collection <{}>.", collection.getNamespace());
                return Optional.<Throwable>empty();
            })
            .recoverWithRetries(RETRY_ATTEMPTS, new PFBuilder<Throwable, Source<Optional<Throwable>, NotUsed>>()
                    .matchAny(throwable -> Source.single(Optional.of(throwable)))
                    .build());
}
Example #17
Source File: MongoReadJournal.java From ditto with Eclipse Public License 2.0

private Source<List<String>, NotUsed> listPidsInJournal(final MongoCollection<Document> journal,
        final String lowerBound,
        final int batchSize,
        final ActorMaterializer mat,
        final Duration maxBackOff,
        final int maxRestarts) {

    return unfoldBatchedSource(lowerBound, mat, Function.identity(), actualStart ->
            listJournalPidsAbove(journal, actualStart, batchSize, maxBackOff, maxRestarts)
    );
}
Example #18
Source File: MongoTimestampPersistence.java From ditto with Eclipse Public License 2.0

/**
 * Creates a new initialized instance.
 *
 * @param collectionName The name of the collection.
 * @param mongoClient the client wrapper holding the connection information.
 * @param materializer an actor materializer to materialize the restart-source of the timestamp collection.
 * @return a new initialized instance.
 */
public static MongoTimestampPersistence initializedInstance(final String collectionName,
        final DittoMongoClient mongoClient, final ActorMaterializer materializer) {
    final Source<MongoCollection, NotUsed> collectionSource =
            createOrGetCappedCollection(mongoClient.getDefaultDatabase(), collectionName,
                    MIN_CAPPED_COLLECTION_SIZE_IN_BYTES, materializer);

    return new MongoTimestampPersistence(collectionSource);
}
Example #19
Source File: MongoThingsSearchPersistence.java From ditto with Eclipse Public License 2.0

private MongoThingsSearchPersistence(
        final MongoCollection<Document> collection,
        final LoggingAdapter log,
        final IndexInitializer indexInitializer,
        final Duration maxQueryTime,
        final MongoHints hints) {
    this.collection = collection;
    this.log = log;
    this.indexInitializer = indexInitializer;
    this.maxQueryTime = maxQueryTime;
    this.hints = hints;
}
Example #20
Source File: MongoAuthenticationProvider.java From graviteeio-access-management with Apache License 2.0

private Maybe<Document> findUserByUsername(String username) {
    MongoCollection<Document> usersCol = this.mongoClient
            .getDatabase(this.configuration.getDatabase())
            .getCollection(this.configuration.getUsersCollection());
    String rawQuery = this.configuration.getFindUserByUsernameQuery().replaceAll("\\?", username);
    String jsonQuery = convertToJsonString(rawQuery);
    BsonDocument query = BsonDocument.parse(jsonQuery);
    return Observable.fromPublisher(usersCol.find(query).first()).firstElement();
}
Example #21
Source File: MongoAuthenticationProviderTestConfiguration.java From graviteeio-access-management with Apache License 2.0

@Override
public void afterPropertiesSet() throws Exception {
    Observable.fromPublisher(mongoDatabase.createCollection("users")).blockingFirst();
    MongoCollection<Document> collection = mongoDatabase.getCollection("users");
    Document doc = new Document("username", "bob").append("password", "bobspassword");
    Observable.fromPublisher(collection.insertOne(doc)).blockingFirst();
}
Example #22
Source File: ReactiveMongoNativeJavaDriverQueryExecutor.java From mongodb-aggregate-query-support with Apache License 2.0

@Override
public Object executeQuery(QueryProvider queryProvider) {
    // convert the pipelines by parsing the JSON strings
    Iterator iterator = queryProvider.getPipelines().iterator();
    int i = 0;
    String collectionName = queryProvider.getCollectionName();
    List<Bson> pipelineStages = new ArrayList<>();
    while (iterator.hasNext()) {
        String query = (String) iterator.next();
        LOGGER.trace("Processing query string {} for pipeline stage {}", query, i++);
        Bson document = BsonDocument.parse(query);
        pipelineStages.add(document);
    }

    // run the pipeline and return a flux.
    MongoCollection<Document> collection = mongoOperations.getCollection(collectionName);
    AggregatePublisher<Document> aggregatePublisher = collection.aggregate(pipelineStages)
            .allowDiskUse(queryProvider.isAllowDiskUse())
            .maxTime(queryProvider.getMaxTimeMS(), MILLISECONDS);

    Class methodReturnType = queryProvider.getMethodReturnType();
    boolean isFlux = Flux.class.isAssignableFrom(methodReturnType);
    boolean isMono = Mono.class.isAssignableFrom(methodReturnType);
    boolean isFluxOrMono = isFlux || isMono;
    if (!isFluxOrMono) {
        throw new IllegalArgumentException("Method return type must be of Flux or Mono type");
    }

    Class<?> outputClass = queryProvider.getOutputClass();
    if (isFlux) {
        LOGGER.trace("Return type is Flux<{}>", outputClass);
        Flux<Document> retval = Flux.from(aggregatePublisher);
        if (outputClass != null) {
            return adaptPipeline(queryProvider, outputClass, retval);
        }
        return retval;
    } else {
        Mono<Document> mono = Mono.from(aggregatePublisher);
        if (outputClass != null) {
            LOGGER.trace("Return type is Mono<{}>", outputClass);
            return adaptPipeline(queryProvider, outputClass, mono);
        }
        return mono;
    }
}
Example #23
Source File: MongoSession.java From immutables with Apache License 2.0

MongoSession(MongoCollection<?> collection, KeyExtractor keyExtractor) {
    this.collection = Objects.requireNonNull(collection, "collection");
    this.keyExtractor = Objects.requireNonNull(keyExtractor, "keyExtractor");

    PathNaming pathNaming = PathNaming.defaultNaming();
    KeyExtractor.KeyMetadata metadata = keyExtractor.metadata();
    if (metadata.isKeyDefined() && metadata.isExpression() && metadata.keys().size() == 1) {
        Path idProperty = Visitors.toPath(Iterables.getOnlyElement(metadata.keys()));
        pathNaming = new MongoPathNaming(idProperty, pathNaming);
    }
    this.pathNaming = pathNaming;
    this.converter = Mongos.converter(this.pathNaming, collection.getCodecRegistry());
}
Example #24
Source File: MongoSession.java From immutables with Apache License 2.0

/**
 * Uses <a href="https://docs.mongodb.com/manual/reference/method/db.collection.replaceOne/">replaceOne</a> operation
 * with <a href="https://docs.mongodb.com/manual/reference/method/db.collection.bulkWrite/">bulkWrite</a>. Right now it has to convert
 * the object to BsonDocument to extract the {@code _id} attribute.
 */
private <T> Publisher<WriteResult> update(StandardOperations.Update operation) {
    ReplaceOptions options = new ReplaceOptions();
    if (operation.upsert()) {
        options.upsert(operation.upsert());
    }

    List<ReplaceOneModel<Object>> docs = operation.values().stream()
            .map(value -> new ReplaceOneModel<>(
                    new BsonDocument(Mongos.ID_FIELD_NAME, toBsonValue(keyExtractor.extract(value))),
                    value,
                    options))
            .collect(Collectors.toList());

    Publisher<BulkWriteResult> publisher = ((MongoCollection<Object>) collection).bulkWrite(docs);
    return Flowable.fromPublisher(publisher).map(x -> WriteResult.unknown());
}
Example #25
Source File: MongoSession.java From immutables with Apache License 2.0

private <X> Publisher<X> watch(StandardOperations.Watch operation) {
    final MongoCollection<X> collection = (MongoCollection<X>) this.collection;
    final Bson filter = new Document("fullDocument", toBsonFilter(operation.query()));
    return Flowable.fromPublisher(collection.watch(Collections.singletonList(filter))
            .fullDocument(FullDocument.UPDATE_LOOKUP)
            .withDocumentClass(collection.getDocumentClass()));
}
Example #26
Source File: AbstractThingSearchPersistenceITBase.java From ditto with Eclipse Public License 2.0

private void dropCollectionWithBackoff(final MongoCollection<Document> collection) {
    RuntimeException lastException = null;
    for (int i = 0; i < 20; ++i) {
        try {
            waitFor(Source.fromPublisher(collection.drop()));
            return;
        } catch (final RuntimeException e) {
            lastException = e;
            backoff();
        }
    }
    throw lastException;
}
Example #27
Source File: ReactiveStreamsMongoLockProviderIntegrationTest.java From ShedLock with Apache License 2.0

private MongoCollection<Document> getLockCollection() {
    return mongo.getDatabase(DB_NAME).getCollection(DEFAULT_SHEDLOCK_COLLECTION_NAME);
}
Example #28
Source File: ReactiveStreamsMongoLockProvider.java From ShedLock with Apache License 2.0

private MongoCollection<Document> getCollection() {
    return collection;
}
Example #29
Source File: MongoEntitiesPersistenceOperations.java From ditto with Eclipse Public License 2.0

private Source<List<Throwable>, NotUsed> purge(final MongoPersistenceOperationsSelection selection) {
    final MongoCollection<Document> collection = db.getCollection(selection.getCollectionName());
    return MongoOpsUtil.deleteByFilter(collection, selection.getFilter());
}
Example #30
Source File: QuickTourAdmin.java From mongo-java-driver-reactivestreams with Apache License 2.0

/**
 * Run this main method to see the output of this quick example.
 *
 * @param args takes an optional single argument for the connection string
 * @throws Throwable if an operation fails
 */
public static void main(final String[] args) throws Throwable {
    MongoClient mongoClient;

    if (args.length == 0) {
        // connect to the local database server
        mongoClient = MongoClients.create();
    } else {
        mongoClient = MongoClients.create(args[0]);
    }

    // get handle to "mydb" database
    MongoDatabase database = mongoClient.getDatabase("mydb");

    // get a handle to the "test" collection
    MongoCollection<Document> collection = database.getCollection("test");

    ObservableSubscriber subscriber = new ObservableSubscriber<Success>();
    collection.drop().subscribe(subscriber);
    subscriber.await();

    // getting a list of databases
    mongoClient.listDatabaseNames().subscribe(new PrintSubscriber<String>("Database Names: %s"));

    // drop a database
    subscriber = new ObservableSubscriber<Success>();
    mongoClient.getDatabase("databaseToBeDropped").drop().subscribe(subscriber);
    subscriber.await();

    // create a collection
    database.createCollection("cappedCollection", new CreateCollectionOptions().capped(true).sizeInBytes(0x100000))
            .subscribe(new PrintSubscriber<Success>("Creation Created!"));

    database.listCollectionNames().subscribe(new PrintSubscriber<String>("Collection Names: %s"));

    // drop a collection:
    subscriber = new ObservableSubscriber<Success>();
    collection.drop().subscribe(subscriber);
    subscriber.await();

    // create an ascending index on the "i" field
    collection.createIndex(new Document("i", 1)).subscribe(new PrintSubscriber<String>("Created an index named: %s"));

    // list the indexes on the collection
    collection.listIndexes().subscribe(new PrintDocumentSubscriber());

    // create a text index on the "content" field
    subscriber = new PrintSubscriber<String>("Created an index named: %s");
    collection.createIndex(new Document("content", "text")).subscribe(subscriber);
    subscriber.await();

    subscriber = new OperationSubscriber();
    collection.insertMany(asList(new Document("_id", 0).append("content", "textual content"),
            new Document("_id", 1).append("content", "additional content"),
            new Document("_id", 2).append("content", "irrelevant content"))).subscribe(subscriber);
    subscriber.await();

    // Find using the text index
    subscriber = new PrintSubscriber("Text search matches: %s");
    collection.countDocuments(text("textual content -irrelevant")).subscribe(subscriber);
    subscriber.await();

    // Find using the $language operator
    subscriber = new PrintSubscriber("Text search matches (english): %s");
    Bson textSearch = text("textual content -irrelevant", new TextSearchOptions().language("english"));
    collection.countDocuments(textSearch).subscribe(subscriber);
    subscriber.await();

    // Find the highest scoring match
    System.out.print("Highest scoring document: ");
    Document projection = new Document("score", new Document("$meta", "textScore"));
    collection.find(textSearch).projection(projection).first().subscribe(new PrintDocumentSubscriber());

    // Run a command
    database.runCommand(new Document("buildInfo", 1)).subscribe(new PrintDocumentSubscriber());

    // release resources
    subscriber = new OperationSubscriber();
    database.drop().subscribe(subscriber);
    subscriber.await();

    mongoClient.close();
}