com.mongodb.QueryBuilder Java Examples

The following examples show how to use com.mongodb.QueryBuilder. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: MongoReader.java — From deep-spark (Apache License 2.0), 6 votes
/**
 * Create the range query that selects the documents belonging to a partition.
 * <p>
 * The partition key is constrained to {@code [startToken, endToken)}: the lower
 * bound is inclusive ({@code $gte}) and the upper bound exclusive ({@code $lt}).
 * A {@code null} token leaves that side of the range open, so a partition with
 * both tokens {@code null} yields an empty (match-all) query.
 *
 * @param partition the partition
 * @return the dB object
 */
private DBObject createQueryPartition(MongoPartition partition) {

    QueryBuilder queryBuilder = QueryBuilder.start();

    // Only build each bound when its token exists; the original constructed
    // both sub-queries unconditionally (potentially with null tokens) and
    // discarded the unused ones.
    if (partition.splitWrapper().getStartToken() != null) {
        queryBuilder.and(QueryBuilder.start(partition.getKey())
                .greaterThanEquals(partition.splitWrapper().getStartToken()).get());
    }

    if (partition.splitWrapper().getEndToken() != null) {
        queryBuilder.and(QueryBuilder.start(partition.getKey())
                .lessThan(partition.splitWrapper().getEndToken()).get());
    }

    // Build once; the original called get() twice (for the log and the return).
    DBObject query = queryBuilder.get();
    LOG.debug("mongodb query " + query);

    return query;
}
 
Example #2
Source File: AbstractHistoricalMetricProvider.java — From scava (Eclipse Public License 2.0), 5 votes
/**
 * Fetches this metric's historical measurements for a project, optionally
 * restricted to the [start, end] date window (a null bound is open-ended).
 *
 * @param context provides access to the project's database
 * @param project the project whose measurements are read
 * @param start   inclusive lower bound on "__datetime", or null for no lower bound
 * @param end     inclusive upper bound on "__datetime", or null for no upper bound
 * @return the matching measurements wrapped as Pongo objects
 */
public List<Pongo> getHistoricalMeasurements(MetricProviderContext context, Project project, Date start, Date end) {

	DB db = context.getProjectDB(project);
	DBCollection collection = db.getCollection(getCollectionName());

	// Build the date-window filter; absent bounds simply add no clause.
	QueryBuilder dateWindow = QueryBuilder.start();
	if (start != null) {
		dateWindow.and("__datetime").greaterThanEquals(start.toJavaDate());
	}
	if (end != null) {
		dateWindow.and("__datetime").lessThanEquals(end.toJavaDate());
	}

	BasicDBObject query = (BasicDBObject) dateWindow.get();

	// Materialise each matching document as a Pongo.
	List<Pongo> measurements = new ArrayList<Pongo>();
	for (Iterator<DBObject> it = collection.find(query).iterator(); it.hasNext(); ) {
		measurements.add(PongoFactory.getInstance().createPongo(it.next()));
	}

	return measurements;
}
 
Example #3
Source File: ResourceFilter.java — From EDDI (Apache License 2.0), 5 votes
/**
 * Builds a Mongo query {@link Document} from the given filter groups.
 * <p>
 * Within a group, String filters are matched as regular expressions and all
 * other filters as exact values; the group's clauses are combined with its
 * connecting type (AND/OR). All non-empty groups are then ANDed together.
 *
 * @param allQueryFilters the filter groups to combine
 * @return the resulting query document (empty when no group contributes a clause)
 */
private Document createQuery(QueryFilters[] allQueryFilters) {
    QueryBuilder retQuery = new QueryBuilder();

    for (QueryFilters queryFilters : allQueryFilters) {
        List<DBObject> dbObjects = new LinkedList<>();
        for (QueryFilter queryFilter : queryFilters.getQueryFilters()) {
            if (queryFilter.getFilter() instanceof String) {
                // String filters are interpreted as regex patterns.
                Pattern resourcePattern = getPatternForRegex((String) queryFilter.getFilter());
                dbObjects.add(new QueryBuilder().put(queryFilter.getField()).regex(resourcePattern).get());
            } else {
                dbObjects.add(new QueryBuilder().put(queryFilter.getField()).is(queryFilter.getFilter()).get());
            }
        }

        if (!dbObjects.isEmpty()) {
            // Zero-length array is the preferred toArray idiom (JVM sizes it).
            DBObject[] dbObjectArray = dbObjects.toArray(new DBObject[0]);
            DBObject filterQuery =
                    queryFilters.getConnectingType() == QueryFilters.ConnectingType.AND
                            ? new QueryBuilder().and(dbObjectArray).get()
                            : new QueryBuilder().or(dbObjectArray).get();
            retQuery.and(filterQuery);
        }
    }

    return new Document(retQuery.get().toMap());
}
 
Example #4
Source File: MongoDeepJobConfig.java — From deep-spark (Apache License 2.0), 5 votes
/**
 * Filter query.
 * <p>
 * Translates the given Deep filters into a Mongo query: EQ filters become a
 * direct {@code field: value} pair, every other filter type becomes a
 * {@code field: {$op: value}} clause (the operator is the lower-cased filter
 * type id). All clauses are ANDed together and installed via
 * {@link #filterQuery(QueryBuilder)}. An empty array leaves the config unchanged.
 *
 * @param filters the filters
 * @return the mongo deep job config
 */
public MongoDeepJobConfig<T> filterQuery(Filter[] filters) {

    if (filters.length > 0) {
        // Presize: one clause per filter.
        List<BasicDBObject> list = new ArrayList<>(filters.length);

        QueryBuilder queryBuilder = QueryBuilder.start();
        for (Filter filter : filters) {
            BasicDBObject bsonObject = new BasicDBObject();

            if (filter.getFilterType().equals(FilterType.EQ)) {
                // Equality needs no $-operator in Mongo.
                bsonObject.put(filter.getField(), filter.getValue());
            } else {
                // e.g. FilterType LT -> {field: {$lt: value}}.
                bsonObject.put(filter.getField(),
                        new BasicDBObject("$".concat(filter.getFilterType().getFilterTypeId().toLowerCase()),
                                filter.getValue()));
            }

            list.add(bsonObject);
        }
        queryBuilder.and(list.toArray(new BasicDBObject[0]));

        filterQuery(queryBuilder);
    }
    return this;

}
 
Example #5
Source File: RawMetricResource.java — From scava (Eclipse Public License 2.0), 4 votes
/**
 * Fetches raw metric data for a project, supporting both historical and
 * transient metric providers. Optional startDate/endDate query parameters
 * restrict historical results by "__datetime".
 */
public Representation doRepresent() {

	String projectId = (String) getRequest().getAttributes().get("projectid");
	String metricId = (String) getRequest().getAttributes().get("metricid");

	String start = getQueryValue("startDate");
	String end = getQueryValue("endDate");

	QueryBuilder builder = QueryBuilder.start();
	try {
		// BUG FIX: the original compared with `start != ""`, which tests
		// reference identity and is true for any non-interned empty string
		// coming from the request. isEmpty() checks the content.
		if (start != null && !start.isEmpty()) {
			builder.and("__datetime").greaterThanEquals(new Date(start).toJavaDate());
		}
		if (end != null && !end.isEmpty()) {
			builder.and("__datetime").lessThanEquals(new Date(end).toJavaDate());
		}
	} catch (ParseException e) {
		// An unparsable date degrades to an unfiltered query; at least record
		// the failure (the original called e.getStackTrace(), a no-op).
		e.printStackTrace();
	}

	BasicDBObject query = (BasicDBObject) builder.get();

	ArrayNode results = mapper.createArrayNode();

	if (projectId != null && metricId != null) {
		this.db = mongo.getDB(ANALYSIS_SCHEDULING_DATABASE);
		ProjectAnalysisResportory repository = new ProjectAnalysisResportory(this.db);
		Iterable<MetricExecution> listMetricExecutions = repository.getMetricExecutions().findByProjectId(projectId);

		// Collect the ids of metrics actually executed for this project.
		List<String> metricExecutions = new ArrayList<>();
		for (MetricExecution metricExecution : listMetricExecutions) {
			metricExecutions.add(metricExecution.getMetricProviderId());
		}

		if (metricExecutions.contains(metricId)) {
			List<IMetricProvider> platformProvider = this.platform.getMetricProviderManager().getMetricProviders();
			for (IMetricProvider iMetricProvider : platformProvider) {
				if (iMetricProvider.getIdentifier().equals(metricId)) {
					Project project = platform.getProjectRepositoryManager().getProjectRepository().getProjects().findOneByShortName(projectId);
					// Dispatch on the provider flavour: historical providers
					// store documents in a named collection, transient ones
					// expose Pongo collections.
					if (iMetricProvider instanceof IHistoricalMetricProvider) {
						results.addAll(getHistoricDocuments(platform.getMetricsRepository(project).getDb().getCollection(((IHistoricalMetricProvider) iMetricProvider).getCollectionName()), query));
					} else if (iMetricProvider instanceof ITransientMetricProvider) {
						results.addAll(getTransientDocuments(((ITransientMetricProvider) iMetricProvider).adapt(platform.getMetricsRepository(project).getDb()).getPongoCollections()));
					}
					break;
				}
			}

		}

	}

	return Util.createJsonRepresentation(results);
}
 
Example #6
Source File: MetricVisualisationResource.java — From scava (Eclipse Public License 2.0), 4 votes
/**
 * Renders a metric visualisation for a project. Optional startDate/endDate
 * query parameters restrict the data by "__datetime"; invalid dates produce
 * an error representation.
 */
public Representation doRepresent() {
	String projectName = (String) getRequest().getAttributes().get("projectid");
	String metricId = (String) getRequest().getAttributes().get("metricid");

	String agg = getQueryValue("agg");
	String start = getQueryValue("startDate");
	String end = getQueryValue("endDate");

	QueryBuilder builder = QueryBuilder.start();
	// BUG FIX: `agg != ""` compared references, not content; use isEmpty().
	if (agg != null && !agg.isEmpty()) {
//		builder.... // TODO
	}
	try {
		// Same reference-vs-content fix as above for both date bounds.
		if (start != null && !start.isEmpty()) {
			builder.and("__datetime").greaterThanEquals(new Date(start).toJavaDate());
		}
		if (end != null && !end.isEmpty()) {
			builder.and("__datetime").lessThanEquals(new Date(end).toJavaDate());
		}
	} catch (ParseException e) {
		return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "Invalid date. Format must be YYYYMMDD.");
	}

	BasicDBObject query = (BasicDBObject) builder.get();

	ProjectRepository projectRepo = platform.getProjectRepositoryManager().getProjectRepository();

	Project project = projectRepo.getProjects().findOneByShortName(projectName);
	if (project == null) {
		getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
		return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "No project was found with the requested name.");
	}

	// Ensure visualisations are registered before looking one up.
	MetricVisualisationExtensionPointManager manager = MetricVisualisationExtensionPointManager.getInstance();
	manager.getRegisteredVisualisations();

	MetricVisualisation vis = manager.findVisualisationById(metricId);

	if (vis == null) {
		return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "No visualiser found with specified ID.");
	}

	DB db = platform.getMetricsRepository(project).getDb();
	JsonNode visualisation = vis.visualise(db, query);

	StringRepresentation resp = new StringRepresentation(visualisation.toString());
	resp.setMediaType(MediaType.APPLICATION_JSON);
	return resp;
}
 
Example #7
Source File: MongoNativeExtractor.java — From deep-spark (Apache License 2.0), 4 votes
/**
 * Calculate splits.
 * <p>
 * Builds one partition per split point of the collection (each covering
 * {@code [previousKey, splitKey)} on {@code _id}), plus a final open-ended
 * partition covering {@code [lastKey, +inf)}.
 *
 * @param collection the collection
 * @return the deep partition [ ]
 */
private DeepPartition[] calculateSplits(DBCollection collection) {

    BasicDBList splitData = getSplitData(collection);
    List<ServerAddress> serverAddressList = collection.getDB().getMongo().getServerAddressList();

    if (splitData == null) {
        // Sharded collection: gather split points and hosts shard by shard.
        Pair<BasicDBList, List<ServerAddress>> pair = getSplitDataCollectionShardEnviroment(getShards(collection),
                collection.getDB().getName(),
                collection.getName());
        splitData = pair.left;
        serverAddressList = pair.right;
    }

    List<String> stringHosts = new ArrayList<>();
    for (ServerAddress serverAddress : serverAddressList) {
        stringHosts.add(serverAddress.toString());
    }

    // One partition per split point, plus the trailing open-ended partition.
    MongoPartition[] partitions = new MongoPartition[splitData.size() + 1];

    Object lastKey = null; // Lower boundary of the first min split
    int i = 0;
    for (Object aSplitData : splitData) {

        BasicDBObject currentKey = (BasicDBObject) aSplitData;
        Object currentO = currentKey.get(MONGO_DEFAULT_ID);

        partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i, new DeepTokenRange(lastKey,
                currentO, stringHosts), MONGO_DEFAULT_ID);

        lastKey = currentO;
        i++;
    }

    // FIX: the original hard-coded rddId 0 here, inconsistent with every other
    // partition; use the configured RDD id. (An unused QueryBuilder that built
    // a $gte-lastKey query was also removed — it had no effect.)
    partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i,
            new DeepTokenRange(lastKey, null, stringHosts), MONGO_DEFAULT_ID);
    return partitions;
}
 
Example #8
Source File: MongoReader.java — From deep-spark (Apache License 2.0), 4 votes
/**
 * Builds the effective filter query for a partition: the partition's range
 * query, ANDed with the user-configured query when one is present.
 *
 * @param partition the partition
 * @return the dB object
 */
private DBObject generateFilterQuery(MongoPartition partition) {

    DBObject partitionQuery = createQueryPartition(partition);

    // No user query configured: the partition range query stands alone.
    if (mongoDeepJobConfig.getQuery() == null) {
        return partitionQuery;
    }

    QueryBuilder combined = QueryBuilder.start();
    combined.and(partitionQuery, mongoDeepJobConfig.getQuery());

    LOG.debug("mongodb query " + combined.get());

    return combined.get();
}
 
Example #9
Source File: MongoDeepJobConfig.java — From deep-spark (Apache License 2.0), 4 votes
/**
 * {@inheritDoc}
 *
 * <p>Materializes the builder into its DBObject form and stores it as this
 * job's filter query.
 */
@Override
public MongoDeepJobConfig<T> filterQuery(QueryBuilder query) {
    // Build once and keep the resulting query object.
    this.query = query.get();
    return this;
}
 
Example #10
Source File: IMongoDeepJobConfig.java — From deep-spark (Apache License 2.0), 2 votes
/**
 * Filter query
 * <p>
 * Sets the Mongo query used to restrict the documents this job reads.
 *
 * @param query the query builder whose built query will be applied to the job
 * @return this object.
 */
S filterQuery(QueryBuilder query);