Java Code Examples for me.prettyprint.hector.api.query.QueryResult#get()
The following examples show how to use
me.prettyprint.hector.api.query.QueryResult#get() .
You can vote up the ones you like or vote down the ones you don't like,
and navigate to the original project or source file by following the links above each example. You may also check out the related API usage examples listed in the sidebar.
Example 1
Source File: OpsCiStateDao.java From oneops with Apache License 2.0 | 6 votes |
/**
 * Reads the per-component state counters stored for one deployment manifest.
 *
 * @param manifestId row key of the manifest whose counters are read.
 * @return map of component-state name to counter value; empty when the row is
 *         missing or has no columns.
 */
public Map<String,Long> getComponentStates(Long manifestId) {
    Map<String,Long> states = new HashMap<>();
    SliceCounterQuery<Long, String> counterQuery =
            HFactory.createCounterSliceQuery(keyspace, longSerializer, stringSerializer);
    counterQuery.setKey(manifestId);
    counterQuery.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    // Unbounded name range, natural order, first 100 counter columns.
    counterQuery.setRange(null, null, false, 100);
    CounterSlice<String> slice = counterQuery.execute().get();
    if (slice == null || slice.getColumns().isEmpty()) {
        return states;
    }
    for (HCounterColumn<String> column : slice.getColumns()) {
        states.put(column.getName(), column.getValue());
    }
    return states;
}
Example 2
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 6 votes |
protected void removeMailingList( String projectVersionMetadataKey ) { QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) // .setColumnNames( NAME.toString() ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) // .execute(); if ( result.get().getCount() < 1 ) { return; } for ( Row<String, String, String> row : result.get() ) { this.mailingListTemplate.deleteRow( row.getKey() ); } }
Example 3
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 6 votes |
protected Map<String, String> getChecksums( String artifactMetadataKey ) { Map<String, String> checksums = new HashMap<>(); QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) // .setColumnNames( ARTIFACT_METADATA_MODEL_KEY, REPOSITORY_NAME.toString(), CHECKSUM_ALG.toString(), CHECKSUM_VALUE.toString() ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression(ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey) // .execute(); for ( Row<String, String, String> row : result.get() ) { ColumnFamilyResult<String, String> columnFamilyResult = this.checksumTemplate.queryColumns( row.getKey() ); checksums.put(columnFamilyResult.getString(CHECKSUM_ALG.toString()), columnFamilyResult.getString(CHECKSUM_VALUE.toString())); } return checksums; }
Example 4
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 6 votes |
@Override public void removeFacetFromArtifact( RepositorySession session, final String repositoryId, final String namespace, final String project, final String version, final MetadataFacet metadataFacet ) throws MetadataRepositoryException { RangeSlicesQuery<String, String, String> query = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) // .setColumnNames( NAMESPACE_ID.toString() ); // query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) // .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) // .addEqualsExpression( PROJECT.toString(), project ) // .addEqualsExpression( VERSION.toString(), version ); QueryResult<OrderedRows<String, String, String>> result = query.execute(); for ( Row<String, String, String> row : result.get() ) { this.artifactMetadataTemplate.deleteRow( row.getKey() ); } }
Example 5
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 6 votes |
@Override public List<String> getProjects( RepositorySession session, final String repoId, final String namespace ) throws MetadataResolutionException { QueryResult<OrderedRows<String, String, String>> result = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) // .setColumnNames( PROJECT_ID.toString() ) // .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) // .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) // .execute(); final Set<String> projects = new HashSet<>( result.get( ).getCount( ) ); for ( Row<String, String, String> row : result.get() ) { projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) ); } return new ArrayList<>( projects ); }
Example 6
Source File: OpsCiStateDao.java From oneops with Apache License 2.0 | 6 votes |
/**
 * Fetches the change-state event history for one CI within the given time
 * window. Column values are JSON and are deserialized with gson.
 *
 * @param ciId      row key of the CI.
 * @param startTime inclusive lower time bound, or null for open-ended.
 * @param endTime   inclusive upper time bound, or null for open-ended.
 * @param count     maximum number of events; defaults to 1000 when null.
 * @return events in natural (ascending-time) column order.
 */
public List<CiChangeStateEvent> getCiStateHistory(long ciId, Long startTime, Long endTime, Integer count) {
    int maxColumns = (count == null) ? 1000 : count;
    SliceQuery<Long, Long, String> historyQuery =
            HFactory.createSliceQuery(keyspace, longSerializer, longSerializer, stringSerializer);
    historyQuery.setColumnFamily(SchemaBuilder.CI_STATE_HIST_CF);
    historyQuery.setRange(startTime, endTime, false, maxColumns);
    historyQuery.setKey(ciId);
    ColumnSlice<Long, String> slice = historyQuery.execute().get();
    List<CiChangeStateEvent> events = new ArrayList<>();
    for (HColumn<Long, String> column : slice.getColumns()) {
        events.add(gson.fromJson(column.getValue(), CiChangeStateEvent.class));
    }
    return events;
}
Example 7
Source File: QueueManagerImpl.java From usergrid with Apache License 2.0 | 5 votes |
/**
 * Loads a single message by id and rebuilds it from its property columns.
 *
 * @param messageId row key of the message.
 * @return the deserialized message built from the row's columns.
 */
@Override
public Message getMessage( UUID messageId ) {
    SliceQuery<UUID, String, ByteBuffer> propertyQuery =
            createSliceQuery( cass.getApplicationKeyspace( applicationId ), ue, se, be );
    propertyQuery.setColumnFamily( MESSAGE_PROPERTIES.getColumnFamily() );
    propertyQuery.setKey( messageId );
    // Full-row read: unbounded name range, natural order, up to ALL_COUNT columns.
    propertyQuery.setRange( null, null, false, ALL_COUNT );
    List<HColumn<String, ByteBuffer>> propertyColumns = propertyQuery.execute().get().getColumns();
    return deserializeMessage( propertyColumns );
}
Example 8
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 5 votes |
protected void removeDependencies( String projectVersionMetadataKey ) { QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) // .setColumnNames( GROUP_ID.toString() ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) // .execute(); for ( Row<String, String, String> row : result.get() ) { this.dependencyTemplate.deleteRow( row.getKey() ); } }
Example 9
Source File: CassandraService.java From usergrid with Apache License 2.0 | 5 votes |
/**
 * Reads a fixed set of named columns from one row.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key (converted to bytes before the query)
 * @param columns the column names to fetch
 * @return the resulting column slice (may be null)
 * @throws Exception on query failure
 */
public <N, V> ColumnSlice<N, V> getColumns( Keyspace ko, Object columnFamily, Object key, N[] columns,
                                            Serializer<N> nameSerializer, Serializer<V> valueSerializer )
        throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "getColumn cf={} key={} column={}", columnFamily, key, columns );
    }
    SliceQuery<ByteBuffer, N, V> sliceQuery = HFactory.createSliceQuery( ko, be, nameSerializer, valueSerializer );
    sliceQuery.setKey( bytebuffer( key ) );
    sliceQuery.setColumnNames( columns );
    sliceQuery.setColumnFamily( columnFamily.toString() );
    ColumnSlice<N, V> slice = sliceQuery.execute().get();
    if ( db_logger.isTraceEnabled() && slice == null ) {
        db_logger.trace( "getColumn returned null" );
    }
    return slice;
}
Example 10
Source File: QueueManagerImpl.java From usergrid with Apache License 2.0 | 5 votes |
/**
 * Loads a queue's stored properties by id and rebuilds the Queue object.
 *
 * @param queuePath unused in this lookup; the read is keyed on queueId only.
 * @param queueId   row key of the queue.
 * @return the deserialized queue built from the row's columns.
 */
public Queue getQueue( String queuePath, UUID queueId ) {
    SliceQuery<UUID, String, ByteBuffer> propertyQuery =
            createSliceQuery( cass.getApplicationKeyspace( applicationId ), ue, se, be );
    propertyQuery.setColumnFamily( QUEUE_PROPERTIES.getColumnFamily() );
    propertyQuery.setKey( queueId );
    // Full-row read: unbounded name range, natural order, up to ALL_COUNT columns.
    propertyQuery.setRange( null, null, false, ALL_COUNT );
    List<HColumn<String, ByteBuffer>> propertyColumns = propertyQuery.execute().get().getColumns();
    return deserializeQueue( propertyColumns );
}
Example 11
Source File: CassandraService.java From usergrid with Apache License 2.0 | 5 votes |
/** * Gets the columns. * * @param ko the keyspace * @param columnFamily the column family * @param key the key * @param columnNames the column names * * @return columns * * @throws Exception the exception */ @SuppressWarnings("unchecked") public <N, V> List<HColumn<N, V>> getColumns( Keyspace ko, Object columnFamily, Object key, Set<String> columnNames, Serializer<N> nameSerializer, Serializer<V> valueSerializer ) throws Exception { if ( db_logger.isTraceEnabled() ) { db_logger.trace( "getColumns cf={} key={} names={}", columnFamily, key, columnNames ); } SliceQuery<ByteBuffer, N, V> q = createSliceQuery( ko, be, nameSerializer, valueSerializer ); q.setColumnFamily( columnFamily.toString() ); q.setKey( bytebuffer( key ) ); // q.setColumnNames(columnNames.toArray(new String[0])); q.setColumnNames( ( N[] ) nameSerializer.fromBytesSet( se.toBytesSet( new ArrayList<String>( columnNames ) ) ) .toArray() ); QueryResult<ColumnSlice<N, V>> r = q.execute(); ColumnSlice<N, V> slice = r.get(); List<HColumn<N, V>> results = slice.getColumns(); if ( db_logger.isTraceEnabled() ) { if ( results == null ) { db_logger.trace( "getColumns returned null" ); } else { db_logger.trace( "getColumns returned {} columns", results.size()); } } return results; }
Example 12
Source File: Cassandra12xMapDAO.java From cumulusrdf with Apache License 2.0 | 5 votes |
@Override public V get(final K key) throws DataAccessLayerException { final ColumnQuery<K, byte[], V> q = createColumnQuery(_keyspace, _serializer_k, BYTE_SERIALIZER, _serializer_v); final QueryResult<HColumn<byte[], V>> r = q.setKey(key).setName(COLUMN_NAME).setColumnFamily(_cf_name).execute(); final HColumn<byte[], V> c = r.get(); if (c == null) { return (_default_value != null) ? _default_value : null; } else { return c.getValue(); } }
Example 13
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 5 votes |
protected List<Dependency> getDependencies( String projectVersionMetadataKey ) { List<Dependency> dependencies = new ArrayList<>(); QueryResult<OrderedRows<String, String, String>> result = HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) // .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) // .setColumnNames( "projectVersionMetadataModel.key" ) // .setRowCount( Integer.MAX_VALUE ) // .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) // .execute(); for ( Row<String, String, String> row : result.get() ) { ColumnFamilyResult<String, String> columnFamilyResult = this.dependencyTemplate.queryColumns( row.getKey() ); Dependency dependency = new Dependency(); dependency.setClassifier( columnFamilyResult.getString( "classifier" ) ); dependency.setOptional( Boolean.parseBoolean( columnFamilyResult.getString( "optional" ) ) ); dependency.setScope( columnFamilyResult.getString( "scope" ) ); dependency.setSystemPath( columnFamilyResult.getString( "systemPath" ) ); dependency.setType( columnFamilyResult.getString( "type" ) ); dependency.setArtifactId( columnFamilyResult.getString( ARTIFACT_ID.toString() ) ); dependency.setNamespace( columnFamilyResult.getString( GROUP_ID.toString() ) ); dependency.setVersion( columnFamilyResult.getString( VERSION.toString() ) ); dependencies.add( dependency ); } return dependencies; }
Example 14
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 5 votes |
@Override public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession session, final String repositoryId, final ZonedDateTime startTime, final ZonedDateTime endTime, QueryParameter queryParameter ) throws MetadataRepositoryException { LongSerializer ls = LongSerializer.get(); RangeSlicesQuery<String, String, Long> query = HFactory // .createRangeSlicesQuery( keyspace, ss, ss, ls ) // .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) // .setColumnNames( ArtifactMetadataModel.COLUMNS ); // if ( startTime != null ) { query = query.addGteExpression( WHEN_GATHERED.toString(), startTime.toInstant().toEpochMilli() ); } if ( endTime != null ) { query = query.addLteExpression( WHEN_GATHERED.toString(), endTime.toInstant().toEpochMilli() ); } QueryResult<OrderedRows<String, String, Long>> result = query.execute(); List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() ); Iterator<Row<String, String, Long>> keyIter = result.get().iterator(); if (keyIter.hasNext()) { String key = keyIter.next().getKey(); for (Row<String, String, Long> row : result.get()) { ColumnSlice<String, Long> columnSlice = row.getColumnSlice(); String repositoryName = getAsStringValue(columnSlice, REPOSITORY_NAME.toString()); if (StringUtils.equals(repositoryName, repositoryId)) { artifactMetadatas.add(mapArtifactMetadataLongColumnSlice(key, columnSlice)); } } } return artifactMetadatas; }
Example 15
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0 | 4 votes |
/**
 * Finds artifacts through the metadata-facet table: first selects facet rows
 * whose VALUE column equals {@code value} (optionally narrowed by key and
 * repository), then issues one artifact-metadata query per facet row.
 *
 * NOTE(review): when any per-row artifact query (or the initial facet query)
 * comes back empty, this returns an empty list immediately, discarding
 * results accumulated from earlier rows — confirm this short-circuit is
 * intentional rather than a {@code continue}.
 *
 * @param session the repository session (unused in this implementation).
 * @param key optional facet key to match; skipped when null.
 * @param value required facet value to match.
 * @param repositoryId optional repository name to match; skipped when null.
 * @return the matched artifacts after facet post-processing.
 * @throws MetadataRepositoryException declared by the interface contract.
 */
@Override
public List<ArtifactMetadata> getArtifactsByAttribute( RepositorySession session, String key, String value,
                                                       String repositoryId )
    throws MetadataRepositoryException
{
    RangeSlicesQuery<String, String, String> query = HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( MetadataFacetModel.COLUMNS ) //
        .addEqualsExpression( VALUE.toString(), value );
    if ( key != null )
    {
        query.addEqualsExpression( KEY.toString(), key ); //
    }
    if ( repositoryId != null )
    {
        query.addEqualsExpression( "repositoryName", repositoryId );
    }
    QueryResult<OrderedRows<String, String, String>> metadataFacetResult = query.execute();
    if ( metadataFacetResult.get() == null || metadataFacetResult.get().getCount() < 1 )
    {
        return Collections.emptyList();
    }

    List<ArtifactMetadata> artifactMetadatas = new LinkedList<>( );

    // TODO doing multiple queries, there should be a way to get all the artifactMetadatas for any number of
    // projects
    for ( Row<String, String, String> row : metadataFacetResult.get() )
    {
        // One artifact-metadata query per facet row, keyed by the facet row's coordinates.
        QueryResult<OrderedRows<String, String, String>> artifactMetadataResult =
            HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
                .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
                .setColumnNames( ArtifactMetadataModel.COLUMNS ) //
                .setRowCount( Integer.MAX_VALUE ) //
                .addEqualsExpression( REPOSITORY_NAME.toString(), getStringValue( row.getColumnSlice(), REPOSITORY_NAME ) ) //
                .addEqualsExpression( NAMESPACE_ID.toString(), getStringValue( row.getColumnSlice(), NAMESPACE_ID ) ) //
                .addEqualsExpression( PROJECT.toString(), getStringValue( row.getColumnSlice(), PROJECT_ID ) ) //
                .addEqualsExpression( PROJECT_VERSION.toString(), getStringValue( row.getColumnSlice(), PROJECT_VERSION ) ) //
                .execute();

        if ( artifactMetadataResult.get() == null || artifactMetadataResult.get().getCount() < 1 )
        {
            // See NOTE(review) above: this drops earlier rows' results.
            return Collections.emptyList();
        }

        for ( Row<String, String, String> artifactMetadataRow : artifactMetadataResult.get() )
        {
            String artifactKey = artifactMetadataRow.getKey();
            artifactMetadatas.add( mapArtifactMetadataStringColumnSlice( artifactKey, artifactMetadataRow.getColumnSlice() ) );
        }
    }

    return mapArtifactFacetToArtifact( metadataFacetResult, artifactMetadatas );
}
Example 16
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0 | 4 votes |
/**
 * Lists the users in the user store whose names match the given filter,
 * capped by {@code maxItemLimit} and the realm-configured maximum.
 */
@Override
protected String[] doListUsers(String filter, int maxItemLimit) throws UserStoreException {
    List<String> users = new ArrayList<String>();
    int arrayLength = 0;
    // A limit of exactly 0 means "return nothing".
    if (maxItemLimit == 0) {
        return new String[0];
    }
    int givenMax = UserCoreConstants.MAX_USER_ROLE_LIST;
    try {
        givenMax = Integer.parseInt(realmConfig
                .getUserStoreProperty(UserCoreConstants.RealmConfig.PROPERTY_MAX_USER_LIST));
    } catch (Exception e) {
        // NOTE(review): broad catch covers both a missing property and a malformed
        // number; either way the core default is used.
        givenMax = UserCoreConstants.MAX_USER_ROLE_LIST;
        if (log.isDebugEnabled()) {
            log.debug("Realm configuration maximum not set : Using User Core Constant value instead!", e);
        }
    }
    // Negative or oversized limits are clamped to the configured maximum.
    if (maxItemLimit < 0 || maxItemLimit > givenMax) {
        maxItemLimit = givenMax;
    }
    RangeSlicesQuery<String, String, String> rangeSliceQuery =
            HFactory.createRangeSlicesQuery(keyspace, stringSerializer, stringSerializer, stringSerializer);
    rangeSliceQuery.setColumnFamily(CFConstants.UM_USER);
    rangeSliceQuery.setRange(filter, null, false, Integer.MAX_VALUE);
    rangeSliceQuery.addEqualsExpression(CFConstants.UM_TENANT_ID, tenantIdString);

    // TODO - Need to check how to use the filter for range
    rangeSliceQuery.setKeys("", "");
    rangeSliceQuery.setRowCount(maxItemLimit);
    QueryResult<OrderedRows<String, String, String>> result = rangeSliceQuery.execute();
    if (result != null) {
        OrderedRows<String, String, String> rows = result.get();
        if (rows.getCount() <= 0) {
            // reformatted to avoid nesting too many blocks
            return users.toArray(new String[arrayLength]);
        }
        arrayLength = rows.getCount();
        Iterator<Row<String, String, String>> rowsIterator = rows.iterator();
        while (rowsIterator.hasNext()) {
            Row<String, String, String> row = rowsIterator.next();
            // Rows without a user-id column value are skipped.
            if (row.getColumnSlice().getColumnByName(CFConstants.UM_USER_ID).getValue() != null) {
                String name = row.getColumnSlice().getColumnByName(CFConstants.UM_USER_NAME).getValue();
                // append the domain if exist
                name = UserCoreUtil.addDomainToName(name, domain);
                users.add(name);
            }
        }
    }
    return users.toArray(new String[arrayLength]);
}
Example 17
Source File: CassandraService.java From usergrid with Apache License 2.0 | 4 votes |
/**
 * Gets a range of columns from one row.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the row key (converted to bytes before the query)
 * @param start the start bound (accepts DynamicComposite, List, or a plain value)
 * @param finish the end bound (same accepted types as start)
 * @param count the maximum number of columns to return
 * @param reversed whether to scan the range in reverse order
 * @return the columns in the requested range
 * @throws Exception on query failure
 */
public List<HColumn<ByteBuffer, ByteBuffer>> getColumns( Keyspace ko, Object columnFamily, Object key, Object start,
                                                         Object finish, int count, boolean reversed )
        throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        // Fix: the original guarded with isTraceEnabled() but logged at DEBUG via
        // string concatenation; now it logs at TRACE with a parameterized message.
        db_logger.trace( "getColumns cf={} key={} start={} finish={} count={} reversed={}", columnFamily, key,
                start, finish, count, reversed );
    }

    SliceQuery<ByteBuffer, ByteBuffer, ByteBuffer> q = createSliceQuery( ko, be, be, be );
    q.setColumnFamily( columnFamily.toString() );
    q.setKey( bytebuffer( key ) );
    q.setRange( toRangeBound( start ), toRangeBound( finish ), reversed, count );

    QueryResult<ColumnSlice<ByteBuffer, ByteBuffer>> r = q.execute();
    ColumnSlice<ByteBuffer, ByteBuffer> slice = r.get();
    List<HColumn<ByteBuffer, ByteBuffer>> results = slice.getColumns();

    if ( db_logger.isTraceEnabled() ) {
        if ( results == null ) {
            db_logger.trace("getColumns returned null");
        }
        else {
            db_logger.trace("getColumns returned {} columns", results.size());
        }
    }
    return results;
}

/** Serializes a range bound that may be a DynamicComposite, a List, or a plain value. */
private static ByteBuffer toRangeBound( Object bound ) {
    if ( bound instanceof DynamicComposite ) {
        return ( ( DynamicComposite ) bound ).serialize();
    }
    if ( bound instanceof List ) {
        return DynamicComposite.toByteBuffer( ( List<?> ) bound );
    }
    return bytebuffer( bound );
}
Example 18
Source File: Cassandra12xMapDAO.java From cumulusrdf with Apache License 2.0 | 4 votes |
/**
 * Tells whether a value is stored under the given key.
 *
 * @param key the map key to probe.
 * @return true when the backing column exists, false otherwise.
 */
@Override
public boolean contains(final K key) {
    final ColumnQuery<K, byte[], byte[]> probe =
            createColumnQuery(_keyspace, _serializer_k, BYTE_SERIALIZER, BYTE_SERIALIZER);
    probe.setKey(key);
    probe.setName(COLUMN_NAME);
    probe.setColumnFamily(_cf_name);
    // A null column means the key is absent.
    return probe.execute().get() != null;
}
Example 19
Source File: HectorPolicyManagerImpl.java From ck with Apache License 2.0 | 4 votes |
/**
 * Loads every stored policy row and maps it to a {@link PolicyDAO}, skipping
 * rows that have no column slice or no short-name column (tombstones).
 */
@Override
public List<PolicyDAO> getAllPolicies() {
    List<NamedColumn<UUID, String, String>> list = CollectionUtils.list(
            Schema.POLICIES.SHORT_NAME,
            Schema.POLICIES.DESCRIPTION);
    RangeSlicesQuery<UUID, String, String> query = Schema.POLICIES.createRangeSlicesQuery(_keyspace,
            list);

    // TODO: may need paging of data once we have more than a few hundred.
    // This may need some sort of indexing since we're using RandomPartitioner,
    // in order to return them in a useful order.
    query.setRowCount(1000);

    // TODO: needed?
    // query.setKeys("fake_key_0", "fake_key_4");

    QueryResult<OrderedRows<UUID, String, String>> result = query.execute();
    OrderedRows<UUID, String, String> orderedRows = result.get();

    if (orderedRows == null) {
        return Collections.emptyList();
    }

    return Functional.filter(orderedRows.getList(),
            new Filter<Row<UUID, String, String>, PolicyDAO>() {
                @Override
                public PolicyDAO filter(Row<UUID, String, String> row) throws SkippedElementException {
                    ColumnSlice<String, String> cs = row.getColumnSlice();
                    if (cs == null) {
                        // Row carries no columns at all: skip it.
                        throw new SkippedElementException();
                    }

                    String shortName;
                    try {
                        shortName = getNonNullStringColumn(cs, Schema.POLICIES.SHORT_NAME.getName());
                    } catch (NoSuchColumnException e) {
                        // Tombstone row
                        throw new SkippedElementException();
                    }

                    String description = getStringColumnOrNull(cs, Schema.POLICIES.DESCRIPTION.getName());

                    // FIXME: can't get date from string result.
                    // To fix this, we need variable-value-typed range slices queries.
                    return new PolicyDAOImpl(new HectorPolicyIDImpl(row.getKey()), shortName, description,
                            new Date());
                }
            });
}
Example 20
Source File: Util.java From carbon-identity-framework with Apache License 2.0 | 4 votes |
/**
 * Resolves a user id from the reverse-lookup (username index) column family.
 *
 * @param credentialTypeName the credential type used to build the reverse-lookup row key.
 * @param identifier the credential identifier to resolve.
 * @param keyspace the Cassandra keyspace to query.
 * @return the stored user id, or null when no index entry exists.
 */
public static String getExistingUserId(String credentialTypeName, String identifier, Keyspace keyspace) {
    String rowKey = createRowKeyForReverseLookup(identifier, credentialTypeName);
    ColumnQuery<String, String, String> usernameIndexQuery = HFactory.createColumnQuery(keyspace,
            stringSerializer, stringSerializer, stringSerializer);
    usernameIndexQuery.setColumnFamily(CFConstants.USERNAME_INDEX);
    usernameIndexQuery.setKey(rowKey);
    usernameIndexQuery.setName(CFConstants.USER_ID);
    HColumn<String, String> userIdColumn = usernameIndexQuery.execute().get();
    return (userIdColumn == null) ? null : userIdColumn.getValue();
}