me.prettyprint.hector.api.beans.Row Java Examples
The following examples show how to use me.prettyprint.hector.api.beans.Row.
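Most of the examples below follow the same pattern: build a Hector query, execute it, and iterate the resulting Row objects to read each row's key and column slice. As a quick orientation, here is a minimal sketch of that pattern. It is not taken from any of the projects below; the keyspace argument, the column family name "MyColumnFamily", and the column name "name" are placeholders.

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.beans.OrderedRows;
import me.prettyprint.hector.api.beans.Row;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.RangeSlicesQuery;

public class RowUsageSketch
{
    /**
     * Iterates rows of a column family and prints one column value per row.
     * The keyspace, column family, and column names are hypothetical.
     */
    static void printNames( Keyspace keyspace )
    {
        StringSerializer ss = StringSerializer.get();

        // Build a range slices query over string keys, column names, and values.
        RangeSlicesQuery<String, String, String> query = HFactory
            .createRangeSlicesQuery( keyspace, ss, ss, ss )
            .setColumnFamily( "MyColumnFamily" )   // placeholder column family
            .setColumnNames( "name" )              // placeholder column
            .setRowCount( 100 );

        QueryResult<OrderedRows<String, String, String>> result = query.execute();

        // Each Row exposes its key and a ColumnSlice with the requested columns.
        for ( Row<String, String, String> row : result.get() )
        {
            ColumnSlice<String, String> slice = row.getColumnSlice();
            HColumn<String, String> column = slice.getColumnByName( "name" );
            if ( column != null )
            {
                System.out.println( row.getKey() + " -> " + column.getValue() );
            }
        }
    }
}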
Example #1
Source File: AbstractSearch.java From usergrid with Apache License 2.0

/** Load the messages into an array list */
protected List<Message> loadMessages( Collection<UUID> messageIds, boolean reversed )
{
    Rows<UUID, String, ByteBuffer> messageResults =
        createMultigetSliceQuery( ko, ue, se, be )
            .setColumnFamily( MESSAGE_PROPERTIES.getColumnFamily() )
            .setKeys( messageIds )
            .setRange( null, null, false, ALL_COUNT )
            .execute().get();

    List<Message> messages = new ArrayList<Message>( messageIds.size() );

    for ( Row<UUID, String, ByteBuffer> row : messageResults )
    {
        Message message = deserializeMessage( row.getColumnSlice().getColumns() );
        if ( message != null )
        {
            messages.add( message );
        }
    }

    Collections.sort( messages, new RequestedOrderComparator( messageIds ) );

    return messages;
}
Example #2
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .execute();

    final List<String> facets = new ArrayList<>();

    for ( Row<String, String, String> row : result.get() )
    {
        facets.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
    }
    return facets;
}
Example #3
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getRootNamespaces( RepositorySession session, final String repoId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    Set<String> namespaces = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( StringUtils.substringBefore( getStringValue( row.getColumnSlice(), NAME.toString() ), "." ) );
    }

    return new ArrayList<>( namespaces );
}
Example #4
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public Stream<ArtifactMetadata> getArtifactStream( final RepositorySession session, final String repositoryId,
                                                   final QueryParameter queryParameter )
    throws MetadataResolutionException
{
    RangeSlicesQuery<String, String, String> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName( ) ) //
        .setColumnNames( ArtifactMetadataModel.COLUMNS ); //

    query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId );

    QueryResult<OrderedRows<String, String, String>> result = query.execute();

    try
    {
        return StreamSupport.stream( createResultSpliterator( result,
            ( Row<String, String, String> row, ArtifactMetadata last ) ->
                mapArtifactMetadataStringColumnSlice( row.getKey( ), row.getColumnSlice( ) ) ), false )
            .skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
    }
    catch ( MetadataRepositoryException e )
    {
        throw new MetadataResolutionException( e.getMessage( ), e );
    }
}
Example #5
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected List<String> getNamespaces( final String repoId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    List<String> namespaces = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
    }

    return namespaces;
}
Example #6
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getProjects( RepositorySession session, final String repoId, final String namespace )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
        .setColumnNames( PROJECT_ID.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .execute();

    final Set<String> projects = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) );
    }

    return new ArrayList<>( projects );
}
Example #7
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public void removeMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.metadataFacetTemplate.deleteRow( row.getKey() );
    }
}
Example #8
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected void removeChecksums( String artifactMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
            .setColumnNames( CHECKSUM_ALG.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
            .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.checksumTemplate.deleteRow( row.getKey() );
    }
}
Example #9
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected Map<String, String> getChecksums( String artifactMetadataKey )
{
    Map<String, String> checksums = new HashMap<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
            .setColumnNames( ARTIFACT_METADATA_MODEL_KEY, REPOSITORY_NAME.toString(),
                             CHECKSUM_ALG.toString(), CHECKSUM_VALUE.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.checksumTemplate.queryColumns( row.getKey() );
        checksums.put( columnFamilyResult.getString( CHECKSUM_ALG.toString() ),
                       columnFamilyResult.getString( CHECKSUM_VALUE.toString() ) );
    }

    return checksums;
}
Example #10
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected void removeMailingList( String projectVersionMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
            .setColumnNames( NAME.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.mailingListTemplate.deleteRow( row.getKey() );
    }
}
Example #11
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public void removeFacetFromArtifact( RepositorySession session, final String repositoryId, final String namespace,
                                     final String project, final String version, final MetadataFacet metadataFacet )
    throws MetadataRepositoryException
{
    RangeSlicesQuery<String, String, String> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
        .setColumnNames( NAMESPACE_ID.toString() ); //

    query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .addEqualsExpression( PROJECT.toString(), project ) //
        .addEqualsExpression( VERSION.toString(), version );

    QueryResult<OrderedRows<String, String, String>> result = query.execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.artifactMetadataTemplate.deleteRow( row.getKey() );
    }
}
Example #12
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getArtifactVersions( RepositorySession session, final String repoId, final String namespace,
                                         final String projectId, final String projectVersion )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
        .setColumnNames( VERSION.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
        .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
        .execute();

    final Set<String> versions = new HashSet<>();

    for ( Row<String, String, String> row : result.get() )
    {
        versions.add( getStringValue( row.getColumnSlice(), VERSION.toString() ) );
    }

    return new ArrayList<>( versions );
}
Example #13
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected List<License> getLicenses( String projectVersionMetadataKey )
{
    List<License> licenses = new ArrayList<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
            .setColumnNames( "projectVersionMetadataModel.key" ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.licenseTemplate.queryColumns( row.getKey() );
        licenses.add( new License( columnFamilyResult.getString( NAME.toString() ),
                                   columnFamilyResult.getString( URL.toString() ) ) );
    }

    return licenses;
}
Example #14
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public void removeMetadataFacet( RepositorySession session, final String repositoryId, final String facetId,
                                 final String name )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .addEqualsExpression( NAME.toString(), name ) //
        .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.metadataFacetTemplate.deleteRow( row.getKey() );
    }
}
Example #15
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, final String repositoryId,
                                                     final Class<T> facetClazz, final String name )
    throws MetadataRepositoryException
{
    final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
    if ( metadataFacetFactory == null )
    {
        return null;
    }
    final String facetId = metadataFacetFactory.getFacetId( );

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .addEqualsExpression( NAME.toString(), name ) //
        .execute();

    T metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );

    int size = result.get().getCount();
    if ( size < 1 )
    {
        return null;
    }

    Map<String, String> map = new HashMap<>( size );
    for ( Row<String, String, String> row : result.get() )
    {
        ColumnSlice<String, String> columnSlice = row.getColumnSlice();
        map.put( getStringValue( columnSlice, KEY.toString() ), getStringValue( columnSlice, VALUE.toString() ) );
    }
    metadataFacet.fromProperties( map );
    return metadataFacet;
}
Example #16
Source File: Cassandra12xMapDAO.java From cumulusrdf with Apache License 2.0

@Override
public K getKey( final V value ) throws DataAccessLayerException
{
    RangeSlicesQuery<K, byte[], V> rq = HFactory.createRangeSlicesQuery(
        _keyspace, _serializer_k, BYTE_SERIALIZER, _serializer_v );
    rq.addEqualsExpression( COLUMN_NAME, value ).setReturnKeysOnly()
        .setColumnFamily( _cf_name )
        .setColumnNames( COLUMN_NAME ).setRowCount( 1 );

    final List<Row<K, byte[], V>> rows = rq.execute().get().getList();
    return rows.isEmpty() ? null : _serializer_k.fromBytes( (byte[]) rows.get( 0 ).getKey() );
}
Example #17
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession session, final String repositoryId,
                                                        final ZonedDateTime startTime, final ZonedDateTime endTime,
                                                        QueryParameter queryParameter )
    throws MetadataRepositoryException
{
    LongSerializer ls = LongSerializer.get();
    RangeSlicesQuery<String, String, Long> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ls ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
        .setColumnNames( ArtifactMetadataModel.COLUMNS ); //

    if ( startTime != null )
    {
        query = query.addGteExpression( WHEN_GATHERED.toString(), startTime.toInstant().toEpochMilli() );
    }
    if ( endTime != null )
    {
        query = query.addLteExpression( WHEN_GATHERED.toString(), endTime.toInstant().toEpochMilli() );
    }
    QueryResult<OrderedRows<String, String, Long>> result = query.execute();

    List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() );
    Iterator<Row<String, String, Long>> keyIter = result.get().iterator();
    if ( keyIter.hasNext() )
    {
        String key = keyIter.next().getKey();
        for ( Row<String, String, Long> row : result.get() )
        {
            ColumnSlice<String, Long> columnSlice = row.getColumnSlice();
            String repositoryName = getAsStringValue( columnSlice, REPOSITORY_NAME.toString() );
            if ( StringUtils.equals( repositoryName, repositoryId ) )
            {
                artifactMetadatas.add( mapArtifactMetadataLongColumnSlice( key, columnSlice ) );
            }
        }
    }

    return artifactMetadatas;
}
Example #18
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<ArtifactMetadata> getArtifactsByProjectVersionAttribute( RepositorySession session, String key,
                                                                     String value, String repositoryId )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
            .setColumnNames( PROJECT_ID.toString(), REPOSITORY_NAME.toString(), NAMESPACE_ID.toString(),
                             PROJECT_VERSION.toString() ) //
            .addEqualsExpression( key, value ) //
            .execute();

    int count = result.get().getCount();
    if ( count < 1 )
    {
        return Collections.emptyList();
    }

    List<ArtifactMetadata> artifacts = new LinkedList<>( );

    for ( Row<String, String, String> row : result.get() )
    {
        // TODO doing multiple queries, there should be a way to get all the artifactMetadatas for any number of
        // projects
        try
        {
            artifacts.addAll( getArtifacts( session,
                getStringValue( row.getColumnSlice(), REPOSITORY_NAME ),
                getStringValue( row.getColumnSlice(), NAMESPACE_ID ),
                getStringValue( row.getColumnSlice(), PROJECT_ID ),
                getStringValue( row.getColumnSlice(), PROJECT_VERSION ) ) );
        }
        catch ( MetadataResolutionException e )
        {
            // never raised
            throw new IllegalStateException( e );
        }
    }
    return artifacts;
}
Example #19
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<ProjectVersionReference> getProjectReferences( RepositorySession session, String repoId, String namespace,
                                                           String projectId, String projectVersion )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
        .setColumnNames( "projectVersionMetadataModel.key" ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( GROUP_ID.toString(), namespace ) //
        .addEqualsExpression( ARTIFACT_ID.toString(), projectId ) //
        .addEqualsExpression( VERSION.toString(), projectVersion ) //
        .execute();

    List<String> dependenciesIds = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get().getList() )
    {
        dependenciesIds.add( getStringValue( row.getColumnSlice(), "projectVersionMetadataModel.key" ) );
    }

    List<ProjectVersionReference> references = new ArrayList<>( result.get().getCount() );

    for ( String key : dependenciesIds )
    {
        ColumnFamilyResult<String, String> columnFamilyResult =
            this.projectVersionMetadataTemplate.queryColumns( key );

        references.add( new ProjectVersionReference( ProjectVersionReference.ReferenceType.DEPENDENCY, //
                                                     columnFamilyResult.getString( PROJECT_ID.toString() ), //
                                                     columnFamilyResult.getString( NAMESPACE_ID.toString() ), //
                                                     columnFamilyResult.getString( PROJECT_VERSION.toString() ) ) );
    }

    return references;
}
Example #20
Source File: CassandraDB.java From cassandra-river with Apache License 2.0

public CassandraCFData getCFData( String columnFamily, String start, int limit )
{
    int columnLimit = 100;
    CassandraCFData data = new CassandraCFData();
    String lastEnd = null;
    Map<String, Map<String, String>> cfData = new HashMap<String, Map<String, String>>();

    RangeSlicesQuery<String, String, String> query = HFactory.createRangeSlicesQuery( keyspace, STR, STR, STR );
    query.setColumnFamily( columnFamily );
    query.setKeys( start, "" );
    query.setRange( "", "", false, columnLimit );
    query.setRowCount( limit );

    OrderedRows<String, String, String> rows = query.execute().get();
    if ( rows.getCount() != 1 )
    {
        lastEnd = rows.peekLast().getKey();
        data.start = lastEnd;
    }
    else
    {
        data.start = null;
        return data;
    }

    for ( Row<String, String, String> row : rows.getList() )
    {
        Map<String, String> columnMap = new HashMap<String, String>();
        ColumnSlice<String, String> columnData = row.getColumnSlice();
        for ( HColumn<String, String> column : columnData.getColumns() )
        {
            columnMap.put( column.getName(), column.getValue() );
        }
        cfData.put( row.getKey(), columnMap );
    }

    data.rowColumnMap = cfData;
    return data;
}
Example #21
Source File: Schema.java From usergrid with Apache License 2.0

public static Map<String, Object> deserializeEntityProperties( Row<UUID, String, ByteBuffer> row )
{
    if ( row == null )
    {
        return null;
    }
    ColumnSlice<String, ByteBuffer> slice = row.getColumnSlice();
    if ( slice == null )
    {
        return null;
    }
    return deserializeEntityProperties( slice.getColumns(), true, false );
}
Example #22
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected List<Dependency> getDependencies( String projectVersionMetadataKey )
{
    List<Dependency> dependencies = new ArrayList<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
            .setColumnNames( "projectVersionMetadataModel.key" ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.dependencyTemplate.queryColumns( row.getKey() );

        Dependency dependency = new Dependency();
        dependency.setClassifier( columnFamilyResult.getString( "classifier" ) );
        dependency.setOptional( Boolean.parseBoolean( columnFamilyResult.getString( "optional" ) ) );
        dependency.setScope( columnFamilyResult.getString( "scope" ) );
        dependency.setSystemPath( columnFamilyResult.getString( "systemPath" ) );
        dependency.setType( columnFamilyResult.getString( "type" ) );
        dependency.setArtifactId( columnFamilyResult.getString( ARTIFACT_ID.toString() ) );
        dependency.setNamespace( columnFamilyResult.getString( GROUP_ID.toString() ) );
        dependency.setVersion( columnFamilyResult.getString( VERSION.toString() ) );

        dependencies.add( dependency );
    }

    return dependencies;
}
Example #23
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected void removeDependencies( String projectVersionMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
            .setColumnNames( GROUP_ID.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.dependencyTemplate.deleteRow( row.getKey() );
    }
}
Example #24
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected void removeLicenses( String projectVersionMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
            .setColumnNames( NAME.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.licenseTemplate.deleteRow( row.getKey() );
    }
}
Example #25
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

protected List<MailingList> getMailingLists( String projectVersionMetadataKey )
{
    List<MailingList> mailingLists = new ArrayList<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
            .setColumnNames( NAME.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.mailingListTemplate.queryColumns( row.getKey() );

        MailingList mailingList = new MailingList();
        mailingList.setName( columnFamilyResult.getString( NAME.toString() ) );
        mailingList.setMainArchiveUrl( columnFamilyResult.getString( "mainArchiveUrl" ) );
        mailingList.setPostAddress( columnFamilyResult.getString( "postAddress" ) );
        mailingList.setSubscribeAddress( columnFamilyResult.getString( "subscribeAddress" ) );
        mailingList.setUnsubscribeAddress( columnFamilyResult.getString( "unsubscribeAddress" ) );

        List<String> otherArchives = new ArrayList<>();
        for ( String columnName : columnFamilyResult.getColumnNames() )
        {
            if ( StringUtils.startsWith( columnName, "otherArchive." ) )
            {
                otherArchives.add( columnFamilyResult.getString( columnName ) );
            }
        }
        mailingList.setOtherArchives( otherArchives );

        mailingLists.add( mailingList );
    }

    return mailingLists;
}
Example #26
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getProjectVersions( RepositorySession session, final String repoId, final String namespace,
                                        final String projectId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
        .setColumnNames( PROJECT_VERSION.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
        .execute();

    int count = result.get().getCount();
    if ( count < 1 )
    {
        return Collections.emptyList();
    }

    Set<String> versions = new HashSet<>( count );

    for ( Row<String, String, String> orderedRows : result.get() )
    {
        versions.add( getStringValue( orderedRows.getColumnSlice(), PROJECT_VERSION.toString() ) );
    }

    return new ArrayList<>( versions );
}
Example #27
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

@Override
public List<String> getChildNamespaces( RepositorySession session, final String repoId, final String namespaceId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    List<String> namespaces = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get() )
    {
        String currentNamespace = getStringValue( row.getColumnSlice(), NAME.toString() );
        if ( StringUtils.startsWith( currentNamespace, namespaceId ) //
            && ( StringUtils.length( currentNamespace ) > StringUtils.length( namespaceId ) ) )
        {
            // store after namespaceId '.' but before next '.'
            // call org namespace org.apache.maven.shared -> stored apache
            String calledNamespace = StringUtils.endsWith( namespaceId, "." ) ? namespaceId : namespaceId + ".";
            String storedNamespace = StringUtils.substringAfter( currentNamespace, calledNamespace );
            storedNamespace = StringUtils.substringBefore( storedNamespace, "." );
            namespaces.add( storedNamespace );
        }
    }

    return namespaces;
}
Example #28
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

/**
 * Implementation is not very performant, because sorting is part of the stream. I do not know how to specify the sort
 * in the query.
 *
 * @param <T>
 * @param session
 * @param repositoryId
 * @param facetClazz
 * @param queryParameter
 * @return
 * @throws MetadataRepositoryException
 */
@Override
public <T extends MetadataFacet> Stream<T> getMetadataFacetStream( RepositorySession session, String repositoryId,
                                                                    Class<T> facetClazz, QueryParameter queryParameter )
    throws MetadataRepositoryException
{
    final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
    final String facetId = metadataFacetFactory.getFacetId( );

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName( ) ) //
        .setColumnNames( NAME.toString( ), KEY.toString( ), VALUE.toString( ) ) //
        .addEqualsExpression( REPOSITORY_NAME.toString( ), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString( ), facetId ) //
        .setRange( null, null, false, Integer.MAX_VALUE )
        .setRowCount( Integer.MAX_VALUE )
        .execute( );

    return StreamSupport.stream( createResultSpliterator( result, ( Row<String, String, String> row, T lastItem ) -> {
        ColumnSlice<String, String> columnSlice = row.getColumnSlice();
        String name = getStringValue( columnSlice, NAME.toString( ) );
        T updateItem;
        if ( lastItem != null && lastItem.getName().equals( name ) )
        {
            updateItem = lastItem;
        }
        else
        {
            updateItem = metadataFacetFactory.createMetadataFacet( repositoryId, name );
        }
        String key = getStringValue( columnSlice, KEY.toString() );
        if ( StringUtils.isNotEmpty( key ) )
        {
            Map<String, String> map = new HashMap<>( );
            map.put( key, getStringValue( columnSlice, VALUE.toString( ) ) );
            updateItem.fromProperties( map );
        }
        return updateItem;
    } ), false )
        .sorted( ( f1, f2 ) -> f1.getName() != null ? f1.getName().compareTo( f2.getName() ) : 1 )
        .skip( queryParameter.getOffset() )
        .limit( queryParameter.getLimit() );
}
Example #29
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

private <T> Spliterator<T> createResultSpliterator( QueryResult<OrderedRows<String, String, String>> result,
                                                    BiFunction<Row<String, String, String>, T, T> converter )
    throws MetadataRepositoryException
{
    final int size = result.get().getCount();
    final Iterator<Row<String, String, String>> it = result.get( ).iterator( );

    return new Spliterator<T>( )
    {
        private T lastItem = null;

        @Override
        public boolean tryAdvance( Consumer<? super T> action )
        {
            if ( size >= 1 )
            {
                if ( it.hasNext() )
                {
                    while ( it.hasNext( ) )
                    {
                        Row<String, String, String> row = it.next( );
                        T item = converter.apply( row, lastItem );
                        if ( item != null && lastItem != null && item != lastItem )
                        {
                            action.accept( lastItem );
                            lastItem = item;
                            return true;
                        }
                        lastItem = item;
                    }
                    action.accept( lastItem );
                    return true;
                }
                else
                {
                    return false;
                }
            }
            return false;
        }

        @Override
        public Spliterator<T> trySplit( )
        {
            return null;
        }

        @Override
        public long estimateSize( )
        {
            return size;
        }

        @Override
        public int characteristics( )
        {
            return ORDERED + NONNULL + SIZED;
        }
    };
}
Example #30
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0

private void updateFacets( final FacetedMetadata facetedMetadata, final ArtifactMetadataModel artifactMetadataModel )
{
    String cf = cassandraArchivaManager.getMetadataFacetFamilyName();

    for ( final String facetId : getSupportedFacets() )
    {
        MetadataFacet metadataFacet = facetedMetadata.getFacet( facetId );
        if ( metadataFacet == null )
        {
            continue;
        }

        // clean first

        QueryResult<OrderedRows<String, String, String>> result =
            HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
                .setColumnFamily( cf ) //
                .setColumnNames( REPOSITORY_NAME.toString() ) //
                .addEqualsExpression( REPOSITORY_NAME.toString(), artifactMetadataModel.getRepositoryId() ) //
                .addEqualsExpression( NAMESPACE_ID.toString(), artifactMetadataModel.getNamespace() ) //
                .addEqualsExpression( PROJECT_ID.toString(), artifactMetadataModel.getProject() ) //
                .addEqualsExpression( PROJECT_VERSION.toString(), artifactMetadataModel.getProjectVersion() ) //
                .addEqualsExpression( FACET_ID.toString(), facetId ) //
                .execute();

        for ( Row<String, String, String> row : result.get().getList() )
        {
            this.metadataFacetTemplate.deleteRow( row.getKey() );
        }

        Map<String, String> properties = metadataFacet.toProperties();

        for ( Map.Entry<String, String> entry : properties.entrySet() )
        {
            String key = new MetadataFacetModel.KeyBuilder().withKey( entry.getKey() ).withArtifactMetadataModel(
                artifactMetadataModel ).withFacetId( facetId ).withName( metadataFacet.getName() ).build();

            Mutator<String> mutator = metadataFacetTemplate.createMutator() //
                .addInsertion( key, cf, column( REPOSITORY_NAME.toString(), artifactMetadataModel.getRepositoryId() ) ) //
                .addInsertion( key, cf, column( NAMESPACE_ID.toString(), artifactMetadataModel.getNamespace() ) ) //
                .addInsertion( key, cf, column( PROJECT_ID.toString(), artifactMetadataModel.getProject() ) ) //
                .addInsertion( key, cf, column( PROJECT_VERSION.toString(), artifactMetadataModel.getProjectVersion() ) ) //
                .addInsertion( key, cf, column( FACET_ID.toString(), facetId ) ) //
                .addInsertion( key, cf, column( KEY.toString(), entry.getKey() ) ) //
                .addInsertion( key, cf, column( VALUE.toString(), entry.getValue() ) );

            if ( metadataFacet.getName() != null )
            {
                mutator.addInsertion( key, cf, column( NAME.toString(), metadataFacet.getName() ) );
            }

            mutator.execute();
        }
    }
}