me.prettyprint.hector.api.factory.HFactory Java Examples
The following examples show how to use me.prettyprint.hector.api.factory.HFactory.
You can go to the original project or source file by following the links above each example.
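As a quick orientation before the project excerpts, the sketch below shows the typical HFactory workflow: connect to a cluster, obtain a Keyspace, write a column through a Mutator, and read it back with a SliceQuery. This is a minimal sketch, not code taken from any of the projects listed below; the cluster name, host, keyspace, column family, and row key are placeholder values.

    import me.prettyprint.cassandra.serializers.StringSerializer;
    import me.prettyprint.hector.api.Cluster;
    import me.prettyprint.hector.api.Keyspace;
    import me.prettyprint.hector.api.beans.ColumnSlice;
    import me.prettyprint.hector.api.beans.HColumn;
    import me.prettyprint.hector.api.factory.HFactory;
    import me.prettyprint.hector.api.mutation.Mutator;
    import me.prettyprint.hector.api.query.QueryResult;
    import me.prettyprint.hector.api.query.SliceQuery;

    public class HFactoryQuickstart {

        private static final StringSerializer ss = StringSerializer.get();

        public static void main(String[] args) {
            // Connect (or reuse an existing connection) to a Cassandra cluster over Thrift.
            Cluster cluster = HFactory.getOrCreateCluster("TestCluster", "localhost:9160");

            // Obtain a handle to an existing keyspace.
            Keyspace keyspace = HFactory.createKeyspace("MyKeyspace", cluster);

            // Write a single column with a String-keyed Mutator.
            Mutator<String> mutator = HFactory.createMutator(keyspace, ss);
            mutator.addInsertion("row-1", "MyColumnFamily",
                    HFactory.createColumn("columnName", "columnValue", ss, ss));
            mutator.execute();

            // Read the row back with a SliceQuery over its columns.
            SliceQuery<String, String, String> query = HFactory
                    .createSliceQuery(keyspace, ss, ss, ss)
                    .setColumnFamily("MyColumnFamily")
                    .setKey("row-1")
                    .setRange(null, null, false, 100);
            QueryResult<ColumnSlice<String, String>> result = query.execute();
            for (HColumn<String, String> col : result.get().getColumns()) {
                System.out.println(col.getName() + " = " + col.getValue());
            }

            // Release connections when done.
            HFactory.shutdownCluster(cluster);
        }
    }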
Example #1
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0
/**
 * Maps the role to a user list. Adds the (username, tenantId) -> roleList
 * and (role, tenantId) -> userName
 *
 * @param userNames The username list of the user the role need to be added to.
 * @param roleName  The role that needs to be mapped against the user list.
 * @param mutator   Passes the mutator and returns it with the insert statements.
 */
private Mutator<Composite> addRoleToUsersList(String[] userNames, String roleName, Mutator<Composite> mutator) {
    if (userNames != null) {
        for (String userName : userNames) {
            Composite key = new Composite();
            key.addComponent(userName, stringSerializer);
            key.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(key, CFConstants.UM_USER_ROLE, HFactory.createColumn(roleName, roleName));

            Composite keyRole = new Composite();
            keyRole.addComponent(roleName, stringSerializer);
            keyRole.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(keyRole, CFConstants.UM_ROLE_USER_INDEX, HFactory.createColumn(userName, userName));
        }
    }
    return mutator;
}
Example #2
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
@Override
public List<String> getRootNamespaces( RepositorySession session, final String repoId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    Set<String> namespaces = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( StringUtils.substringBefore( getStringValue( row.getColumnSlice(), NAME.toString() ), "." ) );
    }

    return new ArrayList<>( namespaces );
}
Example #3
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Query ops event history util method
 *
 * @param ciId
 * @param startTime
 * @param endTime
 * @param count
 * @return
 */
public List<OpsEvent> getOpsEventHistory(long ciId, Long startTime, Long endTime, int count) {
    List<OpsEvent> events = new ArrayList<>();
    RangeSlicesQuery<String, Long, String> query = HFactory.createRangeSlicesQuery(
            keyspace, stringSerializer, longSerializer, stringSerializer)
            .setColumnFamily(SchemaBuilder.OPS_EVENTS_CF)
            .setKeys(String.valueOf(ciId) + Character.MIN_VALUE, String.valueOf(ciId) + Character.MAX_VALUE)
            .setRange(startTime, endTime, false, count);
    OrderedRows<String, Long, String> rows = query.execute().get();
    for (Row<String, Long, String> row : rows) {
        ColumnSlice<Long, String> resultCols = row.getColumnSlice();
        for (HColumn<Long, String> col : resultCols.getColumns()) {
            OpsEvent event = gson.fromJson(col.getValue(), OpsEvent.class);
            events.add(event);
        }
    }
    return events;
}
Example #4
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected Map<String, String> getChecksums( String artifactMetadataKey )
{
    Map<String, String> checksums = new HashMap<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
            .setColumnNames( ARTIFACT_METADATA_MODEL_KEY, REPOSITORY_NAME.toString(),
                             CHECKSUM_ALG.toString(), CHECKSUM_VALUE.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.checksumTemplate.queryColumns( row.getKey() );
        checksums.put( columnFamilyResult.getString( CHECKSUM_ALG.toString() ),
                       columnFamilyResult.getString( CHECKSUM_VALUE.toString() ) );
    }

    return checksums;
}
Example #5
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Query open events with given ciId
 *
 * @param ciId
 * @param eventName
 * @return
 */
public long getCiOpenEventId(long ciId, String eventName) {
    SubColumnQuery<Long, String, String, byte[]> scolq = HFactory.createSubColumnQuery(
            keyspace, longSerializer, stringSerializer, stringSerializer, bytesSerializer)
            .setColumnFamily(SchemaBuilder.CI_OPEN_EVENTS_CF)
            .setKey(ciId)
            .setSuperColumn(eventName)
            .setColumn("id");
    HColumn<String, byte[]> resultCol = scolq.execute().get();
    if (resultCol != null) {
        return longSerializer.fromBytes(resultCol.getValue());
    }
    return 0;
}
Example #6
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Query the ops events with given eventId
 *
 * @param key
 * @param eventId
 * @return
 */
public OpsEvent getOpsEvent(String key, long eventId) {
    SliceQuery<String, Long, String> sliceQuery = HFactory.createSliceQuery(
            keyspace, stringSerializer, longSerializer, stringSerializer)
            .setColumnFamily(SchemaBuilder.OPS_EVENTS_CF)
            .setColumnNames(eventId)
            .setKey(key);
    ColumnSlice<Long, String> resultCols = sliceQuery.execute().get();
    if (resultCols.getColumns().size() > 0) {
        String eventJson = resultCols.getColumns().get(0).getValue();
        OpsEvent event = gson.fromJson(eventJson, OpsEvent.class);
        return event;
    }
    return null;
}
Example #7
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0
/**
 * Get the list of users mapped to a role.
 */
@Override
public String[] doGetUserListOfRole(String roleName, String filter) throws UserStoreException {
    List<String> usersList = new ArrayList<String>();

    Composite key = new Composite();
    key.addComponent(roleName, stringSerializer);
    key.addComponent(tenantIdString, stringSerializer);

    SliceQuery<Composite, String, String> query = HFactory
            .createSliceQuery(keyspace, CompositeSerializer.get(), StringSerializer.get(), StringSerializer.get())
            .setKey(key).setColumnFamily(CFConstants.UM_ROLE_USER_INDEX);

    ColumnSliceIterator<Composite, String, String> iterator = new ColumnSliceIterator<Composite, String, String>(
            query, null, "\uFFFF", false);

    while (iterator.hasNext()) {
        HColumn<String, String> column = iterator.next();
        usersList.add(column.getValue());
    }
    return usersList.toArray(new String[usersList.size()]);
}
Example #8
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected List<License> getLicenses( String projectVersionMetadataKey )
{
    List<License> licenses = new ArrayList<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
            .setColumnNames( "projectVersionMetadataModel.key" ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.licenseTemplate.queryColumns( row.getKey() );
        licenses.add( new License( columnFamilyResult.getString( NAME.toString() ),
                                   columnFamilyResult.getString( URL.toString() ) ) );
    }

    return licenses;
}
Example #9
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Query and returns list of Open Events.
 *
 * @param ciId
 * @return
 */
public List<CiOpenEvent> getCiOpenEvents(long ciId) {
    SuperSliceQuery<Long, String, String, byte[]> ssQuery = HFactory.createSuperSliceQuery(
            keyspace, longSerializer, stringSerializer, stringSerializer, bytesSerializer)
            .setColumnFamily(SchemaBuilder.CI_OPEN_EVENTS_CF)
            .setKey(ciId)
            .setRange(null, null, false, 1000);
    SuperSlice<String, String, byte[]> sSlice = ssQuery.execute().get();
    List<CiOpenEvent> openEvents = new ArrayList<>();
    if (sSlice != null) {
        for (HSuperColumn<String, String, byte[]> sCol : sSlice.getSuperColumns()) {
            CiOpenEvent event = parseSuperCol(sCol);
            openEvents.add(event);
        }
    }
    return openEvents;
}
Example #10
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Query ops event history
 *
 * @param key
 * @param startTime
 * @param endTime
 * @param count
 * @return
 */
public List<OpsEvent> getOpsEventHistory(String key, Long startTime, Long endTime, int count) {
    List<OpsEvent> events = new ArrayList<>();
    SliceQuery<String, Long, String> sliceQuery = HFactory.createSliceQuery(
            keyspace, stringSerializer, longSerializer, stringSerializer)
            .setColumnFamily(SchemaBuilder.OPS_EVENTS_CF)
            .setRange(startTime, endTime, false, count)
            .setKey(key);
    ColumnSlice<Long, String> resultCols = sliceQuery.execute().get();
    for (HColumn<Long, String> col : resultCols.getColumns()) {
        OpsEvent event = gson.fromJson(col.getValue(), OpsEvent.class);
        events.add(event);
    }
    return events;
}
Example #11
Source File: OpsCiStateDao.java From oneops with Apache License 2.0
public List<CiChangeStateEvent> getCiStateHistory(long ciId, Long startTime, Long endTime, Integer count) {
    if (count == null) count = 1000;
    List<CiChangeStateEvent> states = new ArrayList<CiChangeStateEvent>();
    SliceQuery<Long, Long, String> sliceQuery = HFactory.createSliceQuery(keyspace, longSerializer, longSerializer, stringSerializer);
    sliceQuery.setColumnFamily(SchemaBuilder.CI_STATE_HIST_CF);
    sliceQuery.setRange(startTime, endTime, false, count);
    sliceQuery.setKey(ciId);
    QueryResult<ColumnSlice<Long, String>> result = sliceQuery.execute();
    ColumnSlice<Long, String> resultCols = result.get();
    for (HColumn<Long, String> col : resultCols.getColumns()) {
        CiChangeStateEvent event = gson.fromJson(col.getValue(), CiChangeStateEvent.class);
        states.add(event);
    }
    return states;
}
Example #12
Source File: OpsCiStateDao.java From oneops with Apache License 2.0
public Map<Long, Map<String, Long>> getComponentStates(List<Long> manifestIds) {
    Map<Long, Map<String, Long>> result = new HashMap<Long, Map<String, Long>>();
    MultigetSliceCounterQuery<Long, String> query = HFactory.createMultigetSliceCounterQuery(keyspace, longSerializer, stringSerializer);
    query.setKeys(manifestIds);
    query.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    query.setRange(null, null, false, 1000);
    QueryResult<CounterRows<Long, String>> qResult = query.execute();
    CounterRows<Long, String> rows = qResult.get();
    for (CounterRow<Long, String> row : rows) {
        if (row.getColumnSlice().getColumns().size() > 0) {
            if (!result.containsKey(row.getKey())) {
                result.put(row.getKey(), new HashMap<String, Long>());
            }
            for (HCounterColumn<String> col : row.getColumnSlice().getColumns()) {
                result.get(row.getKey()).put(col.getName(), col.getValue());
            }
        }
    }
    return result;
}
Example #13
Source File: OpsCiStateDao.java From oneops with Apache License 2.0
public Map<String, Long> getComponentStates(Long manifestId) {
    Map<String, Long> result = new HashMap<String, Long>();
    SliceCounterQuery<Long, String> query = HFactory.createCounterSliceQuery(keyspace, longSerializer, stringSerializer);
    query.setKey(manifestId);
    query.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    query.setRange(null, null, false, 100);
    QueryResult<CounterSlice<String>> qResult = query.execute();
    CounterSlice<String> row = qResult.get();
    if (row != null && row.getColumns().size() > 0) {
        for (HCounterColumn<String> col : row.getColumns()) {
            result.put(col.getName(), col.getValue());
        }
    }
    return result;
}
Example #14
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected void removeMailingList( String projectVersionMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
            .setColumnNames( NAME.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.mailingListTemplate.deleteRow( row.getKey() );
    }
}
Example #15
Source File: ThresholdsDao.java From oneops with Apache License 2.0
/**
 * Bean post init method. The following configuration is used
 * for initializing the ThresholdsDao cassandra cluster,
 * <p>
 * <ul>
 * <li>Active clients per node - 4</li>
 * <li>Cassandra Thrift timeout - 600 sec</li>
 * </ul>
 */
public void init() {
    logger.info("Initializing Monitor Thresholds Dao...");
    Cluster cluster = cb.getCluster(clusterName, 4, timeout);
    logger.info("Connected to cluster : " + clusterName);
    SchemaBuilder.createSchema(cluster, keyspaceName);
    ConfigurableConsistencyLevel cl = new ConfigurableConsistencyLevel();
    cl.setDefaultWriteConsistencyLevel(HConsistencyLevel.ONE);
    cl.setDefaultReadConsistencyLevel(HConsistencyLevel.ONE);
    keyspace = HFactory.createKeyspace(keyspaceName, cluster, cl);
    thresholdMutator = HFactory.createMutator(keyspace, longSerializer);
    manifestMapMutator = HFactory.createMutator(keyspace, longSerializer);
    realizedAsMutator = HFactory.createMutator(keyspace, longSerializer);
}
Example #16
Source File: ThresholdsDao.java From oneops with Apache License 2.0
public Long getManifestId(long ciId) {
    SliceQuery<Long, String, Long> sliceQuery = HFactory.createSliceQuery(
            keyspace, longSerializer, stringSerializer, longSerializer)
            .setColumnFamily(SchemaBuilder.MANIFESTMAP_CF)
            .setColumnNames("manifestid")
            .setKey(ciId);
    QueryResult<ColumnSlice<String, Long>> result = sliceQuery.execute();
    ColumnSlice<String, Long> resultCols = result.get();
    if (resultCols.getColumnByName("manifestid") != null) {
        return resultCols.getColumnByName("manifestid").getValue();
    }
    return null;
}
Example #17
Source File: ThresholdsDao.java From oneops with Apache License 2.0
public Map<Long, List<Long>> getManifestCiIds(List<Long> manifestIds) {
    MultigetSliceQuery<Long, Long, Long> msQuery = HFactory.createMultigetSliceQuery(keyspace, longSerializer, longSerializer, longSerializer);
    msQuery.setColumnFamily(SchemaBuilder.REALIZED_AS_CF);
    msQuery.setRange(null, null, false, 10000);
    msQuery.setKeys(manifestIds);
    Rows<Long, Long, Long> rows = msQuery.execute().get();
    Map<Long, List<Long>> manifestCiIds = new HashMap<>();
    if (rows != null) {
        for (Row<Long, Long, Long> row : rows) {
            ColumnSlice<Long, Long> slice = row.getColumnSlice();
            if (slice != null && slice.getColumns().size() > 0) {
                manifestCiIds.put(row.getKey(), new ArrayList<>());
                for (HColumn<Long, Long> col : slice.getColumns()) {
                    manifestCiIds.get(row.getKey()).add(col.getName());
                }
            }
        }
    }
    return manifestCiIds;
}
Example #18
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected List<String> getNamespaces( final String repoId )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    List<String> namespaces = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
    }

    return namespaces;
}
Example #19
Source File: ThresholdsDao.java From oneops with Apache License 2.0
public Threshold getThreshold(long manifestId, String source) {
    SubSliceQuery<Long, String, String, byte[]> scolq = HFactory.createSubSliceQuery(
            keyspace, longSerializer, stringSerializer, stringSerializer, bytesSerializer)
            .setColumnFamily(SchemaBuilder.THRESHOLDS_CF)
            .setKey(manifestId)
            .setSuperColumn(source)
            .setRange(null, null, false, 100);
    QueryResult<ColumnSlice<String, byte[]>> result = scolq.execute();
    ColumnSlice<String, byte[]> resultCols = result.get();
    if (resultCols.getColumns().size() > 0) {
        return getThresholdFromCols(resultCols);
    } else {
        return null;
    }
}
Example #20
Source File: ThresholdsDao.java From oneops with Apache License 2.0
public Threshold _getThresholdNoDef(String key) {
    SliceQuery<String, String, byte[]> sliceQuery = HFactory.createSliceQuery(keyspace, stringSerializer, stringSerializer, bytesSerializer);
    sliceQuery.setColumnFamily(SchemaBuilder.THRESHOLDS_CF);
    sliceQuery.setColumnNames("checksum", "historics");
    sliceQuery.setKey(key);
    QueryResult<ColumnSlice<String, byte[]>> result = sliceQuery.execute();
    ColumnSlice<String, byte[]> resultCols = result.get();
    if (resultCols.getColumns().size() > 0) {
        Threshold threshold = new Threshold();
        if (resultCols.getColumnByName("checksum") != null) {
            threshold.setCrc(longSerializer.fromBytes(resultCols.getColumnByName("checksum").getValue()));
        }
        if (resultCols.getColumnByName("historics") != null) {
            threshold.setHistorics(stringSerializer.fromBytes(resultCols.getColumnByName("historics").getValue()));
        }
        return threshold;
    } else {
        return null;
    }
}
Example #21
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected void removeChecksums( String artifactMetadataKey )
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
            .setColumnNames( CHECKSUM_ALG.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
            .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.checksumTemplate.deleteRow( row.getKey() );
    }
}
Example #22
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
@Override
public void removeMetadataFacet( RepositorySession session, final String repositoryId, final String facetId,
                                 final String name )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .addEqualsExpression( NAME.toString(), name ) //
        .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.metadataFacetTemplate.deleteRow( row.getKey() );
    }
}
Example #23
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
@Override
public List<String> getProjects( RepositorySession session, final String repoId, final String namespace )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
        .setColumnNames( PROJECT_ID.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .execute();

    final Set<String> projects = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) );
    }

    return new ArrayList<>( projects );
}
Example #24
Source File: CassandraMetadataRepository.java From archiva with Apache License 2.0
protected Namespace getNamespace( String repositoryId, String namespaceId )
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( REPOSITORY_NAME.toString(), NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( NAME.toString(), namespaceId ) //
        .execute();

    if ( result.get().getCount() > 0 )
    {
        ColumnSlice<String, String> columnSlice = result.get().getList().get( 0 ).getColumnSlice();
        return new Namespace( getStringValue( columnSlice, NAME.toString() ), //
                              new Repository( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) ) );
    }
    return null;
}
Example #25
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0
/**
 * Checks if the role is existing the role store.
 */
@Override
protected boolean doCheckExistingRole(String roleNameWithTenantDomain) throws UserStoreException {
    RoleContext roleContext = createRoleContext(roleNameWithTenantDomain);
    boolean isExisting = false;
    String roleName = roleContext.getRoleName();

    Composite key = new Composite();
    key.addComponent(roleName, stringSerializer);
    key.addComponent(tenantIdString, stringSerializer);

    ColumnQuery<Composite, String, String> getCredentialQuery = HFactory.createColumnQuery(keyspace,
            CompositeSerializer.get(), stringSerializer, stringSerializer);
    getCredentialQuery.setColumnFamily(CFConstants.UM_ROLES).setKey(key).setName(CFConstants.UM_ROLE_NAME);

    HColumn<String, String> result = getCredentialQuery.execute().get();
    if (result != null && result.getValue() != null) {
        isExisting = true;
    }
    return isExisting;
}
Example #26
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0
/**
 * Maps the users to a role list. Adds the (username, tenantId) -> roleList
 * and (role, tenantId) -> userName
 *
 * @param userName The username of the user the roles need to be added to.
 * @param roleList The list of roles that needs to be mapped against the user.
 */
private void addUserToRoleList(String userName, String[] roleList) {
    Mutator<Composite> mutator = HFactory.createMutator(keyspace, CompositeSerializer.get());

    if (roleList != null) {
        for (String role : roleList) {
            Composite key = new Composite();
            key.addComponent(userName, stringSerializer);
            key.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(key, CFConstants.UM_USER_ROLE, HFactory.createColumn(role, role));

            Composite keyRole = new Composite();
            keyRole.addComponent(role, stringSerializer);
            keyRole.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(keyRole, CFConstants.UM_ROLE_USER_INDEX, HFactory.createColumn(userName, userName));
        }
        mutator.execute();
    }
}
Example #27
Source File: CassandraUserStoreManager.java From carbon-identity with Apache License 2.0
/**
 * Maps the users to a role list. Adds the (username, tenantId) -> roleList
 * and (role, tenantId) -> userName
 *
 * @param userName The username of the user the roles need to be added to.
 * @param roleList The list of roles that needs to be mapped against the user.
 * @param mutator  Passes the mutator and returns it with the insert statements.
 */
private Mutator<Composite> addUserToRoleList(String userName, String[] roleList, Mutator<Composite> mutator) {
    if (roleList != null && mutator != null) {
        for (String role : roleList) {
            Composite key = new Composite();
            key.addComponent(userName, stringSerializer);
            key.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(key, CFConstants.UM_USER_ROLE, HFactory.createColumn(role, role));

            Composite keyRole = new Composite();
            keyRole.addComponent(role, stringSerializer);
            keyRole.addComponent(tenantIdString, stringSerializer);

            mutator.addInsertion(keyRole, CFConstants.UM_ROLE_USER_INDEX, HFactory.createColumn(userName, userName));
        }
    }
    return mutator;
}
Example #28
Source File: OpsEventDao.java From oneops with Apache License 2.0
/**
 * Add OpsEvent for the given ciId
 *
 * @param ciId
 * @param eventName
 * @param eventId
 * @param ciState
 * @return
 */
public boolean addOpenEventForCi(long ciId, String eventName, long eventId, String ciState) {
    boolean isNew = false;
    long lastOpenId = getCiOpenEventId(ciId, eventName);
    if (lastOpenId == 0) {
        List<HColumn<String, byte[]>> subCols = new ArrayList<>();
        HColumn<String, byte[]> eventIdCol = HFactory.createColumn("id",
                longSerializer.toBytes(eventId), stringSerializer, bytesSerializer);
        HColumn<String, byte[]> eventStateCol = HFactory.createColumn("state",
                ciState.getBytes(), stringSerializer, bytesSerializer);
        subCols.add(eventIdCol);
        subCols.add(eventStateCol);
        ciMutator.insert(ciId, SchemaBuilder.CI_OPEN_EVENTS_CF,
                HFactory.createSuperColumn(eventName, subCols, stringSerializer, stringSerializer, bytesSerializer));
        isNew = true;
    }
    logger.debug("there is already an open event for" + ciId + " " + eventName + " lastOpenId " + lastOpenId);
    return isNew;
}
Example #29
Source File: Cassandra12xTripleIndexDAO.java From cumulusrdf with Apache License 2.0
/**
 * Insert a triple in POSC index.
 *
 * @param rowKey the row key.
 * @param ids the triple identifiers
 * @throws DataAccessLayerException in case of data access failure.
 */
void insertInPOSC(final byte[] rowKey, final byte[][] ids) throws DataAccessLayerException {
    final Composite sc_col = new Composite();
    sc_col.addComponent(ids[0], BYTE_SERIALIZER);

    if (ids.length == 4) {
        sc_col.addComponent(ids[3], BYTE_SERIALIZER);
    }

    // subject col
    _mutators.get()
            .addInsertion(rowKey, PO_SC, HFactory.createColumn(sc_col, EMPTY_VAL, COMPOSITE_SERIALIZER, BYTE_SERIALIZER))
            .addInsertion(rowKey, PO_SC, HFactory.createColumn(P_COL, ids[1], COMPOSITE_SERIALIZER, BYTE_SERIALIZER));
}
Example #30
Source File: OpsEventDao.java From oneops with Apache License 2.0
private String getOrphanCloseEventForCi(long ciId, String eventName, long manifestId) {
    SubColumnQuery<Long, Long, String, byte[]> scolq = HFactory
            .createSubColumnQuery(keyspace, longSerializer, longSerializer, stringSerializer, bytesSerializer)
            .setColumnFamily(SchemaBuilder.ORPHAN_CLOSE_EVENTS_CF).setKey(ciId).setSuperColumn(manifestId)
            .setColumn(eventName);
    HColumn<String, byte[]> resultCol = scolq.execute().get();
    if (resultCol != null) {
        return stringSerializer.fromBytes(resultCol.getValue());
    }
    return null;
}