Java Code Examples for com.netflix.astyanax.model.Row#getColumns()
The following examples show how to use com.netflix.astyanax.model.Row#getColumns().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You can also check out the related API usage on the sidebar.
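Before the project examples, here is a minimal self-contained sketch of the core pattern: run a query, iterate the resulting Rows, and call getColumns() on each Row to walk its ColumnList. The keyspace wiring and the example_cf column family are hypothetical, for illustration only.

import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.serializers.StringSerializer;

public class GetColumnsSketch {

    // Hypothetical column family with String row keys and String column names.
    private static final ColumnFamily<String, String> CF_EXAMPLE =
            new ColumnFamily<String, String>("example_cf", StringSerializer.get(), StringSerializer.get());

    /** Prints every column of every row; assumes the caller supplies an already-configured Keyspace. */
    public static void dumpAllRows(Keyspace keyspace) throws ConnectionException {
        Rows<String, String> rows = keyspace.prepareQuery(CF_EXAMPLE)
                .getAllRows()
                .execute()
                .getResult();
        for (Row<String, String> row : rows) {
            // Row#getColumns() returns the ColumnList fetched for this row.
            ColumnList<String> columns = row.getColumns();
            for (Column<String> column : columns) {
                System.out.println(row.getKey() + " / " + column.getName()
                        + " = " + column.getStringValue());
            }
        }
    }
}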
Example 1
Source File: AstyanaxQueueDAO.java from emodb (Apache License 2.0), 6 votes
@Override
public Map<UUID, ByteBuffer> findMaxRecords(Collection<UUID> dataIds) {
    // Finding the max using a reversed column range shouldn't have to worry about skipping tombstones since
    // we always delete smaller column values before deleting larger column values--scanning will hit the max
    // before needing to skip over tombstones.
    Map<UUID, ByteBuffer> resultMap = Maps.newHashMap();
    for (List<UUID> batch : Iterables.partition(dataIds, 10)) {
        Rows<UUID, ByteBuffer> rows = execute(
                _keyspace.prepareQuery(CF_DEDUP_DATA, ConsistencyLevel.CL_LOCAL_QUORUM)
                        .getKeySlice(batch)
                        .withColumnRange(new RangeBuilder()
                                .setReversed(true)
                                .setLimit(1)
                                .build()));
        for (Row<UUID, ByteBuffer> row : rows) {
            UUID dataId = row.getKey();
            for (Column<ByteBuffer> column : row.getColumns()) {
                resultMap.put(dataId, column.getName());
            }
        }
    }
    return resultMap;
}
Example 2
Source File: CassandraArchiveRepository.java from Nicobar (Apache License 2.0), 6 votes
/**
 * Get a summary of all archives in this Repository
 * @return List of summaries
 */
@Override
public List<ArchiveSummary> getArchiveSummaries() throws IOException {
    List<ArchiveSummary> summaries = new LinkedList<ArchiveSummary>();
    Iterable<Row<String, String>> rows;
    try {
        rows = getRows((EnumSet<?>) EnumSet.of(Columns.module_id, Columns.last_update, Columns.module_spec));
    } catch (Exception e) {
        throw new IOException(e);
    }
    for (Row<String, String> row : rows) {
        String moduleId = row.getKey();
        ColumnList<String> columns = row.getColumns();
        Column<String> lastUpdateColumn = columns.getColumnByName(Columns.last_update.name());
        long updateTime = lastUpdateColumn != null ? lastUpdateColumn.getLongValue() : 0;
        ScriptModuleSpec moduleSpec = getModuleSpec(columns);
        ArchiveSummary summary = new ArchiveSummary(ModuleId.fromString(moduleId), moduleSpec, updateTime, null);
        summaries.add(summary);
    }
    return summaries;
}
Example 3
Source File: AstyanaxStorageProvider.java from emodb (Apache License 2.0), 5 votes
private static Iterator<Map.Entry<String, StorageSummary>> decodeMetadataRows(
        final Iterator<Row<ByteBuffer, Composite>> rowIter, final AstyanaxTable table) {
    return new AbstractIterator<Map.Entry<String, StorageSummary>>() {
        @Override
        protected Map.Entry<String, StorageSummary> computeNext() {
            while (rowIter.hasNext()) {
                Row<ByteBuffer, Composite> row = rowIter.next();
                ByteBuffer key = row.getKey();
                ColumnList<Composite> columns = row.getColumns();

                String blobId = AstyanaxStorage.getContentKey(key);
                StorageSummary summary = toStorageSummary(columns);
                if (summary == null) {
                    continue;  // Partial blob, parts may still be replicating.
                }

                // TODO should be removed for blob s3 migration
                // Cleanup older versions of the blob, if any (unlikely).
                deleteDataColumns(table, blobId, columns, ConsistencyLevel.CL_ANY, summary.getTimestamp());

                return Maps.immutableEntry(blobId, summary);
            }
            return endOfData();
        }
    };
}
Example 4
Source File: AstyanaxMetaDaoImpl.java from staash (Apache License 2.0), 5 votes
public Map<String, JsonObject> runQuery(String key, String col) {
    OperationResult<CqlStatementResult> rs;
    Map<String, JsonObject> resultMap = new HashMap<String, JsonObject>();
    try {
        String queryStr = "";
        if (col != null && !col.equals("*")) {
            queryStr = "select column1, value from " + MetaConstants.META_KEY_SPACE + "."
                    + MetaConstants.META_COLUMN_FAMILY + " where key='" + key + "' and column1='" + col + "';";
        } else {
            queryStr = "select column1, value from " + MetaConstants.META_KEY_SPACE + "."
                    + MetaConstants.META_COLUMN_FAMILY + " where key='" + key + "';";
        }
        rs = keyspace.prepareCqlStatement().withCql(queryStr).execute();
        for (Row<String, String> row : rs.getResult().getRows(METACF)) {
            ColumnList<String> columns = row.getColumns();
            String key1 = columns.getStringValue("column1", null);
            String val1 = columns.getStringValue("value", null);
            resultMap.put(key1, new JsonObject(val1));
        }
    } catch (ConnectionException e) {
        e.printStackTrace();
        throw new RuntimeException(e.getMessage());
    }
    return resultMap;
}
Example 5
Source File: AstyanaxThriftDataTableResource.java from staash (Apache License 2.0), 5 votes
@Override
public QueryResult listRows(String cursor, Integer rowLimit, Integer columnLimit) throws PaasException {
    try {
        invariant();

        // Execute the query
        Partitioner partitioner = keyspace.getPartitioner();
        Rows<ByteBuffer, ByteBuffer> result = keyspace
                .prepareQuery(columnFamily)
                .getKeyRange(null, null, cursor != null ? cursor : partitioner.getMinToken(),
                        partitioner.getMaxToken(), rowLimit)
                .execute()
                .getResult();

        // Convert raw data into a simple sparse tree
        SchemalessRows.Builder builder = SchemalessRows.builder();
        for (Row<ByteBuffer, ByteBuffer> row : result) {
            Map<String, String> columns = Maps.newHashMap();
            for (Column<ByteBuffer> column : row.getColumns()) {
                columns.put(serializers.columnAsString(column.getRawName()),
                        serializers.valueAsString(column.getRawName(), column.getByteBufferValue()));
            }
            builder.addRow(serializers.keyAsString(row.getKey()), columns);
        }

        QueryResult dr = new QueryResult();
        dr.setSrows(builder.build());
        if (!result.isEmpty()) {
            dr.setCursor(partitioner.getTokenForKey(Iterables.getLast(result).getKey()));
        }
        return dr;
    } catch (ConnectionException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    return null;
}
Example 6
Source File: CassandraArchiveRepository.java from Nicobar (Apache License 2.0), 4 votes
/**
 * Get all of the {@link ScriptArchive}s for the given set of moduleIds. Will perform the operation in batches
 * as specified by {@link CassandraArchiveRepositoryConfig#getArchiveFetchBatchSize()} and outputs the jar files in
 * the path specified by {@link CassandraArchiveRepositoryConfig#getArchiveOutputDirectory()}.
 *
 * @param moduleIds keys to search for
 * @return set of ScriptArchives retrieved from the database
 */
@Override
public Set<ScriptArchive> getScriptArchives(Set<ModuleId> moduleIds) throws IOException {
    Set<ScriptArchive> archives = new LinkedHashSet<ScriptArchive>(moduleIds.size() * 2);
    Path archiveOuputDir = getConfig().getArchiveOutputDirectory();
    List<ModuleId> moduleIdList = new LinkedList<ModuleId>(moduleIds);
    int batchSize = getConfig().getArchiveFetchBatchSize();
    int start = 0;
    try {
        while (start < moduleIdList.size()) {
            int end = Math.min(moduleIdList.size(), start + batchSize);
            List<ModuleId> batchModuleIds = moduleIdList.subList(start, end);
            List<String> rowKeys = new ArrayList<String>(batchModuleIds.size());
            for (ModuleId batchModuleId : batchModuleIds) {
                rowKeys.add(batchModuleId.toString());
            }

            Rows<String, String> rows = cassandra.getRows(rowKeys.toArray(new String[0]));
            for (Row<String, String> row : rows) {
                String moduleId = row.getKey();
                ColumnList<String> columns = row.getColumns();
                Column<String> lastUpdateColumn = columns.getColumnByName(Columns.last_update.name());
                Column<String> hashColumn = columns.getColumnByName(Columns.archive_content_hash.name());
                Column<String> contentColumn = columns.getColumnByName(Columns.archive_content.name());
                if (lastUpdateColumn == null || hashColumn == null || contentColumn == null) {
                    continue;
                }
                ScriptModuleSpec moduleSpec = getModuleSpec(columns);
                long lastUpdateTime = lastUpdateColumn.getLongValue();
                byte[] hash = hashColumn.getByteArrayValue();
                byte[] content = contentColumn.getByteArrayValue();

                // verify the hash
                if (hash != null && hash.length > 0 && !verifyHash(hash, content)) {
                    logger.warn("Content hash validation failed for moduleId {}. size: {}", moduleId, content.length);
                    continue;
                }

                String fileName = new StringBuilder().append(moduleId).append("-").append(lastUpdateTime).append(".jar").toString();
                Path jarFile = archiveOuputDir.resolve(fileName);
                Files.write(jarFile, content);
                JarScriptArchive scriptArchive = new JarScriptArchive.Builder(jarFile)
                        .setModuleSpec(moduleSpec)
                        .setCreateTime(lastUpdateTime)
                        .build();
                archives.add(scriptArchive);
            }
            start = end;
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
    return archives;
}