Java Code Examples for com.datastax.driver.core.ResultSet#getColumnDefinitions()
The following examples show how to use
com.datastax.driver.core.ResultSet#getColumnDefinitions().
You can go to the original project or source file by following the links above each example.
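Before the project examples, here is a minimal, self-contained sketch of the basic pattern, using the driver 3.x com.datastax.driver.core API that all of the examples below target. The contact point 127.0.0.1 and the table my_ks.my_table are illustrative assumptions:

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ColumnDefinitions;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

public class ColumnDefinitionsDemo {
    public static void main(String[] args) {
        // Contact point and table name are illustrative assumptions
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            ResultSet rs = session.execute("SELECT * FROM my_ks.my_table LIMIT 1");
            // Each Definition carries the keyspace, table, column name and CQL type
            for (ColumnDefinitions.Definition def : rs.getColumnDefinitions()) {
                System.out.printf("%s.%s.%s : %s%n",
                        def.getKeyspace(), def.getTable(), def.getName(), def.getType());
            }
        }
    }
}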
Example 1
Source File: CassandraQuery.java From micro-integrator with Apache License 2.0 | 6 votes |
@Override
public void runPostQuery(Object result, XMLStreamWriter xmlWriter, InternalParamCollection params,
                         int queryLevel) throws DataServiceFault {
    ResultSet rs = (ResultSet) result;
    if (this.hasResult()) {
        Iterator<Row> itr = rs.iterator();
        Row row;
        DataEntry dataEntry;
        ColumnDefinitions defs = rs.getColumnDefinitions();
        while (itr.hasNext()) {
            row = itr.next();
            dataEntry = this.getDataEntryFromRow(row, defs);
            this.writeResultEntry(xmlWriter, dataEntry, params, queryLevel);
        }
    }
}
Example 2
Source File: CassandraFactory.java From database-transform-tool with Apache License 2.0 | 6 votes |
/**
 * Returns the column names of a table (key: column name, value: column type name).
 * Date: 2017-11-15 11:29:32
 *
 * @author yi.zhang
 * @param table the table name
 * @return a map of column name to column type name, or null on failure
 */
public Map<String, String> queryColumns(String table) {
    try {
        String sql = "select * from " + table;
        ResultSet rs = session.execute(sql);
        ColumnDefinitions rscd = rs.getColumnDefinitions();
        int count = rscd.size();
        Map<String, String> reflect = new HashMap<String, String>();
        for (int i = 0; i < count; i++) {
            String column = rscd.getName(i);
            String type = rscd.getType(i).getName().name().toLowerCase();
            reflect.put(column, type);
        }
        return reflect;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
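A hedged usage sketch for the method above; the CassandraFactory construction is project-specific and omitted, so an initialized instance named factory and the table name users are assumptions:

// Assumes an initialized CassandraFactory named "factory" and an
// illustrative table name; prints each column with its CQL type name.
Map<String, String> columns = factory.queryColumns("users");
if (columns != null) {
    for (Map.Entry<String, String> e : columns.entrySet()) {
        System.out.println(e.getKey() + " -> " + e.getValue()); // e.g. "email -> varchar"
    }
}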
Example 3
Source File: CassandraDbProvider.java From ats-framework with Apache License 2.0 | 6 votes |
/**
 * Returns a map with column name as key and column data type as value.
 *
 * The value might be as simple as "Boolean" or more complex like:
 *  - "Set|Boolean"
 *  - "List|String"
 *  - "Map|String|Integer"
 * These are cases when the data type is a container of primitive data types.
 *
 * @param tableName the table to inspect
 * @return a map of column name to data type description
 * @throws DbException
 */
public Map<String, String> getColumnInfo( String tableName ) throws DbException {
    connect();
    ResultSet results = session.execute("SELECT * FROM " + this.dbName + "." + tableName + " LIMIT 1");
    Map<String, String> columnInfo = new HashMap<String, String>();
    for (Definition columnDefinition : results.getColumnDefinitions()) {
        DataType dataType = columnDefinition.getType();
        String dataTypeName = dataType.getName().name();
        if ("Set".equalsIgnoreCase(dataTypeName)) {
            dataTypeName = dataTypeName + "|" + dataType.getTypeArguments().get(0);
        } else if ("List".equalsIgnoreCase(dataTypeName)) {
            dataTypeName = dataTypeName + "|" + dataType.getTypeArguments().get(0);
        } else if ("Map".equalsIgnoreCase(dataTypeName)) {
            dataTypeName = dataTypeName + "|" + dataType.getTypeArguments().get(0) + "|"
                    + dataType.getTypeArguments().get(1);
        }
        columnInfo.put(columnDefinition.getName(), dataTypeName);
    }
    return columnInfo;
}
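The same container handling can also be written against the driver's DataType.Name enum instead of case-insensitive string comparison. A sketch (the method name describeType is illustrative; the behavior is intended to match the example above):

import java.util.List;

import com.datastax.driver.core.DataType;

// Enum-based variant of the Set/List/Map branching above
static String describeType(DataType dataType) {
    DataType.Name name = dataType.getName();
    List<DataType> args = dataType.getTypeArguments();
    switch (name) {
        case SET:
        case LIST:
            // One type argument: the element type
            return name.name() + "|" + args.get(0);
        case MAP:
            // Two type arguments: key and value types
            return name.name() + "|" + args.get(0) + "|" + args.get(1);
        default:
            return name.name();
    }
}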
Example 4
Source File: CqlMetaDaoImpl.java From staash with Apache License 2.0 | 6 votes |
private String convertResultSet(ResultSet rs) {
    String colStr = "";
    String rowStr = "";
    JsonObject response = new JsonObject();
    List<Row> rows = rs.all();
    if (!rows.isEmpty() && rows.size() == 1) {
        rowStr = rows.get(0).toString();
    }
    ColumnDefinitions colDefs = rs.getColumnDefinitions();
    colStr = colDefs.toString();
    // Strip the "Columns[" prefix and trailing "]" from ColumnDefinitions.toString()
    response.putString("columns", colStr.substring(8, colStr.length() - 1));
    // Strip the "Row[" prefix and trailing "]" from Row.toString()
    response.putString("values", rowStr.substring(4, rowStr.length() - 1));
    return response.toString();
}
Example 5
Source File: CassandraDataHandler.java From micro-integrator with Apache License 2.0 | 5 votes |
@Override
public List<ODataEntry> readTable(String tableName) throws ODataServiceFault {
    Statement statement = new SimpleStatement("Select * from " + this.keyspace + "." + tableName);
    ResultSet resultSet = this.session.execute(statement);
    Iterator<Row> iterator = resultSet.iterator();
    List<ODataEntry> entryList = new ArrayList<>();
    ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
    while (iterator.hasNext()) {
        ODataEntry dataEntry = createDataEntryFromRow(tableName, iterator.next(), columnDefinitions);
        entryList.add(dataEntry);
    }
    return entryList;
}
Example 6
Source File: CassandraDataHandler.java From micro-integrator with Apache License 2.0 | 5 votes |
@Override
public List<ODataEntry> readTableWithKeys(String tableName, ODataEntry keys) throws ODataServiceFault {
    List<ColumnMetadata> cassandraTableMetaData = this.session.getCluster().getMetadata()
            .getKeyspace(this.keyspace).getTable(tableName).getColumns();
    List<String> pKeys = this.primaryKeys.get(tableName);
    String query = createReadSqlWithKeys(tableName, keys);
    List<Object> values = new ArrayList<>();
    for (String column : this.tableMetaData.get(tableName).keySet()) {
        if (keys.getNames().contains(column) && pKeys.contains(column)) {
            bindParams(column, keys.getValue(column), values, cassandraTableMetaData);
        }
    }
    PreparedStatement statement = this.preparedStatementMap.get(query);
    if (statement == null) {
        statement = this.session.prepare(query);
        this.preparedStatementMap.put(query, statement);
    }
    ResultSet resultSet = this.session.execute(statement.bind(values.toArray()));
    List<ODataEntry> entryList = new ArrayList<>();
    Iterator<Row> iterator = resultSet.iterator();
    ColumnDefinitions definitions = resultSet.getColumnDefinitions();
    while (iterator.hasNext()) {
        ODataEntry dataEntry = createDataEntryFromRow(tableName, iterator.next(), definitions);
        entryList.add(dataEntry);
    }
    return entryList;
}
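Example 6 caches each PreparedStatement under its query string so a statement is prepared only once. A minimal sketch of that caching pattern in isolation, assuming a driver 3.x Session; the class and field names are illustrative, and ConcurrentHashMap.computeIfAbsent additionally avoids the check-then-prepare race present in the plain map version above:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

public class StatementCache {

    private final Session session;
    // Keyed by the CQL text, so each distinct query is prepared exactly once
    private final ConcurrentMap<String, PreparedStatement> cache = new ConcurrentHashMap<>();

    public StatementCache(Session session) {
        this.session = session;
    }

    public ResultSet execute(String cql, Object... values) {
        PreparedStatement ps = cache.computeIfAbsent(cql, session::prepare);
        return session.execute(ps.bind(values));
    }
}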
Example 7
Source File: CassandraResultSet.java From cassandra-jdbc-driver with Apache License 2.0 | 5 votes |
protected CassandraResultSet(BaseCassandraStatement statement, CassandraCqlStatement parsedStmt, ResultSet rs) {
    super(statement, parsedStmt);
    if (rs != null) {
        for (Definition def : rs.getColumnDefinitions()) {
            CassandraColumnDefinition d = new CassandraColumnDefinition(
                    def.getKeyspace(), def.getTable(), def.getName(),
                    def.getType().getName().toString(), false);
            metadata.addColumnDefinition(d);
        }
    }
    _resultSet = rs;
}
Example 8
Source File: CqlDataDaoImpl.java From staash with Apache License 2.0 | 5 votes |
private String convertResultSet(ResultSet rs) {
    String colStr = "";
    String rowStr = "";
    JsonObject response = new JsonObject();
    List<Row> rows = rs.all();
    if (!rows.isEmpty() && rows.size() == 1) {
        rowStr = rows.get(0).toString();
    }
    ColumnDefinitions colDefs = rs.getColumnDefinitions();
    colStr = colDefs.toString();
    // Strip the "Columns[" prefix and trailing "]" from ColumnDefinitions.toString()
    response.putString("columns", colStr.substring(8, colStr.length() - 1));
    // Strip the "Row[" prefix and trailing "]" from Row.toString()
    response.putString("values", rowStr.substring(4, rowStr.length() - 1));
    return response.toString();
}
Example 9
Source File: QueryCassandra.java From localization_nifi with Apache License 2.0 | 4 votes |
/**
 * Converts a result set into Avro records and writes them to the given stream.
 *
 * @param rs The result set to convert
 * @param outStream The stream to which the Avro records will be written
 * @param timeout The max number of timeUnits to wait for a result set fetch to complete
 * @param timeUnit The unit of time (e.g. SECONDS) associated with the timeout amount
 * @return The number of rows from the result set written to the stream
 * @throws IOException If the Avro record cannot be written
 * @throws InterruptedException If a result set fetch is interrupted
 * @throws TimeoutException If a result set fetch has taken longer than the specified timeout
 * @throws ExecutionException If any error occurs during the result set fetch
 */
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream,
                                       long timeout, TimeUnit timeUnit)
        throws IOException, InterruptedException, TimeoutException, ExecutionException {
    final Schema schema = createSchema(rs);
    final GenericRecord rec = new GenericData.Record(schema);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);
        final ColumnDefinitions columnDefinitions = rs.getColumnDefinitions();
        long nrOfRows = 0;
        if (columnDefinitions != null) {
            do {
                // Grab the rows that are already fetched
                int rowsAvailableWithoutFetching = rs.getAvailableWithoutFetching();
                if (rowsAvailableWithoutFetching == 0) {
                    // Fetch the next page, honoring the timeout if one was given
                    if (timeout <= 0 || timeUnit == null) {
                        rs.fetchMoreResults().get();
                    } else {
                        rs.fetchMoreResults().get(timeout, timeUnit);
                    }
                }
                for (Row row : rs) {
                    for (int i = 0; i < columnDefinitions.size(); i++) {
                        final DataType dataType = columnDefinitions.getType(i);
                        if (row.isNull(i)) {
                            rec.put(i, null);
                        } else {
                            rec.put(i, getCassandraObject(row, i, dataType));
                        }
                    }
                    dataFileWriter.append(rec);
                    nrOfRows += 1;
                }
            } while (!rs.isFullyFetched());
        }
        return nrOfRows;
    }
}
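The do/while above relies on the driver 3.x paging API: getAvailableWithoutFetching() reports how many rows are already in memory, fetchMoreResults() asynchronously requests the next page, and isFullyFetched() signals there is nothing left. A stripped-down sketch of the same pattern, assuming an open Session and an illustrative table my_ks.my_table:

import java.util.concurrent.ExecutionException;

import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;

public final class PagingSketch {

    // Counts rows page by page instead of materializing rs.all() in memory
    static long countRows(Session session) throws InterruptedException, ExecutionException {
        ResultSet rs = session.execute("SELECT * FROM my_ks.my_table");
        long count = 0;
        do {
            if (rs.getAvailableWithoutFetching() == 0) {
                rs.fetchMoreResults().get(); // block until the next page arrives
            }
            for (Row row : rs) {
                count++;
            }
        } while (!rs.isFullyFetched());
        return count;
    }
}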
Example 10
Source File: QueryCassandra.java From localization_nifi with Apache License 2.0 | 4 votes |
/**
 * Creates an Avro schema from the given result set. The metadata (column definitions, data types, etc.) is used
 * to determine a schema for Avro.
 *
 * @param rs The result set from which an Avro schema will be created
 * @return An Avro schema corresponding to the given result set's metadata
 * @throws IOException If an error occurs during schema discovery/building
 */
public static Schema createSchema(final ResultSet rs) throws IOException {
    final ColumnDefinitions columnDefinitions = rs.getColumnDefinitions();
    final int nrOfColumns = (columnDefinitions == null ? 0 : columnDefinitions.size());
    String tableName = "NiFi_Cassandra_Query_Record";
    if (nrOfColumns > 0) {
        // Column definition indexes are zero-based, so the first column is index 0
        String tableNameFromMeta = columnDefinitions.getTable(0);
        if (!StringUtils.isBlank(tableNameFromMeta)) {
            tableName = tableNameFromMeta;
        }
    }
    final SchemaBuilder.FieldAssembler<Schema> builder =
            SchemaBuilder.record(tableName).namespace("any.data").fields();
    if (columnDefinitions != null) {
        for (int i = 0; i < nrOfColumns; i++) {
            DataType dataType = columnDefinitions.getType(i);
            if (dataType == null) {
                throw new IllegalArgumentException("No data type for column[" + i + "] with name "
                        + columnDefinitions.getName(i));
            }
            // Map types from Cassandra to Avro where possible
            if (dataType.isCollection()) {
                List<DataType> typeArguments = dataType.getTypeArguments();
                if (typeArguments == null || typeArguments.size() == 0) {
                    throw new IllegalArgumentException("Column[" + i + "] " + dataType.getName()
                            + " is a collection but no type arguments were specified!");
                }
                // Get the first type argument, to be used for lists and sets
                DataType firstArg = typeArguments.get(0);
                if (dataType.equals(DataType.set(firstArg)) || dataType.equals(DataType.list(firstArg))) {
                    builder.name(columnDefinitions.getName(i)).type().unionOf().nullBuilder().endNull().and()
                            .array().items(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(firstArg)))
                            .endUnion().noDefault();
                } else {
                    // Must be an n-arg collection like map
                    DataType secondArg = typeArguments.get(1);
                    if (dataType.equals(DataType.map(firstArg, secondArg))) {
                        builder.name(columnDefinitions.getName(i)).type().unionOf().nullBuilder().endNull().and()
                                .map().values(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(secondArg)))
                                .endUnion().noDefault();
                    }
                }
            } else {
                builder.name(columnDefinitions.getName(i))
                        .type(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(dataType))).noDefault();
            }
        }
    }
    return builder.endRecord();
}
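To see what createSchema produces, the generated Avro schema can be pretty-printed as JSON. A sketch assuming NiFi's QueryCassandra class is on the classpath and a node at 127.0.0.1 with an illustrative table my_ks.my_table; a LIMIT 1 query suffices because createSchema only reads rs.getColumnDefinitions():

import org.apache.avro.Schema;

import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Session;

// Assumes the NiFi processor class above is importable, e.g. from
// org.apache.nifi.processors.cassandra.QueryCassandra
public class SchemaPreview {
    public static void main(String[] args) throws Exception {
        // Contact point and table name are illustrative assumptions
        try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
             Session session = cluster.connect()) {
            ResultSet rs = session.execute("SELECT * FROM my_ks.my_table LIMIT 1");
            Schema schema = QueryCassandra.createSchema(rs);
            System.out.println(schema.toString(true)); // pretty-printed Avro schema JSON
        }
    }
}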
Example 11
Source File: QueryCassandra.java From nifi with Apache License 2.0 | 4 votes |
/**
 * Converts a result set into Avro records and writes them to the given stream.
 *
 * @param rs The result set to convert
 * @param outStream The stream to which the Avro records will be written
 * @param timeout The max number of timeUnits to wait for a result set fetch to complete
 * @param timeUnit The unit of time (e.g. SECONDS) associated with the timeout amount
 * @return The number of rows from the result set written to the stream
 * @throws IOException If the Avro record cannot be written
 * @throws InterruptedException If a result set fetch is interrupted
 * @throws TimeoutException If a result set fetch has taken longer than the specified timeout
 * @throws ExecutionException If any error occurs during the result set fetch
 */
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream,
                                       long timeout, TimeUnit timeUnit)
        throws IOException, InterruptedException, TimeoutException, ExecutionException {
    final Schema schema = createSchema(rs);
    final GenericRecord rec = new GenericData.Record(schema);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);
        final ColumnDefinitions columnDefinitions = rs.getColumnDefinitions();
        long nrOfRows = 0;
        if (columnDefinitions != null) {
            do {
                // Grab the rows that are already fetched
                int rowsAvailableWithoutFetching = rs.getAvailableWithoutFetching();
                if (rowsAvailableWithoutFetching == 0) {
                    // Fetch the next page, honoring the timeout if one was given
                    if (timeout <= 0 || timeUnit == null) {
                        rs.fetchMoreResults().get();
                    } else {
                        rs.fetchMoreResults().get(timeout, timeUnit);
                    }
                }
                for (Row row : rs) {
                    for (int i = 0; i < columnDefinitions.size(); i++) {
                        final DataType dataType = columnDefinitions.getType(i);
                        if (row.isNull(i)) {
                            rec.put(i, null);
                        } else {
                            rec.put(i, getCassandraObject(row, i, dataType));
                        }
                    }
                    dataFileWriter.append(rec);
                    nrOfRows += 1;
                }
            } while (!rs.isFullyFetched());
        }
        return nrOfRows;
    }
}
Example 12
Source File: QueryCassandra.java From nifi with Apache License 2.0 | 4 votes |
/**
 * Creates an Avro schema from the given result set. The metadata (column definitions, data types, etc.) is used
 * to determine a schema for Avro.
 *
 * @param rs The result set from which an Avro schema will be created
 * @return An Avro schema corresponding to the given result set's metadata
 * @throws IOException If an error occurs during schema discovery/building
 */
public static Schema createSchema(final ResultSet rs) throws IOException {
    final ColumnDefinitions columnDefinitions = rs.getColumnDefinitions();
    final int nrOfColumns = (columnDefinitions == null ? 0 : columnDefinitions.size());
    String tableName = "NiFi_Cassandra_Query_Record";
    if (nrOfColumns > 0) {
        String tableNameFromMeta = columnDefinitions.getTable(0);
        if (!StringUtils.isBlank(tableNameFromMeta)) {
            tableName = tableNameFromMeta;
        }
    }
    final SchemaBuilder.FieldAssembler<Schema> builder =
            SchemaBuilder.record(tableName).namespace("any.data").fields();
    if (columnDefinitions != null) {
        for (int i = 0; i < nrOfColumns; i++) {
            DataType dataType = columnDefinitions.getType(i);
            if (dataType == null) {
                throw new IllegalArgumentException("No data type for column[" + i + "] with name "
                        + columnDefinitions.getName(i));
            }
            // Map types from Cassandra to Avro where possible
            if (dataType.isCollection()) {
                List<DataType> typeArguments = dataType.getTypeArguments();
                if (typeArguments == null || typeArguments.size() == 0) {
                    throw new IllegalArgumentException("Column[" + i + "] " + dataType.getName()
                            + " is a collection but no type arguments were specified!");
                }
                // Get the first type argument, to be used for lists and sets
                DataType firstArg = typeArguments.get(0);
                if (dataType.equals(DataType.set(firstArg)) || dataType.equals(DataType.list(firstArg))) {
                    builder.name(columnDefinitions.getName(i)).type().unionOf().nullBuilder().endNull().and()
                            .array().items(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(firstArg)))
                            .endUnion().noDefault();
                } else {
                    // Must be an n-arg collection like map
                    DataType secondArg = typeArguments.get(1);
                    if (dataType.equals(DataType.map(firstArg, secondArg))) {
                        builder.name(columnDefinitions.getName(i)).type().unionOf().nullBuilder().endNull().and()
                                .map().values(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(secondArg)))
                                .endUnion().noDefault();
                    }
                }
            } else {
                builder.name(columnDefinitions.getName(i))
                        .type(getUnionFieldType(getPrimitiveAvroTypeFromCassandraType(dataType))).noDefault();
            }
        }
    }
    return builder.endRecord();
}