Java Code Examples for org.rocksdb.WriteBatch#put()
The following examples show how to use org.rocksdb.WriteBatch#put().
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: RocksDBBlockHeaderStorage.java From WeCross with Apache License 2.0 | 6 votes |
/**
 * Persists a block header and updates the latest-block-number marker in a
 * single atomic RocksDB write.
 *
 * @param blockNumber the block height used to build the storage key
 * @param blockHeader the encoded block header bytes to store
 */
@Override
public void writeBlockHeader(long blockNumber, byte[] blockHeader) {
    // Refuse writes once the database handle has been released.
    if (dbClosed) {
        logger.warn("Write RocksDB error: RocksDB has been closed");
        return;
    }
    String key = blockKeyPrefix + String.valueOf(blockNumber);
    // WriteBatch and WriteOptions wrap native handles; try-with-resources
    // closes them and prevents an off-heap memory leak (the original code
    // never closed either object).
    try (WriteBatch writeBatch = new WriteBatch();
            WriteOptions writeOptions = new WriteOptions()) {
        // Store the current block number under numberKey together with the
        // header so both are committed atomically by the batched write.
        writeBatch.put(numberKey.getBytes(), String.valueOf(blockNumber).getBytes());
        writeBatch.put(key.getBytes(), blockHeader);
        rocksDB.write(writeOptions, writeBatch);
        // Notify listeners only after the write succeeded.
        onBlockHeader(blockNumber, blockHeader);
    } catch (RocksDBException e) {
        logger.error("RocksDB write error", e);
    }
}
Example 2
Source File: RocksDBCache.java From kcache with Apache License 2.0 | 5 votes |
/**
 * Stages a single mutation on the given write batch.
 *
 * @param key   the key to write or delete
 * @param value the value to associate with the key; {@code null} means the
 *              key should be deleted (tombstone)
 * @param batch the batch that accumulates the mutation
 * @throws RocksDBException if the batch rejects the operation
 */
@Override
public void addToBatch(final byte[] key, final byte[] value, final WriteBatch batch)
        throws RocksDBException {
    // A null value is interpreted as a delete request for the key.
    if (value != null) {
        batch.put(columnFamily, key, value);
    } else {
        batch.delete(columnFamily, key);
    }
}
Example 3
Source File: RocksDBDAO.java From hudi with Apache License 2.0 | 5 votes |
/**
 * Helper to add a put operation to a batch, serializing both key and value.
 *
 * @param batch Batch Handle
 * @param columnFamilyName Column Family
 * @param key Key
 * @param value Payload
 * @param <K> Type of key
 * @param <T> Type of payload
 */
public <K extends Serializable, T extends Serializable> void putInBatch(
        WriteBatch batch, String columnFamilyName, K key, T value) {
    try {
        final byte[] serializedKey = SerializationUtils.serialize(key);
        final byte[] serializedValue = SerializationUtils.serialize(value);
        // Resolve the column family handle lazily from the managed map.
        batch.put(managedHandlesMap.get(columnFamilyName), serializedKey, serializedValue);
    } catch (Exception e) {
        // Wrap with cause preserved so callers see the original failure.
        throw new HoodieException(e);
    }
}
Example 4
Source File: WindowedRocksDbHdfsState.java From jstorm with Apache License 2.0 | 5 votes |
/**
 * Writes all entries of the given map into the column family associated
 * with the time window, as one atomic batched RocksDB write.
 *
 * @param window the time window selecting the target column family
 * @param batch  the key/value pairs to persist
 * @throws RuntimeException wrapping the underlying {@link RocksDBException}
 *                          if the write fails
 */
@Override
public void putBatch(TimeWindow window, Map<K, V> batch) {
    // try-with-resources closes the native WriteBatch/WriteOptions handles;
    // the original code leaked both on every call.
    try (WriteBatch writeBatch = new WriteBatch();
            WriteOptions writeOptions = new WriteOptions()) {
        ColumnFamilyHandle handler = getColumnFamilyHandle(window);
        for (Map.Entry<K, V> entry : batch.entrySet()) {
            writeBatch.put(handler, serializer.serialize(entry.getKey()),
                    serializer.serialize(entry.getValue()));
        }
        rocksDb.write(writeOptions, writeBatch);
    } catch (RocksDBException e) {
        // Log with the throwable attached (SLF4J appends the stack trace)
        // and preserve the cause instead of flattening it to a message.
        LOG.error("Failed to put batch={} for window={}", batch, window, e);
        throw new RuntimeException(e);
    }
}
Example 5
Source File: RocksDbHdfsState.java From jstorm with Apache License 2.0 | 5 votes |
/**
 * Writes all entries of the given map into RocksDB as one atomic batched
 * write.
 *
 * @param batch the key/value pairs to persist
 * @throws RuntimeException wrapping the underlying {@link RocksDBException}
 *                          if the write fails
 */
@Override
public void putBatch(Map<K, V> batch) {
    // try-with-resources closes the native WriteBatch/WriteOptions handles;
    // the original code leaked both on every call.
    try (WriteBatch writeBatch = new WriteBatch();
            WriteOptions writeOptions = new WriteOptions()) {
        for (Map.Entry<K, V> entry : batch.entrySet()) {
            writeBatch.put(serializer.serialize(entry.getKey()),
                    serializer.serialize(entry.getValue()));
        }
        rocksDb.write(writeOptions, writeBatch);
    } catch (RocksDBException e) {
        // Log with the throwable attached (SLF4J appends the stack trace)
        // and preserve the cause instead of flattening it to a message.
        LOG.error("Failed to put batch={}", batch, e);
        throw new RuntimeException(e);
    }
}
Example 6
Source File: RocksDBLogStorage.java From sofa-jraft with Apache License 2.0 | 4 votes |
/**
 * Stages a configuration log entry on the batch, writing the encoded entry
 * under its log-index key to both the default and the configuration column
 * families.
 *
 * @param entry the configuration log entry to stage
 * @param batch the write batch that accumulates the puts
 * @throws RocksDBException if the batch rejects an operation
 */
private void addConfBatch(final LogEntry entry, final WriteBatch batch) throws RocksDBException {
    final byte[] keyBytes = getKeyBytes(entry.getId().getIndex());
    final byte[] encoded = this.logEntryEncoder.encode(entry);
    // The same encoded payload goes to both column families.
    batch.put(this.defaultHandle, keyBytes, encoded);
    batch.put(this.confHandle, keyBytes, encoded);
}
Example 7
Source File: RocksDBLogStorage.java From sofa-jraft with Apache License 2.0 | 4 votes |
/**
 * Stages a data log entry on the batch: encodes the entry and writes it to
 * the default column family under its log-index key, routing the payload
 * through {@code onDataAppend} first.
 *
 * @param entry the data log entry to stage
 * @param batch the write batch that accumulates the put
 * @param ctx   the write context passed through to {@code onDataAppend}
 * @throws RocksDBException     if the batch rejects the operation
 * @throws IOException          if {@code onDataAppend} fails on I/O
 * @throws InterruptedException if {@code onDataAppend} is interrupted
 */
private void addDataBatch(final LogEntry entry, final WriteBatch batch, final WriteContext ctx)
        throws RocksDBException, IOException, InterruptedException {
    final long index = entry.getId().getIndex();
    final byte[] encoded = this.logEntryEncoder.encode(entry);
    batch.put(this.defaultHandle, getKeyBytes(index), onDataAppend(index, encoded, ctx));
}
Example 8
Source File: TestDB.java From DDMQ with Apache License 2.0 | 4 votes |
// Benchmark-style test: bulk-loads three 100k-key ranges (prefixes
// 1324356527-, 1324356525-, 1324356529-) via async batched writes flushed
// every 30 iterations, times an iterator scan from prefix 1324356527,
// calls deleteFilesInRange, then re-scans from prefix 1324356525 and prints
// timings/counts. NOTE(review): the deleteFilesInRange bounds
// ("132435653" .. "1324356529") have start > end lexicographically —
// presumably intended as "1324356527".."1324356529"; verify against RDB's
// contract. NOTE(review): WriteBatch/iterators hold native handles; wb is
// never closed — potential off-heap leak in this test.
@Test public void testDeleteRange() { init(); WriteBatch wb = new WriteBatch(); ColumnFamilyHandle cfHandle = CFManager.CFH_DEFAULT; long st = System.currentTimeMillis(); for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356527-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356525-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356529-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } RDB.writeAsync(wb); long ed = System.currentTimeMillis(); System.out.println("write cost :" + (ed - st)); try { TimeUnit.SECONDS.sleep(2); } catch (InterruptedException e) { e.printStackTrace(); } long start = System.currentTimeMillis(); RocksIterator it = RDB.newIterator(cfHandle); byte[] now = "1324356527".getBytes(); long count = 0; for(it.seek(now); it.isValid(); it.next()) { // System.out.println(new String(it.key()) + " " + new String(it.value())); count++; if(count == 100000) break; } it.close(); long end = System.currentTimeMillis(); System.out.println("cost : " + (end - start) + " count:" +count); RDB.deleteFilesInRange(CFManager.CFH_DEFAULT, "132435653".getBytes(), "1324356529".getBytes()); count = 0; it = RDB.newIterator(cfHandle); now = "1324356525".getBytes(); for(it.seek(now); it.isValid(); it.next()) { // System.out.println(new String(it.key()) + " " + new String(it.value())); count++; if(count == 100000) break; } it.close(); end = System.currentTimeMillis(); System.out.println("cost : " + (end - start) + " count:" +count); destructor(); }
Example 9
Source File: TestDB.java From DDMQ with Apache License 2.0 | 4 votes |
// NOTE(review): this method is byte-identical to Example 8 above — likely a
// duplicate produced when these snippets were collected. Benchmark-style
// test: bulk-loads three 100k-key ranges via async batched writes flushed
// every 30 iterations, times an iterator scan, calls deleteFilesInRange,
// re-scans and prints timings/counts. NOTE(review): the
// deleteFilesInRange bounds ("132435653" .. "1324356529") have start > end
// lexicographically — verify intended range. The WriteBatch wb is never
// closed (native handle leak in test code).
@Test public void testDeleteRange() { init(); WriteBatch wb = new WriteBatch(); ColumnFamilyHandle cfHandle = CFManager.CFH_DEFAULT; long st = System.currentTimeMillis(); for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356527-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356525-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } for(int i=100000; i<200000; i++) { wb.put(cfHandle, ("1324356529-" + i + "-5-5-345-356-234-232").getBytes(), "tasdfasdgasdfestfordb".getBytes()); if(i % 30 == 0) { RDB.writeAsync(wb); wb.clear(); } } RDB.writeAsync(wb); long ed = System.currentTimeMillis(); System.out.println("write cost :" + (ed - st)); try { TimeUnit.SECONDS.sleep(2); } catch (InterruptedException e) { e.printStackTrace(); } long start = System.currentTimeMillis(); RocksIterator it = RDB.newIterator(cfHandle); byte[] now = "1324356527".getBytes(); long count = 0; for(it.seek(now); it.isValid(); it.next()) { // System.out.println(new String(it.key()) + " " + new String(it.value())); count++; if(count == 100000) break; } it.close(); long end = System.currentTimeMillis(); System.out.println("cost : " + (end - start) + " count:" +count); RDB.deleteFilesInRange(CFManager.CFH_DEFAULT, "132435653".getBytes(), "1324356529".getBytes()); count = 0; it = RDB.newIterator(cfHandle); now = "1324356525".getBytes(); for(it.seek(now); it.isValid(); it.next()) { // System.out.println(new String(it.key()) + " " + new String(it.value())); count++; if(count == 100000) break; } it.close(); end = System.currentTimeMillis(); System.out.println("cost : " + (end - start) + " count:" +count); destructor(); }
Example 10
Source File: RocksDBDAO.java From hudi with Apache License 2.0 | 3 votes |
/** * Helper to add put operation in batch. * * @param batch Batch Handle * @param columnFamilyName Column Family * @param key Key * @param value Payload * @param <T> Type of payload */ public <T extends Serializable> void putInBatch(WriteBatch batch, String columnFamilyName, String key, T value) { try { byte[] payload = SerializationUtils.serialize(value); batch.put(managedHandlesMap.get(columnFamilyName), key.getBytes(), payload); } catch (Exception e) { throw new HoodieException(e); } }