org.elasticsearch.common.io.stream.ByteBufferStreamInput Java Examples
The following examples show how to use
org.elasticsearch.common.io.stream.ByteBufferStreamInput.
You can vote up the examples you find helpful or vote down those you don't,
and you can navigate to the original project or source file by following the links above each example. Related API usage is listed in the sidebar.
Example #1
Source File: LegacyDetectorRepositoryImplTest.java From adaptive-alerting with Apache License 2.0 | 6 votes |
private DeleteResponse mockDeleteResponse(String id) { DeleteResponse deleteResponse = mock(DeleteResponse.class); Result ResultOpt; when(deleteResponse.getId()).thenReturn(id); String indexName = "index"; when(deleteResponse.getIndex()).thenReturn(indexName); try { byte[] byteOpt = new byte[]{2}; // 2 - DELETED, DeleteResponse.Result ByteBuffer byteBuffer = ByteBuffer.wrap(byteOpt); ByteBufferStreamInput byteOptBufferStream = new ByteBufferStreamInput(byteBuffer); ResultOpt = DocWriteResponse.Result.readFrom(byteOptBufferStream); when(deleteResponse.getResult()).thenReturn(ResultOpt); } catch (IOException e) { } return deleteResponse; }
Example #2
Source File: DetectorMappingRepositoryImplTest.java From adaptive-alerting with Apache License 2.0 | 6 votes |
private DeleteResponse mockDeleteResponse(String id) { DeleteResponse deleteResponse = mock(DeleteResponse.class); Result ResultOpt; when(deleteResponse.getId()).thenReturn(id); String indexName = elasticSearchProperties.getIndexName(); when(deleteResponse.getIndex()).thenReturn(indexName); try { byte[] byteopt = new byte[]{2}; // 2 - DELETED, DeleteResponse.Result ByteBuffer byteBuffer = ByteBuffer.wrap(byteopt); ByteBufferStreamInput byteoptbytebufferstream = new ByteBufferStreamInput(byteBuffer); ResultOpt = DocWriteResponse.Result.readFrom(byteoptbytebufferstream); when(deleteResponse.getResult()).thenReturn(ResultOpt); } catch (IOException e) { } return deleteResponse; }
Example #3
Source File: LocalTranslog.java From Elasticsearch with Apache License 2.0 | 6 votes |
public Translog.Operation readFromLocal(Location location) { try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); long fileGeneration = location.generation; FileChannel readChannel = readChannels.get(fileGeneration); if (readChannel == null) { readChannel = openReader(fileGeneration); } if (readChannel == null) { // throw exception since could not find reader throw new ElasticsearchException("could not open reader for file generation {}", fileGeneration); } ByteBuffer buffer = ByteBuffer.allocate(location.size); Channels.readFromFileChannelWithEofException(readChannel, location.translogLocation, buffer); buffer.flip(); ByteBufferStreamInput in = new ByteBufferStreamInput(buffer); Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte()); Translog.Operation operation = DistributedTranslog.newOperationFromType(type); operation.readFrom(in); in.close(); return operation; } catch (IOException e) { logger.error("errors while read from local", e); throw new ElasticsearchException("failed to read source from translog location " + location, e); } }
Example #4
Source File: StoredLtrModelParserTests.java From elasticsearch-learning-to-rank with Apache License 2.0 | 6 votes |
/**
 * Round-trips an {@code LtrModelDefinition} (including feature normalizers)
 * through the stream serialization layer and verifies all fields survive.
 */
public void testSerializationModelDef() throws IOException {
    String modelDefnJson = "{\n" +
            " \"type\": \"model/dummy\",\n" +
            " \"definition\": \"completely ignored\",\n"+
            " \"feature_normalizers\": {\n"+
            " \"feature_2\": { \"min_max\":" +
            " {\"minimum\": 1.0," +
            " \"maximum\": 1.25}}}}";
    XContentParser xContent = jsonXContent.createParser(EMPTY, LoggingDeprecationHandler.INSTANCE, modelDefnJson);
    StoredLtrModel.LtrModelDefinition parsed = StoredLtrModel.LtrModelDefinition.parse(xContent, null);

    // Serialize to a byte stream...
    BytesStreamOutput serialized = new BytesStreamOutput();
    parsed.writeTo(serialized);
    serialized.close();

    // ...then deserialize from the raw bytes and compare field by field.
    BytesRef raw = serialized.bytes().toBytesRef();
    StreamInput deserializer = ByteBufferStreamInput.wrap(raw.bytes, raw.offset, raw.length);
    StoredLtrModel.LtrModelDefinition roundTripped = new StoredLtrModel.LtrModelDefinition(deserializer);

    assertEquals(roundTripped.getDefinition(), parsed.getDefinition());
    assertEquals(roundTripped.getType(), parsed.getType());
    assertEquals(roundTripped.getFtrNorms(), parsed.getFtrNorms());
}
Example #5
Source File: StoredLtrModelParserTests.java From elasticsearch-learning-to-rank with Apache License 2.0 | 6 votes |
public void testSerializationUpgradeBinaryStream() throws IOException { // Below is base64 encoded a model with no feature norm data // to ensure proper parsing of a binary stream missing ftr norms // // String modelDefnJson = "{\n" + // " \"type\": \"model/dummy\",\n" + // " \"definition\": \"completely ignored\"}"; String base64Encoded = "C21vZGVsL2R1bW15EmNvbXBsZXRlbHkgaWdub3JlZAE="; byte[] bytes = Base64.getDecoder().decode(base64Encoded); StreamInput input = ByteBufferStreamInput.wrap(bytes, 0, bytes.length); input.setVersion(Version.V_7_6_0); StoredLtrModel.LtrModelDefinition modelUnserialized = new StoredLtrModel.LtrModelDefinition(input); assertEquals(modelUnserialized.getDefinition(), "completely ignored"); assertEquals(modelUnserialized.getType(), "model/dummy"); assertEquals(modelUnserialized.getFtrNorms().numNormalizers(), 0); }
Example #6
Source File: DistributedTranslog.java From Elasticsearch with Apache License 2.0 | 5 votes |
/**
 * Decodes a translog {@link Operation} from a DistributedLog record payload.
 * The first byte of the payload identifies the operation type; the remainder
 * is the operation body.
 *
 * @param logRecord record whose payload holds one serialized operation
 * @return the deserialized operation
 * @throws IOException if the payload cannot be read
 */
public static Operation getOperationFromLogRecord(LogRecordWithDLSN logRecord) throws IOException {
    ByteBuffer payload = ByteBuffer.wrap(logRecord.getPayload());
    // try-with-resources: the original closed the stream manually, leaking it
    // (nominally) when readByte()/readFrom() threw before reaching close().
    try (ByteBufferStreamInput in = new ByteBufferStreamInput(payload)) {
        Translog.Operation.Type type = Translog.Operation.Type.fromId(in.readByte());
        Translog.Operation operation = newOperationFromType(type);
        operation.readFrom(in);
        return operation;
    }
}
Example #7
Source File: TranslogReader.java From Elasticsearch with Apache License 2.0 | 5 votes |
/**
 * Reads {@code opSize} bytes at {@code position} and wraps them in a
 * checksumming stream. Reuses {@code reusableBuffer} when it is large enough;
 * otherwise allocates a fresh buffer of exactly {@code opSize} bytes.
 */
private final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException {
    // Prefer the caller-supplied buffer to avoid an allocation per operation.
    final ByteBuffer buffer = reusableBuffer.capacity() >= opSize
            ? reusableBuffer
            : ByteBuffer.allocate(opSize);
    buffer.clear();
    buffer.limit(opSize);
    readBytes(buffer, position);
    buffer.flip();
    return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse);
}
Example #8
Source File: StoredLtrModelParserTests.java From elasticsearch-learning-to-rank with Apache License 2.0 | 5 votes |
public void testSerialization() throws IOException { String modelJson = "{\n" + " \"name\":\"my_model\",\n" + " \"feature_set\":" + getSimpleFeatureSet() + "," + " \"model\": {\n" + " \"type\": \"model/dummy\",\n" + " \"definition\": \"completely ignored\",\n"+ " \"feature_normalizers\": {\n"+ " \"feature_2\": { \"min_max\":" + " {\"minimum\": 1.0," + " \"maximum\": 1.25}}}" + " }" + "}"; StoredLtrModel model = parse(modelJson); BytesStreamOutput out = new BytesStreamOutput(); model.writeTo(out); out.close(); BytesRef ref = out.bytes().toBytesRef(); StreamInput input = ByteBufferStreamInput.wrap(ref.bytes, ref.offset, ref.length); StoredLtrModel modelUnserialized = new StoredLtrModel(input); assertEquals(model, modelUnserialized); // Confirm model def serialization itself works }
Example #9
Source File: StoredLtrQueryBuilderTests.java From elasticsearch-learning-to-rank with Apache License 2.0 | 5 votes |
/**
 * Round-trips a {@code StoredLtrQueryBuilder} through stream serialization
 * and verifies the active-features list survives.
 */
public void testSerDe() throws IOException {
    StoredLtrQueryBuilder original =
            new StoredLtrQueryBuilder(LtrTestUtils.wrapMemStore(StoredLtrQueryBuilderTests.store));
    original.activeFeatures(Collections.singletonList("match1"));

    // Serialize the builder to raw bytes.
    BytesStreamOutput serialized = new BytesStreamOutput();
    original.writeTo(serialized);
    serialized.close();
    BytesRef raw = serialized.bytes().toBytesRef();

    // Rehydrate from the byte stream and check the feature list round-trips.
    StreamInput deserializer = ByteBufferStreamInput.wrap(raw.bytes, raw.offset, raw.length);
    StoredLtrQueryBuilder roundTripped = new StoredLtrQueryBuilder(
            LtrTestUtils.wrapMemStore(StoredLtrQueryBuilderTests.store), deserializer);
    List<String> expected = Collections.singletonList("match1");
    assertEquals(expected, roundTripped.activeFeatures());
}
Example #10
Source File: BaseTranslogReader.java From crate with Apache License 2.0 | 5 votes |
/**
 * Reads an operation at the given position and returns it wrapped in a
 * checksumming stream. The buffer length is equal to the number of bytes read.
 *
 * @param reusableBuffer caller-owned scratch buffer, reused when its capacity
 *                       is at least {@code opSize} to avoid per-call allocation
 * @param position       file offset of the serialized operation
 * @param opSize         exact size in bytes of the serialized operation
 * @param reuse          a previous checksum stream to recycle, or {@code null}
 * @throws IOException if the underlying read fails
 */
protected final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusableBuffer, long position, int opSize, BufferedChecksumStreamInput reuse) throws IOException {
    final ByteBuffer buffer;
    // Reuse the scratch buffer when large enough; otherwise allocate exactly opSize.
    if (reusableBuffer.capacity() >= opSize) {
        buffer = reusableBuffer;
    } else {
        buffer = ByteBuffer.allocate(opSize);
    }
    buffer.clear();
    buffer.limit(opSize);
    readBytes(buffer, position);
    buffer.flip();
    // path.toString() identifies the source file in checksum-failure messages.
    return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), path.toString(), reuse);
}