Java Code Examples for io.netty.handler.codec.http.multipart.HttpPostRequestEncoder#ErrorDataEncoderException
The following examples show how to use io.netty.handler.codec.http.multipart.HttpPostRequestEncoder#ErrorDataEncoderException. The source project and file are noted above each example.
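Before the project examples, here is a minimal, self-contained sketch of where this checked exception typically surfaces: HttpPostRequestEncoder throws ErrorDataEncoderException when a multipart body cannot be built, for example while adding attributes or file uploads, or while finalizing the request. The class MultipartRequestSketch, the method buildMultipartPost, and its parameters are hypothetical names introduced only for illustration; they are not taken from the projects below.

import java.io.IOException;

import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.multipart.HttpPostRequestEncoder;
import io.netty.handler.codec.http.multipart.MemoryFileUpload;

// Hypothetical helper class for illustration; not part of the projects shown below.
public final class MultipartRequestSketch {

    private MultipartRequestSketch() {}

    // Builds a multipart POST request and shows where ErrorDataEncoderException is thrown and caught.
    public static HttpRequest buildMultipartPost(
            String uri, String attrName, String attrValue, byte[] payload) throws IOException {
        DefaultFullHttpRequest req =
                new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, uri);
        try {
            // The boolean flag selects multipart/form-data encoding.
            HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(req, true);
            encoder.addBodyAttribute(attrName, attrValue);

            MemoryFileUpload upload =
                    new MemoryFileUpload(
                            "data", "data.bin", "application/octet-stream", null, null, payload.length);
            upload.setContent(Unpooled.wrappedBuffer(payload));
            encoder.addBodyHttpData(upload);

            // finalizeRequest() assembles the multipart headers and body; it may also throw.
            return encoder.finalizeRequest();
        } catch (HttpPostRequestEncoder.ErrorDataEncoderException e) {
            // The exception covered by this page: the request body could not be encoded.
            throw new IllegalStateException("Failed to encode multipart request", e);
        }
    }
}

The examples that follow declare the exception in their throws clauses instead of catching it, which is the more common pattern in test code.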
Example 1
Source File: HttpClient.java From multi-model-server with Apache License 2.0
public boolean predict(
        String modelName, DefaultFullHttpRequest req, HttpPostRequestEncoder requestEncoder)
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    Channel channel = connect(bootstrap, inferencePort);
    req.setUri("/predictions/" + URLEncoder.encode(modelName, StandardCharsets.UTF_8.name()));

    channel.writeAndFlush(requestEncoder.finalizeRequest());
    if (requestEncoder.isChunked()) {
        channel.writeAndFlush(requestEncoder).sync();
    }
    channel.closeFuture().sync();

    int statusCode = handler.getStatusCode();
    String ret = handler.getContent();
    if (statusCode == 200) {
        logger.info("predict: {} success.", modelName);
        logger.trace(ret);
        return true;
    }
    logger.warn("predict: {} failed: {}", modelName, ret);
    return false;
}
Example 2
Source File: Cts.java From multi-model-server with Apache License 2.0
private void runTest(HttpClient client, ModelInfo info, Logger logger)
        throws HttpPostRequestEncoder.ErrorDataEncoderException, InterruptedException, IOException {
    String modelName = info.getModelName();
    String url = info.getUrl();
    int type = info.getType();
    logger.info("Testing model: {}={}", modelName, url);
    if (!client.registerModel(modelName, url)) {
        failedModels.add(url);
        return;
    }

    try {
        if (!predict(client, type, modelName)) {
            failedModels.add(url);
        }
    } finally {
        if (!client.unregisterModel(modelName)) {
            failedModels.add(url);
        }
    }
}
Example 3
Source File: ModelServerTest.java From multi-model-server with Apache License 2.0
private void testInvocationsMultipart(Channel channel)
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    result = null;
    latch = new CountDownLatch(1);
    DefaultFullHttpRequest req =
            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/invocations");

    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(req, true);
    encoder.addBodyAttribute("model_name", "noop_v0.1");
    MemoryFileUpload body = new MemoryFileUpload("data", "test.txt", "text/plain", null, null, 4);
    body.setContent(Unpooled.copiedBuffer("test", StandardCharsets.UTF_8));
    encoder.addBodyHttpData(body);

    channel.writeAndFlush(encoder.finalizeRequest());
    if (encoder.isChunked()) {
        channel.writeAndFlush(encoder).sync();
    }
    latch.await();

    Assert.assertEquals(result, "OK");
}
Example 4
Source File: ModelServerTest.java From multi-model-server with Apache License 2.0
private void testModelsInvokeMultipart(Channel channel)
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    result = null;
    latch = new CountDownLatch(1);
    DefaultFullHttpRequest req =
            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/models/noop/invoke");

    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(req, true);
    MemoryFileUpload body = new MemoryFileUpload("data", "test.txt", "text/plain", null, null, 4);
    body.setContent(Unpooled.copiedBuffer("test", StandardCharsets.UTF_8));
    encoder.addBodyHttpData(body);

    channel.writeAndFlush(encoder.finalizeRequest());
    if (encoder.isChunked()) {
        channel.writeAndFlush(encoder).sync();
    }
    latch.await();

    Assert.assertEquals(result, "OK");
}
Example 5
Source File: NettyMultipartRequestTest.java From ambry with Apache License 2.0
/**
 * Creates a {@link HttpPostRequestEncoder} that encodes the given {@code request} and {@code parts}.
 * @param request the {@link HttpRequest} containing headers and other metadata about the request.
 * @param parts the {@link InMemoryFile}s that will form the parts of the request.
 * @return a {@link HttpPostRequestEncoder} that can encode the {@code request} and {@code parts}.
 * @throws HttpPostRequestEncoder.ErrorDataEncoderException
 * @throws IOException
 */
private HttpPostRequestEncoder createEncoder(HttpRequest request, InMemoryFile[] parts)
        throws HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    HttpDataFactory httpDataFactory = new DefaultHttpDataFactory(false);
    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(httpDataFactory, request, true);
    if (parts != null) {
        for (InMemoryFile part : parts) {
            FileUpload fileUpload =
                    new MemoryFileUpload(part.name, part.name, "application/octet-stream", "",
                            Charset.forName("UTF-8"), part.content.remaining());
            fileUpload.setContent(Unpooled.wrappedBuffer(part.content));
            encoder.addBodyHttpData(fileUpload);
        }
    }
    return encoder;
}
Example 6
Source File: ModelServerTest.java From serve with Apache License 2.0
@Test(
        alwaysRun = true,
        dependsOnMethods = {"testInvocationsJson"})
public void testInvocationsMultipart()
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    Channel channel = TestUtils.getInferenceChannel(configManager);
    TestUtils.setResult(null);
    TestUtils.setLatch(new CountDownLatch(1));
    DefaultFullHttpRequest req =
            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/invocations");

    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(req, true);
    encoder.addBodyAttribute("model_name", "noop_v1.0");
    MemoryFileUpload body = new MemoryFileUpload("data", "test.txt", "text/plain", null, null, 4);
    body.setContent(Unpooled.copiedBuffer("test", StandardCharsets.UTF_8));
    encoder.addBodyHttpData(body);

    channel.writeAndFlush(encoder.finalizeRequest());
    if (encoder.isChunked()) {
        channel.writeAndFlush(encoder).sync();
    }
    TestUtils.getLatch().await();

    Assert.assertEquals(TestUtils.getResult(), "OK");
}
Example 7
Source File: ModelServerTest.java From serve with Apache License 2.0
@Test(
        alwaysRun = true,
        dependsOnMethods = {"testModelsInvokeJson"})
public void testModelsInvokeMultipart()
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    Channel channel = TestUtils.getInferenceChannel(configManager);
    TestUtils.setResult(null);
    TestUtils.setLatch(new CountDownLatch(1));
    DefaultFullHttpRequest req =
            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/models/noop/invoke");

    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(req, true);
    MemoryFileUpload body = new MemoryFileUpload("data", "test.txt", "text/plain", null, null, 4);
    body.setContent(Unpooled.copiedBuffer("test", StandardCharsets.UTF_8));
    encoder.addBodyHttpData(body);

    channel.writeAndFlush(encoder.finalizeRequest());
    if (encoder.isChunked()) {
        channel.writeAndFlush(encoder).sync();
    }
    TestUtils.getLatch().await();

    Assert.assertEquals(TestUtils.getResult(), "OK");
}
Example 8
Source File: NettyMessageProcessorTest.java From ambry with Apache License 2.0
/**
 * Creates a {@link HttpPostRequestEncoder} that encodes the given {@code request} and {@code blobContent}.
 * @param request the {@link HttpRequest} containing headers and other metadata about the request.
 * @param blobContent the {@link ByteBuffer} that represents the content of the blob.
 * @return a {@link HttpPostRequestEncoder} that can encode the {@code request} and {@code blobContent}.
 * @throws HttpPostRequestEncoder.ErrorDataEncoderException
 * @throws IOException
 */
private HttpPostRequestEncoder createEncoder(HttpRequest request, ByteBuffer blobContent)
        throws HttpPostRequestEncoder.ErrorDataEncoderException, IOException {
    HttpDataFactory httpDataFactory = new DefaultHttpDataFactory(false);
    HttpPostRequestEncoder encoder = new HttpPostRequestEncoder(httpDataFactory, request, true);
    FileUpload fileUpload =
            new MemoryFileUpload(RestUtils.MultipartPost.BLOB_PART, RestUtils.MultipartPost.BLOB_PART,
                    "application/octet-stream", "", Charset.forName("UTF-8"), blobContent.remaining());
    fileUpload.setContent(Unpooled.wrappedBuffer(blobContent));
    encoder.addBodyHttpData(fileUpload);
    return encoder;
}
Example 9
Source File: ModelServerTest.java From multi-model-server with Apache License 2.0
@Test
public void test()
        throws InterruptedException, HttpPostRequestEncoder.ErrorDataEncoderException, IOException,
                NoSuchFieldException, IllegalAccessException {
    Channel channel = null;
    Channel managementChannel = null;
    for (int i = 0; i < 5; ++i) {
        channel = connect(false);
        if (channel != null) {
            break;
        }
        Thread.sleep(100);
    }

    for (int i = 0; i < 5; ++i) {
        managementChannel = connect(true);
        if (managementChannel != null) {
            break;
        }
        Thread.sleep(100);
    }

    Assert.assertNotNull(channel, "Failed to connect to inference port.");
    Assert.assertNotNull(managementChannel, "Failed to connect to management port.");

    testPing(channel);

    testRoot(channel, listInferenceApisResult);
    testRoot(managementChannel, listManagementApisResult);
    testApiDescription(channel, listInferenceApisResult);
    testDescribeApi(channel);
    testUnregisterModel(managementChannel);
    testLoadModel(managementChannel);
    testSyncScaleModel(managementChannel);
    testScaleModel(managementChannel);
    testListModels(managementChannel);
    testDescribeModel(managementChannel);
    testLoadModelWithInitialWorkers(managementChannel);
    testLoadModelWithInitialWorkersWithJSONReqBody(managementChannel);
    testPredictions(channel);
    testPredictionsBinary(channel);
    testPredictionsJson(channel);
    testInvocationsJson(channel);
    testInvocationsMultipart(channel);
    testModelsInvokeJson(channel);
    testModelsInvokeMultipart(channel);
    testLegacyPredict(channel);
    testPredictionsInvalidRequestSize(channel);
    testPredictionsValidRequestSize(channel);
    testPredictionsDecodeRequest(channel, managementChannel);
    testPredictionsDoNotDecodeRequest(channel, managementChannel);
    testPredictionsModifyResponseHeader(channel, managementChannel);
    testPredictionsNoManifest(channel, managementChannel);
    testModelRegisterWithDefaultWorkers(managementChannel);
    testLogging(channel, managementChannel);
    testLoggingUnload(channel, managementChannel);
    testLoadingMemoryError();
    testPredictionMemoryError();
    testMetricManager();
    testErrorBatch();

    channel.close();
    managementChannel.close();

    // negative test case, channel will be closed by server
    testInvalidRootRequest();
    testInvalidInferenceUri();
    testInvalidPredictionsUri();
    testInvalidDescribeModel();
    testPredictionsModelNotFound();
    testInvalidManagementUri();
    testInvalidModelsMethod();
    testInvalidModelMethod();
    testDescribeModelNotFound();
    testRegisterModelMissingUrl();
    testRegisterModelInvalidRuntime();
    testRegisterModelNotFound();
    testRegisterModelConflict();
    testRegisterModelMalformedUrl();
    testRegisterModelConnectionFailed();
    testRegisterModelHttpError();
    testRegisterModelInvalidPath();
    testScaleModelNotFound();
    testScaleModelFailure();
    testUnregisterModelNotFound();
    testUnregisterModelTimeout();
    testInvalidModel();
}