Python starlette.responses.FileResponse() Examples

The following are 11 code examples of starlette.responses.FileResponse(), drawn from open-source projects. The source file and originating project are noted above each example. You may also want to check out all available functions/classes of the module starlette.responses.
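Before looking at the project examples, here is a minimal, self-contained sketch of how FileResponse is typically used in a bare Starlette application. The route path, file name, and media type below are placeholders for illustration, not taken from any of the projects listed.

from starlette.applications import Starlette
from starlette.responses import FileResponse
from starlette.routing import Route

async def report(request):
    # FileResponse streams the file from disk in chunks and sets the
    # Content-Type header from media_type (or guesses it from the filename).
    return FileResponse("report.pdf", media_type="application/pdf")

app = Starlette(routes=[Route("/report", report)])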
Example #1
Source File: start.py    From BMW-TensorFlow-Inference-API-CPU with Apache License 2.0
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
	"""
	Draws bounding box(es) on image and returns it.
	:param model_name: Model name
	:param input_data: Image file
	:return: Image file
	"""
	try:
		output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
		error_logging.info('request successful;' + str(output))
		return FileResponse("/main/result.jpg", media_type="image/jpg")
	except ApplicationError as e:
		error_logging.warning(model_name + ';' + str(e))
		return ApiResponse(success=False, error=e)
	except Exception as e:
		error_logging.error(model_name + ' ' + str(e))
		return ApiResponse(success=False, error='unexpected server error') 
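A side note on the media type used above: the registered MIME type for JPEG images is image/jpeg. Browsers generally render image/jpg as well, but the standard value is safer with strict clients. A minimal variant, with the path argument as a placeholder:

from starlette.responses import FileResponse

def jpeg_response(path: str) -> FileResponse:
    # "image/jpeg" is the registered MIME type; "image/jpg" usually still
    # works in practice but is non-standard.
    return FileResponse(path, media_type="image/jpeg")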
Example #2
Source File: start.py    From BMW-TensorFlow-Training-GUI with Apache License 2.0
async def run_model(model_name: str, input_data: UploadFile = File(...)):
	"""
	Draws bounding box(es) on image and returns it.
	:param model_name: Model name
	:param input_data: Image file
	:return: Image file
	"""
	draw_boxes = True
	predict_batch = False
	try:
		output = await dl_service.run_model(model_name, input_data, draw_boxes, predict_batch)
		error_logging.info('request successful;' + str(output))
		return FileResponse("/main/result.jpg", media_type="image/jpg")
	except ApplicationError as e:
		error_logging.warning(model_name+';'+str(e))
		return ApiResponse(success=False, error=e)
	except Exception as e:
		error_logging.error(model_name+' '+str(e))
		return ApiResponse(success=False, error='unexpected server error') 
Example #3
Source File: api.py    From BMW-YOLOv3-Training-Automation with BSD 3-Clause "New" or "Revised" License
def get_validation():
    """Returns prediction on image with the latest saved weights \n
       Left image is the one predicted by the model and Right image is the ground truth"""
    if Path(prediction_image_path).exists():
        list_im: list = [prediction_image_path, ground_truth_image_path]
        imgs: list = [Image.open(str(i)) for i in list_im]

        min_shape: tuple = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1]
        imgs_comb: np.ndarray = np.hstack([np.asarray(i.resize(min_shape)) for i in imgs])

        imgs_comb: Image.Image = Image.fromarray(imgs_comb)
        collage_path: Path = working_dir / "collage.jpg"
        imgs_comb.save(collage_path)

        return FileResponse(collage_path, media_type="image/jpg")

    else:
        if ground_truth_image_path.exists():
            message: str = "No predictions yet"
        else:
            message: str = "No testing set was provided"

        result: dict = {"success": True, "start_time": get_time(), "message": message}
        return result 
Example #4
Source File: start.py    From BMW-YOLOv3-Inference-API-GPU with BSD 3-Clause "New" or "Revised" License
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
	"""
	Draws bounding box(es) on image and returns it.
	:param model_name: Model name
	:param input_data: Image file
	:return: Image file
	"""
	try:
		output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
		error_logging.info('request successful;' + str(output))
		return FileResponse("/main/result.jpg", media_type="image/jpg")
	except ApplicationError as e:
		error_logging.warning(model_name + ';' + str(e))
		return ApiResponse(success=False, error=e)
	except Exception as e:
		error_logging.error(model_name + ' ' + str(e))
		return ApiResponse(success=False, error='unexpected server error') 
Example #5
Source File: endpoints.py    From polyaxon with Apache License 2.0
async def download_artifact(request):
    run_uuid = request.path_params["run_uuid"]
    filepath = request.query_params.get("path", "")
    stream = to_bool(request.query_params.get("stream"), handle_none=True)
    force = to_bool(request.query_params.get("force"), handle_none=True)
    if not filepath:
        return Response(
            content="A `path` query param is required to stream a file content",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
    subpath = "{}/{}".format(run_uuid, filepath).rstrip("/")
    archived_path = await download_file(subpath=subpath, check_cache=not force)
    if not archived_path:
        return Response(
            content="Artifact not found: filepath={}".format(archived_path),
            status_code=status.HTTP_404_NOT_FOUND,
        )
    if stream:
        return FileResponse(archived_path)
    return redirect(archived_path) 
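The final branch chooses between streaming the artifact back and redirecting to it; note that redirect here is polyaxon's own helper, not a Starlette response class. A minimal sketch of the same idea using only Starlette responses (the function name and parameters are assumptions for illustration):

from starlette.responses import FileResponse, RedirectResponse

def file_or_redirect(archived_path: str, stream: bool):
    # Stream the file contents directly when requested; otherwise send the
    # client to the archived location (for example a static or signed URL).
    if stream:
        return FileResponse(archived_path)
    return RedirectResponse(url=archived_path)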
Example #6
Source File: start.py    From BMW-TensorFlow-Inference-API-GPU with Apache License 2.0
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
	"""
	Draws bounding box(es) on image and returns it.
	:param model_name: Model name
	:param input_data: Image file
	:return: Image file
	"""
	try:
		output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
		error_logging.info('request successful;' + str(output))
		return FileResponse("/main/result.jpg", media_type="image/jpg")
	except ApplicationError as e:
		error_logging.warning(model_name + ';' + str(e))
		return ApiResponse(success=False, error=e)
	except Exception as e:
		error_logging.error(model_name + ' ' + str(e))
		return ApiResponse(success=False, error='unexpected server error') 
Example #7
Source File: app.py    From mquery with GNU Affero General Public License v3.0
def download(job_id: str, ordinal: int, file_path: str) -> FileResponse:
    """
    Sends a file from given `file_path`. This path should come from
    results of one of the previous searches.

    This endpoint needs `job_id` that found the specified file, and `ordinal`
    (index of the file in that job), to ensure that user can't download
    arbitrary files (for example "/etc/passwd").
    """
    if not db.job_contains(JobId(job_id), ordinal, file_path):
        raise NotFound("No such file in result set.")

    attach_name, ext = os.path.splitext(os.path.basename(file_path))
    return FileResponse(file_path, filename=attach_name + ext + "_") 
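Passing filename to FileResponse, as above, adds a Content-Disposition: attachment header carrying that name, so browsers download the file instead of trying to render it. A minimal illustration; the path and name below are placeholders, not part of mquery:

from starlette.responses import FileResponse

# The response carries a header like:
# Content-Disposition: attachment; filename="sample.bin_"
response = FileResponse("/tmp/sample.bin", filename="sample.bin_")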
Example #8
Source File: app.py    From mquery with GNU Affero General Public License v3.0
def serve_index(path: str) -> FileResponse:
    return FileResponse("mqueryfront/build/index.html") 
Example #9
Source File: app.py    From mquery with GNU Affero General Public License v3.0
def serve_index_sub() -> FileResponse:
    return FileResponse("mqueryfront/build/index.html") 
Example #10
Source File: api.py    From browsertrix with Apache License 2.0
def ui(*args, **kwargs):
    return FileResponse('static/index.html') 
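Examples #8 through #10 all serve a single-page application's index.html regardless of the requested path, leaving navigation to the client-side router. A minimal catch-all in plain Starlette looks like the sketch below; the route pattern and file path are illustrative:

from starlette.applications import Starlette
from starlette.responses import FileResponse
from starlette.routing import Route

async def spa_index(request):
    # Every unmatched path gets the app shell; the front-end router then
    # resolves the actual view from the URL.
    return FileResponse("static/index.html")

# "{path:path}" matches the remainder of the URL, including slashes.
app = Starlette(routes=[Route("/{path:path}", spa_index)])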
Example #11
Source File: api.py    From BMW-YOLOv3-Training-Automation with BSD 3-Clause "New" or "Revised" License
def get_prediction(
    image: bytes = File(..., description="Image to perform inference on")
):
    """Runs the last saved weights to infer on the given image"""

    prediction_path: Path = working_dir / "predictions"
    training_path: Path = trainn_dir
    weights_path: Path = training_path / "weights"
    last_weights: list = list(weights_path.glob("*_last.weights"))

    if not last_weights:
        result: dict = {
            "success": True,
            "start_time": get_time(),
            "message": "No predictions yet",
        }
        return result

    if not prediction_path.exists():
        # Create a predictions folder whose data/labels is symlinked to darknet/data/labels, which darknet needs in order to label the bounding boxes
        Path.mkdir(prediction_path)
        os.chdir(prediction_path)
        os.mkdir(Path("data"))
        os.symlink(
            working_dir / "darknet/data/labels", working_dir / "predictions/data/labels"
        )
    try:
        img: Image.Image = Image.open(BytesIO(image)).convert("RGB")
        img.save("image.jpg")
        config_file_path: Path = training_path / "config"
        data_path: str = str(list(config_file_path.glob("*.data"))[0])
        cfg_path: str = str(list(config_file_path.glob("*.cfg"))[0])
        last_weights: str = str(last_weights[0])
        darknet_exec_path: Path = working_dir / "darknet/darknet"
        command: list = [
            darknet_exec_path,
            "detector",
            "test",
            data_path,
            cfg_path,
            last_weights,
            "-dont_show",
        ]
        command.append(str(working_dir / "predictions/image.jpg"))

        with open(os.devnull, "w") as DEVNULL:
            subprocess.call(command, stdout=DEVNULL, stderr=DEVNULL)

    except Exception as ex:
        raise HTTPException(
            422,
            detail="Error while reading request image. Please make sure it is a valid image {}".format(
                str(ex)
            ),
        )

    return FileResponse("predictions.jpg", media_type="image/jpg")