Python fastapi.UploadFile() Examples
The following are 24 code examples of fastapi.UploadFile().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module fastapi, or try the search function.
Example #1
Source File: start.py From BMW-TensorFlow-Training-GUI with Apache License 2.0 | 6 votes |
async def detect_robotron(request: Request, background_tasks: BackgroundTasks, model: str = Form(...), image: UploadFile = File(...)):
    """
    Performs a prediction for a specified image using one of the available models.

    :param request: Used if background tasks was enabled
    :param background_tasks: Used if background tasks was enabled
    :param model: Model name or model hash
    :param image: Image file
    :return: Model's bounding boxes, or an ApiResponse describing the failure
    """
    draw_boxes = False
    predict_batch = False
    try:
        request_start = time.time()
        # `async def` restored: the handler awaits the model service.
        output = await dl_service.run_model(model, image, draw_boxes, predict_batch)
        # Metrics collection is intentionally disabled; kept for reference.
        # background_tasks.add_task(metrics_collector,'detect',image, output, request, request_start)
        error_logging.info('request successful;' + str(output))
        return output
    except ApplicationError as e:
        # Known application-level failure: log a warning and return it to the client.
        error_logging.warning(model + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        # Unknown failure: log details server-side, return a generic message.
        error_logging.error(model + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #2
Source File: start.py From BMW-TensorFlow-Inference-API-GPU with Apache License 2.0 | 6 votes |
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
    """
    Draws bounding box(es) on image and returns it.

    :param model_name: Model name
    :param input_data: Image file
    :return: Image file with boxes drawn, or an ApiResponse describing the failure
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        # The annotated image is served from a fixed path — presumably written
        # by dl_service.run_model(draw=True); confirm against the service.
        return FileResponse("/main/result.jpg", media_type="image/jpg")
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #3
Source File: start.py From BMW-TensorFlow-Inference-API-GPU with Apache License 2.0 | 6 votes |
async def run_model_batch(model_name: str, input_data: List[UploadFile] = File(...)):
    """
    Performs a prediction by giving both model name and image file(s).

    :param model_name: Model name
    :param input_data: A batch of image files or a single image file
    :return: APIResponse containing prediction(s) bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=True)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        print(e)  # NOTE(review): leftover debug output; the logger below already records the error.
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #4
Source File: start.py From BMW-TensorFlow-Inference-API-GPU with Apache License 2.0 | 6 votes |
async def run_model(model_name: str, input_data: UploadFile = File(...)):
    """
    Performs a prediction by giving both model name and image file.

    :param model_name: Model name
    :param input_data: An image file
    :return: APIResponse containing the prediction's bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #5
Source File: start.py From BMW-TensorFlow-Inference-API-GPU with Apache License 2.0 | 6 votes |
async def detect_custom(model: str = Form(...), image: UploadFile = File(...)):
    """
    Performs a prediction for a specified image using one of the available models.

    :param model: Model name or model hash
    :param image: Image file
    :return: Model's bounding boxes, or an ApiResponse describing the failure
    """
    draw_boxes = False
    predict_batch = False
    try:
        output = await dl_service.run_model(model, image, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        return output
    except ApplicationError as e:
        error_logging.warning(model + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #6
Source File: start.py From BMW-YOLOv3-Inference-API-GPU with BSD 3-Clause "New" or "Revised" License | 6 votes |
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
    """
    Draws bounding box(es) on image and returns it.

    :param model_name: Model name
    :param input_data: Image file
    :return: Image file with boxes drawn, or an ApiResponse describing the failure
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        # Annotated image is served from a fixed path — presumably written by
        # dl_service.run_model(draw=True); confirm against the service.
        return FileResponse("/main/result.jpg", media_type="image/jpg")
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #7
Source File: start.py From BMW-YOLOv3-Inference-API-GPU with BSD 3-Clause "New" or "Revised" License | 6 votes |
async def run_model_batch(model_name: str, input_data: List[UploadFile] = File(...)):
    """
    Performs a prediction by giving both model name and image file(s).

    :param model_name: Model name
    :param input_data: A batch of image files or a single image file
    :return: APIResponse containing prediction(s) bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=True)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        print(e)  # NOTE(review): leftover debug output; the logger below already records the error.
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #8
Source File: start.py From BMW-YOLOv3-Inference-API-GPU with BSD 3-Clause "New" or "Revised" License | 6 votes |
async def run_model(model_name: str, input_data: UploadFile = File(...)):
    """
    Performs a prediction by giving both model name and image file.

    :param model_name: Model name
    :param input_data: An image file
    :return: APIResponse containing the prediction's bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #9
Source File: start.py From BMW-YOLOv3-Inference-API-GPU with BSD 3-Clause "New" or "Revised" License | 6 votes |
async def detect_custom(model: str = Form(...), image: UploadFile = File(...)):
    """
    Performs a prediction for a specified image using one of the available models.

    :param model: Model name or model hash
    :param image: Image file
    :return: Model's bounding boxes, or an ApiResponse describing the failure
    """
    draw_boxes = False
    predict_batch = False
    try:
        output = await dl_service.run_model(model, image, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        return output
    except ApplicationError as e:
        error_logging.warning(model + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #10
Source File: start.py From BMW-TensorFlow-Training-GUI with Apache License 2.0 | 6 votes |
async def run_model(model_name: str, input_data: UploadFile = File(...)):
    """
    Draws bounding box(es) on image and returns it.

    :param model_name: Model name
    :param input_data: Image file
    :return: Image file with boxes drawn, or an ApiResponse describing the failure
    """
    draw_boxes = True
    predict_batch = False
    try:
        output = await dl_service.run_model(model_name, input_data, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        # Annotated image is served from a fixed path — presumably written by
        # dl_service.run_model with draw_boxes=True; confirm against the service.
        return FileResponse("/main/result.jpg", media_type="image/jpg")
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #11
Source File: start.py From BMW-TensorFlow-Training-GUI with Apache License 2.0 | 6 votes |
async def run_model_batch(model_name: str, input_data: List[UploadFile] = File(...)):
    """
    Performs a prediction by giving both model name and image file(s).

    :param model_name: Model name
    :param input_data: A batch of image files or a single image file
    :return: APIResponse containing prediction(s) bounding boxes
    """
    draw_boxes = False
    predict_batch = True
    try:
        output = await dl_service.run_model(model_name, input_data, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        print(e)  # NOTE(review): leftover debug output; the logger below already records the error.
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #12
Source File: start.py From BMW-TensorFlow-Training-GUI with Apache License 2.0 | 6 votes |
async def run_model(model_name: str, input_data: UploadFile = File(...)):
    """
    Performs a prediction by giving both model name and image file.

    :param model_name: Model name
    :param input_data: An image file
    :return: APIResponse containing the prediction's bounding boxes
    """
    draw_boxes = False
    predict_batch = False
    try:
        output = await dl_service.run_model(model_name, input_data, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #13
Source File: start.py From BMW-TensorFlow-Inference-API-CPU with Apache License 2.0 | 6 votes |
async def detect_custom(model: str = Form(...), image: UploadFile = File(...)):
    """
    Performs a prediction for a specified image using one of the available models.

    :param model: Model name or model hash
    :param image: Image file
    :return: Model's bounding boxes, or an ApiResponse describing the failure
    """
    draw_boxes = False
    predict_batch = False
    try:
        output = await dl_service.run_model(model, image, draw_boxes, predict_batch)
        error_logging.info('request successful;' + str(output))
        return output
    except ApplicationError as e:
        error_logging.warning(model + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #14
Source File: config.py From LuWu with Apache License 2.0 | 6 votes |
async def create_site_template(
    db: Session = Depends(get_db),
    name: str = Form(...),
    zip_file: UploadFile = File(..., alias='zipFile'),
    remark: Union[str, None] = Form(None),
):
    """
    Create a site template record from an uploaded zip archive.

    :param db: Database session (injected)
    :param name: Template name (form field)
    :param zip_file: Uploaded zip archive (form field alias "zipFile")
    :param remark: Optional free-text remark
    :return: dict with the created record under "result"
    """
    site_template_profile = dict(
        name=name,
        remark=remark,
        zip_file_name=zip_file.filename,
        # Whole file is read into memory; acceptable only for small uploads.
        zip_file_content=await zip_file.read()
    )
    created_data = crud_site_template.create_site_template(
        db, site_template_profile
    )
    return dict(result=created_data)
Example #15
Source File: start.py From BMW-TensorFlow-Inference-API-CPU with Apache License 2.0 | 6 votes |
async def predict_image(model_name: str, input_data: UploadFile = File(...)):
    """
    Draws bounding box(es) on image and returns it.

    :param model_name: Model name
    :param input_data: Image file
    :return: Image file with boxes drawn, or an ApiResponse describing the failure
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=True, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        # Annotated image is served from a fixed path — presumably written by
        # dl_service.run_model(draw=True); confirm against the service.
        return FileResponse("/main/result.jpg", media_type="image/jpg")
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #16
Source File: start.py From BMW-TensorFlow-Inference-API-CPU with Apache License 2.0 | 6 votes |
async def run_model_batch(model_name: str, input_data: List[UploadFile] = File(...)):
    """
    Performs a prediction by giving both model name and image file(s).

    :param model_name: Model name
    :param input_data: A batch of image files or a single image file
    :return: APIResponse containing prediction(s) bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=True)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        print(e)  # NOTE(review): leftover debug output; the logger below already records the error.
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #17
Source File: start.py From BMW-TensorFlow-Inference-API-CPU with Apache License 2.0 | 6 votes |
async def run_model(model_name: str, input_data: UploadFile = File(...)):
    """
    Performs a prediction by giving both model name and image file.

    :param model_name: Model name
    :param input_data: An image file
    :return: APIResponse containing the prediction's bounding boxes
    """
    try:
        output = await dl_service.run_model(model_name, input_data, draw=False, predict_batch=False)
        error_logging.info('request successful;' + str(output))
        return ApiResponse(data=output)
    except ApplicationError as e:
        error_logging.warning(model_name + ';' + str(e))
        return ApiResponse(success=False, error=e)
    except Exception as e:
        error_logging.error(model_name + ' ' + str(e))
        return ApiResponse(success=False, error='unexpected server error')
Example #18
Source File: web.py From mergify-engine with Apache License 2.0 | 5 votes |
async def config_validator(
    data: fastapi.UploadFile = fastapi.File(...),
):  # pragma: no cover
    """
    Validate an uploaded Mergify configuration file.

    :param data: Uploaded configuration file
    :return: Plain-text response; 200 if the configuration is valid, 400 with
        the validation error message otherwise
    """
    try:
        # Schema construction raises on invalid configuration content.
        rules.UserConfigurationSchema(await data.read())
    except Exception as e:
        status = 400
        message = str(e)
    else:
        status = 200
        message = "The configuration is valid"
    return responses.PlainTextResponse(message, status_code=status)
Example #19
Source File: config.py From LuWu with Apache License 2.0 | 5 votes |
async def upload_site_template_file(
    db: Session = Depends(get_db),
    *,
    site_template_id: int,
    zip_file: UploadFile = File(..., alias='zipFile'),
):
    """
    Replace the zip archive of an existing site template.

    :param db: Database session (injected)
    :param site_template_id: Identifier of the template to update (keyword-only)
    :param zip_file: Uploaded zip archive (form field alias "zipFile")
    :return: dict with a boolean "result" indicating whether the update succeeded
    """
    update_result = crud_site_template.update_site_template(
        db_session=db,
        template_id=site_template_id,
        zip_file_name=zip_file.filename,
        # Whole file is read into memory; acceptable only for small uploads.
        zip_file_content=await zip_file.read()
    )
    return dict(result=bool(update_result))
Example #20
Source File: config.py From LuWu with Apache License 2.0 | 5 votes |
async def create_c2_profile(
    db: Session = Depends(get_db),
    name: str = Form(...),
    profile: UploadFile = File(...),
    remark: str = Form(None),
):
    """
    Create a C2 profile record from an uploaded profile file.

    :param db: Database session (injected)
    :param name: Profile name (form field)
    :param profile: Uploaded profile file
    :param remark: Optional free-text remark
    :return: dict with the created record under "result"
    """
    c2_profile_obj = C2ProfileCreate(
        name=name,
        remark=remark,
        profile_name=profile.filename,
        # Whole file is read into memory; acceptable only for small uploads.
        profile_content=await profile.read()
    )
    created_data = crud_c2.create(db, obj_in=c2_profile_obj)
    return dict(result=created_data)
Example #21
Source File: sectlabel.py From sciwing with MIT License | 5 votes |
def process_pdf(file: UploadFile = File(None)):
    """
    Run the SectLabel model over every line of an uploaded PDF.

    :param file: Uploaded PDF file
    :return: dict with "labels" mapping to a list of (line, label) tuples
    """
    global sectlabel_model
    # Lazily instantiate the model once and reuse it across requests.
    if sectlabel_model is None:
        sectlabel_model = SectLabel()
    file_handle = file.file
    file_name = file.filename
    file_contents = file_handle.read()
    # Persist the upload so the PDF reader can open it from disk.
    pdf_save_location = pdf_store.save_pdf_binary_string(
        pdf_string=file_contents, out_filename=file_name
    )
    # noinspection PyTypeChecker
    pdf_reader = PdfReader(filepath=pdf_save_location)
    # read pdf lines
    lines = pdf_reader.read_pdf()
    all_labels = []
    all_lines = []
    # Predict in batches of 64 lines to bound per-call memory.
    for batch_lines in chunks(lines, 64):
        labels = sectlabel_model.predict_for_text_batch(texts=batch_lines)
        all_labels.append(labels)
        all_lines.append(batch_lines)
    all_lines = list(itertools.chain.from_iterable(all_lines))
    all_labels = list(itertools.chain.from_iterable(all_labels))
    response_tuples = list(zip(all_lines, all_labels))
    # remove the saved pdf
    pdf_store.delete_file(str(pdf_save_location))
    return {"labels": response_tuples}
Example #22
Source File: tutorial001.py From fastapi with MIT License | 5 votes |
def create_file(
    file: bytes = File(...), fileb: UploadFile = File(...), token: str = Form(...)
):
    """Report the raw file's size, the form token, and the upload's content type."""
    payload = {
        "file_size": len(file),
        "token": token,
        "fileb_content_type": fileb.content_type,
    }
    return payload
Example #23
Source File: tutorial001.py From fastapi with MIT License | 5 votes |
def create_upload_file(file: UploadFile = File(...)):
    """Echo back the original filename of the uploaded file."""
    response = {"filename": file.filename}
    return response
Example #24
Source File: tutorial002.py From fastapi with MIT License | 5 votes |
def create_upload_files(files: List[UploadFile] = File(...)):
    """Echo back the original filenames of all uploaded files."""
    names = [upload.filename for upload in files]
    return {"filenames": names}