from custom_torch_module.deploy_utils import Onnx_deploy_model
import gradio as gr
import time
from PIL import Image
import os
from pathlib import Path
# Locate the exported ONNX model shipped with the Space
model_path = list(Path("deploying model/").glob("*.onnx"))[0]
input_size = [1, 3, 224, 224]
img_size = input_size[-1]

title = "Gender Vision mini"
description = "A resnet50_clip_gap-based model (fine-tuned on a custom dataset: around 800 train images & 200 test images). F1 score: 100% (1.00) on the custom test dataset. Optimized with ONNX (around 1.7 times faster than the PyTorch version on CPU)."
article = "Built through many rounds of fine-tuning and experiments. !REMEMBER! This model can be wrong."
def predict(img):
    # Run ONNX inference on the uploaded image and time it
    start_time = time.time()
    output = onnx_model.run(img, return_prob=True)
    end_time = time.time()
    elapsed_time = end_time - start_time
    prediction_fps = 1 / elapsed_time
    # Map class probabilities to their labels for the Gradio Label output
    pred_label_and_probs = {"Men": output[0], "Women": output[1]}
    return pred_label_and_probs, prediction_fps
onnx_model = Onnx_deploy_model(model_path=model_path, img_size=img_size)

example_list = [["examples/" + example] for example in os.listdir("examples")]

# Create the Gradio demo
demo = gr.Interface(fn=predict,
                    inputs=gr.Image(type="pil"),
                    outputs=[gr.Label(num_top_classes=2, label="Predictions"),
                             gr.Number(label="Prediction speed (FPS)")],
                    examples=example_list,
                    title=title,
                    description=description,
                    article=article)

# Launch the demo
demo.launch()
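
The Onnx_deploy_model class imported above lives in custom_torch_module.deploy_utils, which is not shown in this Space. For reference, here is a minimal sketch of what such a wrapper could look like when built on onnxruntime; the preprocessing (plain 0-1 scaling with no CLIP normalization), the CPU provider choice, and the [Men, Women] output order are assumptions, not the actual implementation.

# Hypothetical sketch of an onnxruntime-based deployment wrapper.
# The real Onnx_deploy_model in custom_torch_module.deploy_utils may differ.
import numpy as np
import onnxruntime as ort

class OnnxDeployModelSketch:
    def __init__(self, model_path, img_size=224):
        # Load the exported .onnx graph for CPU inference (assumed provider)
        self.session = ort.InferenceSession(str(model_path),
                                            providers=["CPUExecutionProvider"])
        self.input_name = self.session.get_inputs()[0].name
        self.img_size = img_size

    def run(self, img, return_prob=False):
        # img is a PIL image; resize and scale it to a [1, 3, H, W] float32 tensor
        img = img.convert("RGB").resize((self.img_size, self.img_size))
        x = np.asarray(img, dtype=np.float32) / 255.0
        x = np.transpose(x, (2, 0, 1))[None, ...]  # HWC -> NCHW
        logits = self.session.run(None, {self.input_name: x})[0][0]
        if not return_prob:
            return logits
        # Softmax over the two class logits (order assumed: [Men, Women])
        exp = np.exp(logits - logits.max())
        return exp / exp.sum()

Under these assumptions, run(img, return_prob=True) returns a length-2 probability vector, which matches how predict above indexes output[0] and output[1].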