import gradio as gr
import numpy as np
import os
import spaces
from huggingface_hub import hf_hub_download
from ultralytics import YOLO
# On ZeroGPU Spaces the `spaces` import is paired with this decorator, which
# allocates a GPU only for the duration of the call.
@spaces.GPU
def yolov9_inference(img_path, model_id='YOLOv9-S_X_LOCO-converted.pt', img_size=640, conf_thres=0.1, iou_thres=0.4):
    """
    Download and load a YOLOv9 model, run inference on an image, and return the
    annotated result.

    :param img_path: Path to the image file.
    :param model_id: Name of the weight file to fetch from the Hub.
    :param img_size: Input size for inference.
    :param conf_thres: Confidence threshold for NMS.
    :param iou_thres: IoU threshold for NMS.
    :return: The image with detections drawn on it, as an RGB numpy array.
    """
    model_path = download_models(model_id)
    model = YOLO(model_path)
    results = model.predict(img_path, conf=conf_thres, iou=iou_thres, imgsz=img_size)
    # plot() renders boxes, scores and labels onto the image (BGR); flip to RGB for Gradio.
    return results[0].plot()[:, :, ::-1]
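
# Fetch the requested YOLOv9 checkpoint from the KoniHD/LOCO-Detection repo on the
# Hugging Face Hub (HF_TOKEN is read from the environment in case the repo is gated)
# and return the local path it was saved to.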
def download_models(model_id):
    hf_hub_download("KoniHD/LOCO-Detection", filename=model_id, local_dir="./",
                    token=os.getenv("HF_TOKEN"))
    return f"./{model_id}"
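
# Build the Gradio UI: image input, model/size/threshold controls, an inference
# button, the annotated output image, and cached examples.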
def app():
    with gr.Blocks():
        with gr.Row():
            with gr.Column():
                img_path = gr.Image(type="filepath", label="Image")
                model_path = gr.Dropdown(
                    label="Model",
                    choices=[
                        "YOLOv9-S_X_LOCO-converted.pt",
                        "YOLOv9-S_X_LOCO.pt",
                        "YOLOv9-E_X_LOCO-converted.pt",
                        "YOLOv9-E_X_LOCO.pt",
                    ],
                    value="YOLOv9-S_X_LOCO-converted.pt",
                )
                image_size = gr.Slider(
                    label="Image Size",
                    minimum=320,
                    maximum=1280,
                    step=32,
                    value=640,
                )
                conf_threshold = gr.Slider(
                    label="Confidence Threshold",
                    minimum=0.1,
                    maximum=1.0,
                    step=0.1,
                    value=0.4,
                )
                iou_threshold = gr.Slider(
                    label="IoU Threshold",
                    minimum=0.1,
                    maximum=1.0,
                    step=0.1,
                    value=0.5,
                )
                yolov9_infer = gr.Button(value="Inference")

            with gr.Column():
                output_numpy = gr.Image(type="numpy", label="Output")
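
        # Wire the button to the inference function; the annotated image is shown
        # in the output component.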
        yolov9_infer.click(
            fn=yolov9_inference,
            inputs=[
                img_path,
                model_path,
                image_size,
                conf_threshold,
                iou_threshold,
            ],
            outputs=[output_numpy],
        )
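
        # Pre-filled example inputs; with cache_examples=True their outputs are
        # pre-computed and served from cache.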
        gr.Examples(
            examples=[
                [
                    "data/zidane.jpg",
                    "YOLOv9-S_X_LOCO-converted.pt",
                    640,
                    0.4,
                    0.5,
                ],
                [
                    "data/huggingface.jpg",
                    "YOLOv9-E_X_LOCO-converted.pt",
                    640,
                    0.4,
                    0.5,
                ],
            ],
            fn=yolov9_inference,
            inputs=[
                img_path,
                model_path,
                image_size,
                conf_threshold,
                iou_threshold,
            ],
            outputs=[output_numpy],
            cache_examples=True,
        )
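
# Top-level Blocks that holds the page header and embeds the demo UI.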
gradio_app = gr.Blocks()
with gradio_app:
    gr.HTML(
        """
        <h1 style='text-align: center'>
        YOLOv9: Learning What You Want to Learn Using Programmable Gradient Information
        </h1>
        """)
    gr.HTML(
        """
        <h3 style='text-align: center'>
        Follow me for more!
        <a href='https://twitter.com/konihd_7' target='_blank'>Twitter</a> | <a href='https://github.com/KoniHD' target='_blank'>Github</a> | <a href='https://www.linkedin.com/in/konstantin-zeck/' target='_blank'>Linkedin</a> | <a href='https://www.huggingface.co/KoniHD/' target='_blank'>HuggingFace</a>
        </h3>
        """)
    with gr.Row():
        with gr.Column():
            app()

gradio_app.launch(debug=True)