Update app.py
app.py CHANGED

@@ -1,7 +1,7 @@
 import gradio as gr
 import os
 
-os.system("wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt")
+#os.system("wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt")
 os.system("wget https://github.com/hustvl/YOLOP/raw/main/weights/End-to-end.pth")
 #os.system("wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7-e6e.pt")
 #os.system("wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7-e6.pt")
@@ -23,13 +23,6 @@ from numpy import random
 #from utils.plots import plot_one_box
 #from utils.torch_utils import select_device, load_classifier, time_synchronized, TracedModel
 
-from utils.functions import \
-    time_synchronized,select_device, increment_path,\
-    scale_coords,xyxy2xywh,non_max_suppression,split_for_trace_model,\
-    driving_area_mask,lane_line_mask,plot_one_box,show_seg_result,\
-    AverageMeter,\
-    LoadImages
-
 
 from PIL import Image
 
@@ -190,6 +183,13 @@ def detect(img,model):
 
 
     if weights == 'yolopv2.pt':
+        from utils.functions import \
+            time_synchronized,select_device, increment_path,\
+            scale_coords,xyxy2xywh,non_max_suppression,split_for_trace_model,\
+            driving_area_mask,lane_line_mask,plot_one_box,show_seg_result,\
+            AverageMeter,\
+            LoadImages
+
         stride =32
         model = torch.jit.load(weights,map_location=device)
        model.eval()
@@ -296,4 +296,4 @@ def detect(img,model):
     return Image.fromarray(im0[:,:,::-1])
 
 
-gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "
+gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>YOLOPv2</a> 🚀: Better, Faster, Stronger for Panoptic driving Perception").launch()
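The commit does two things: it comments out the startup download of yolov7.pt (so the Space only fetches the End-to-end.pth weights it actually loads), and it moves the `from utils.functions import ...` block from module level into the `if weights == 'yolopv2.pt':` branch. The effect of the second change is a lazy import: the YOLOPv2 helpers are only imported when that model is selected, so choosing `yolop` never pays their import cost (or hits their import errors if the module is unavailable). Below is a minimal, self-contained sketch of that pattern, not the Space's code; `json` stands in for the heavy `utils.functions` module, and the `detect`/`weights` names only loosely mirror the ones above.

def detect(weights):
    if weights == "yolopv2.pt":
        # Deferred import, as in the commit: the dependency is only
        # touched when this branch actually runs. `json` is a stand-in
        # for the Space's utils.functions helpers.
        import json
        return json.dumps({"weights": weights})
    return weights

print(detect("yolop"))       # never imports json
print(detect("yolopv2.pt"))  # imports json on first call

Python caches modules in sys.modules, so repeated calls to the lazy branch re-run the import statement but only pay the load cost once.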