Clementapa
committed
Commit 57ecc83
Parent(s): 7816559

First commit
- app.py  +90 -0
- requirements.txt  +2 -0
- style.css  +60 -0
- train_7best.pt  +3 -0
app.py
ADDED
@@ -0,0 +1,90 @@
from typing import List

import gradio as gr
import numpy as np
import supervision as sv
import torch
from PIL import Image
from ultralytics import YOLO

MARKDOWN = """
# Orang Outan Detection
"""
EXAMPLES = []

DEVICE = "cuda" if torch.cuda.is_available() else "cpu"

YOLO_MODEL = YOLO("train_7best.pt")

BOX_ANNOTATOR = sv.BoxAnnotator()


def annotate(
    image_bgr_numpy: np.ndarray,
    detections: sv.Detections,
    annotator: sv.BoxAnnotator,
    labels: List[str],
) -> Image.Image:
    # Draw boxes and labels on the BGR frame, then flip channels to RGB for display.
    annotated_bgr_image = annotator.annotate(
        scene=image_bgr_numpy, detections=detections, labels=labels
    )
    return Image.fromarray(annotated_bgr_image[:, :, ::-1])


def inference(image_rgb_pil: Image.Image, confidence: float) -> Image.Image:
    output = YOLO_MODEL(image_rgb_pil, verbose=False)[0]
    detections = sv.Detections.from_ultralytics(output)

    # Keep only detections at or above the confidence threshold from the slider.
    detections = detections[detections.confidence >= confidence]

    labels = [
        f"{output.names[class_id]} {confidence:0.2f}"
        for _, _, confidence, class_id, _ in detections
    ]

    return annotate(
        image_bgr_numpy=output.orig_img.copy(),
        detections=detections,
        annotator=BOX_ANNOTATOR,
        labels=labels,
    )


def run_demo():
    custom_theme = gr.themes.Soft(primary_hue="blue").set(
        button_secondary_background_fill="*neutral_100",
        button_secondary_background_fill_hover="*neutral_200",
    )

    with gr.Blocks(theme=custom_theme, css="style.css") as demo:
        gr.Markdown(MARKDOWN)

        with gr.Row():
            with gr.Column():
                input_image = gr.Image(image_mode="RGB", type="pil", height=500)
                confidence_slider = gr.Slider(
                    label="Confidence", minimum=0.1, maximum=1.0, step=0.05, value=0.6
                )
                submit_button = gr.Button("Submit")
            output_image = gr.Image(label="Results", type="pil")

        # with gr.Row():
        #     gr.Examples(
        #         examples=EXAMPLES,
        #         fn=inference,
        #         inputs=[input_image, prompt_text, confidence_slider],
        #         outputs=[gallery],
        #         cache_examples=True,
        #         run_on_click=True
        #     )

        submit_button.click(
            inference,
            inputs=[input_image, confidence_slider],
            outputs=output_image,
            queue=True,
        )

    demo.queue(max_size=20, api_open=False).launch()


if __name__ == "__main__":
    run_demo()
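The UI wires `inference` to the Submit button, so the normal path is through the browser. For a quick check without Gradio, a minimal sketch along these lines (a hypothetical helper, not part of this commit; `test.jpg` is a placeholder path) calls the same function directly and saves the annotated result:

# quick_test.py — hypothetical sketch, not part of this commit.
# Assumes train_7best.pt is present locally and "test.jpg" is a placeholder image path.
from PIL import Image

from app import inference

if __name__ == "__main__":
    image = Image.open("test.jpg").convert("RGB")
    annotated = inference(image, confidence=0.6)  # same default as the slider
    annotated.save("test_annotated.jpg")
    print("Annotated image written to test_annotated.jpg")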
requirements.txt
ADDED
@@ -0,0 +1,2 @@
supervision
ultralytics
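requirements.txt pins only supervision and ultralytics; gradio, torch, numpy, and Pillow are assumed to come from the Spaces runtime or as transitive dependencies of ultralytics. A small sanity check (a sketch, not part of the commit) can confirm that everything app.py imports resolves before launching the demo:

# check_env.py — hypothetical sketch; the list mirrors app.py's imports,
# not the contents of requirements.txt.
import importlib

for name in ("gradio", "numpy", "supervision", "torch", "PIL", "ultralytics"):
    try:
        module = importlib.import_module(name)
        print(f"{name}: {getattr(module, '__version__', 'unknown version')}")
    except ImportError as exc:
        print(f"{name}: MISSING ({exc})")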
style.css
ADDED
@@ -0,0 +1,60 @@
#disp_image {
    text-align: center;
    /* Horizontally center the content */
}

#share-btn-container {
    padding-left: 0.5rem !important;
    padding-right: 0.5rem !important;
    background-color: #000000;
    justify-content: center;
    align-items: center;
    border-radius: 9999px !important;
    max-width: 13rem;
    margin-left: auto;
}

div#share-btn-container>div {
    flex-direction: row;
    background: black;
    align-items: center
}

#share-btn-container:hover {
    background-color: #060606
}

#share-btn {
    all: initial;
    color: #ffffff;
    font-weight: 600;
    cursor: pointer;
    font-family: 'IBM Plex Sans', sans-serif;
    margin-left: 0.5rem !important;
    padding-top: 0.5rem !important;
    padding-bottom: 0.5rem !important;
    right: 0;
}

#share-btn * {
    all: unset
}

#share-btn-container div:nth-child(-n+2) {
    width: auto !important;
    min-height: 0px !important;
}

#share-btn-container .wrap {
    display: none !important
}

#share-btn-container.hidden {
    display: none !important
}

#duplicate-button {
    margin-left: auto;
    color: #fff;
    background: #1565c0;
}
train_7best.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5b07359a9e5d652236f17c62e37a959918427261f84a24b09f0b111ca67b8925
size 6237657
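train_7best.pt is committed as a Git LFS pointer, so the actual weights (6237657 bytes per the pointer) exist locally only after an LFS-enabled checkout or download. A sketch like the following (expected values copied from the pointer above) verifies that the local file is the real checkpoint rather than the pointer text:

# verify_weights.py — hypothetical sketch; expected values come from the LFS pointer above.
import hashlib
from pathlib import Path

EXPECTED_SHA256 = "5b07359a9e5d652236f17c62e37a959918427261f84a24b09f0b111ca67b8925"
EXPECTED_SIZE = 6237657  # bytes

data = Path("train_7best.pt").read_bytes()
print("size matches:  ", len(data) == EXPECTED_SIZE)
print("sha256 matches:", hashlib.sha256(data).hexdigest() == EXPECTED_SHA256)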