bayramsn committed
Commit c5cdf41 · Parent: 8c738c4

apps: improve device handling, CUDA status, and error messages; add DirectML option

apps/webcam_app.py CHANGED (+72 -25)
    
@@ -22,7 +22,7 @@ st.caption("Live object detection with YOLO11 using your webcam. Use the sidebar
 def _resolve_device(device: str) -> str:
     """Map UI device option to a valid torch/Ultralytics device string."""
     d = (device or "").lower().strip()
-    if d in {"cpu", "cuda", "mps", "xpu"}:
+    if d in {"cpu", "cuda", "mps", "xpu", "dml"}:
         return d
     if d == "auto":
         if torch.cuda.is_available():
@@ -30,6 +30,12 @@ def _resolve_device(device: str) -> str:
         # Apple MPS (macOS). Kept for completeness even if not typical on Windows.
         if hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
             return "mps"
+        # Windows DirectML (requires torch-directml)
+        try:
+            import torch_directml  # noqa: F401
+            return "dml"
+        except Exception:
+            pass
         return "cpu"
     # Fallback to CPU for any unknown value
     return "cpu"
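Note on the new "dml" option: the resolver above only returns the string "dml" when torch_directml imports successfully. Below is a minimal sketch (not part of this commit) of how a caller might turn that string into an actual torch.device; the to_torch_device helper name is illustrative, it assumes the torch-directml package (torch_directml.device()), and whether a plain "dml" string is accepted by tensor/model .to() depends on the installed torch-directml build.

import torch

def to_torch_device(resolved: str) -> torch.device:
    """Turn the string from _resolve_device() into a torch.device (illustrative helper)."""
    if resolved == "dml":
        try:
            import torch_directml  # optional Windows DirectML plugin
            return torch_directml.device()  # default DirectML adapter as a torch.device
        except Exception:
            return torch.device("cpu")  # DirectML not installed; fall back to CPU
    return torch.device(resolved)  # "cpu", "cuda", "mps", "xpu" are handled by torch directly

print(to_torch_device("cpu"))  # cpu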
@@ -47,33 +53,52 @@ def load_model(model_source: str, device: str = "auto"):
     # Resolve device (maps 'auto' to an actual device)
     resolved_device = _resolve_device(device)

-    […previous model-loading code (20 lines); not recoverable from the rendered diff…]
+    # Gentle hint for a common mistake: loading a multi-model container instead of a single YOLO weight
+    try:
+        base_name = os.path.basename(model_source)
+        if base_name.lower().startswith("models_multi"):
+            st.info("The selected .pt may be a package containing more than one set of weights. Please choose a single YOLO weight file (e.g. yolo11n.pt).")
+    except Exception:
+        pass
+
+    # 1) Download/resolve the source to a local file and create the YOLO object
+    def _create_yolo(path_or_alias: str):
+        if path_or_alias.startswith("hf://"):
+            try:
+                from huggingface_hub import hf_hub_download
+            except Exception:
+                st.error("huggingface_hub is not installed. Install it, or use a local/built-in model.")
+                raise
+            path = path_or_alias.replace("hf://", "", 1)
+            if "/" not in path:
+                raise ValueError("For hf://, use the form hf://<repo_id>/<filename>")
+            repo_id, filename = path.split("/", 1)
+            st.info(f"Downloading {filename} from {repo_id}…")
+            local_path = hf_hub_download(repo_id=repo_id, filename=filename, token=os.getenv("HF_TOKEN"))
+            return YOLO(local_path)
+        else:
+            return YOLO(path_or_alias)
+
+    # First try to load the model (an error here means a file/format problem)
+    try:
+        model = _create_yolo(model_source)
+    except Exception as e:
+        st.error(
+            "The model file could not be loaded. It may not be a valid YOLO weight (e.g. a training checkpoint or a packaged container).\n"
+            f"Detail: {e}"
+        )
+        raise

-    # […not recoverable from the rendered diff…]
+    # 2) Then try to move it to the device (an error here means CUDA/MPS is unusable → fall back to CPU)
     try:
-        return […not recoverable from the rendered diff…]
+        return model.to(resolved_device)
     except Exception as e:
         st.warning(f"Could not move to {resolved_device}; falling back to CPU. Detail: {e}")
-        […not recoverable from the rendered diff…]
+        try:
+            return model.to("cpu")
+        except Exception:
+            # If moving to CPU also fails, return the raw model (last resort)
+            return model


 class YOLOProcessor(VideoProcessorBase):
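For reference, the hf:// branch of _create_yolo boils down to a single huggingface_hub call. Below is a standalone sketch (not part of the commit) using the repository and file named in the sidebar help text, hf://Ultralytics/YOLO11/yolo11n.pt; note that this repo_id itself contains a slash, so splitting the spec at the first "/" passes only "Ultralytics" as repo_id and treats the remainder as the filename.

import os
from huggingface_hub import hf_hub_download
from ultralytics import YOLO

# Fetch one weight file from the Hub into the local cache, then load it.
local_path = hf_hub_download(
    repo_id="Ultralytics/YOLO11",   # repository from the sidebar help text
    filename="yolo11n.pt",          # a single YOLO weight file
    token=os.getenv("HF_TOKEN"),    # only needed for private/gated repos
)
model = YOLO(local_path)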
@@ -93,11 +118,28 @@ with st.sidebar:
     st.header("Settings")
     default_model = "yolo11n.pt"  # An automatic download from the Ultralytics distribution will be attempted
     model_path = st.text_input("Model path or alias", value=default_model, help="Local .pt path, built-in alias (e.g. yolo11n.pt), or hf://Ultralytics/YOLO11/yolo11n.pt")
-    device = st.selectbox("Device", options=["auto", "cpu", "cuda"], index=0)
+    device = st.selectbox("Device", options=["auto", "cpu", "cuda", "dml"], index=0)
     conf = st.slider("Confidence", min_value=0.1, max_value=0.9, value=0.25, step=0.05)
     iou = st.slider("IoU", min_value=0.1, max_value=0.9, value=0.45, step=0.05)
     load_btn = st.button("Load Model")

+    # Environment status (Torch/CUDA)
+    try:
+        tv = torch.__version__
+        cv = getattr(torch.version, "cuda", None)
+        ca = torch.cuda.is_available()
+        # Detect DirectML
+        has_dml = False
+        try:
+            import torch_directml  # noqa: F401
+            has_dml = True
+        except Exception:
+            has_dml = False
+        dml_txt = " • DirectML: ready" if has_dml else ""
+        st.caption(f"Torch {tv} • CUDA: {cv or 'none'} • GPU enabled (CUDA): {'Yes' if ca else 'No'}{dml_txt}")
+    except Exception:
+        pass
+

 st.session_state.setdefault("model", None)

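The new sidebar caption surfaces the Torch/CUDA/DirectML state inside the app; the same facts can be checked from a plain Python session when debugging an environment. The short snippet below (not part of the commit) uses only standard PyTorch attributes plus the optional torch_directml import.

import torch

print("Torch:", torch.__version__)
print("CUDA build:", getattr(torch.version, "cuda", None))  # None on CPU-only builds
print("CUDA available:", torch.cuda.is_available())
try:
    import torch_directml  # noqa: F401  (optional Windows DirectML plugin)
    print("DirectML: ready")
except Exception:
    print("DirectML: not installed")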
@@ -110,6 +152,11 @@ if load_btn:
             st.success(f"Model loaded. Classes: {len(names)}")
         else:
             st.success("Model loaded.")
+        try:
+            active_device = next((p.device.type for p in st.session_state.model.parameters()), "unknown")
+            st.info(f"Active device: {active_device}")
+        except Exception:
+            pass
     except Exception as e:
         st.exception(e)

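The "Active device" line above reports the device of the first model parameter. Below is a small illustrative helper with the same idea, usable outside Streamlit; the active_device name and the nn.Linear example are not from the commit, and for an Ultralytics YOLO object the underlying nn.Module is usually reachable as model.model.

from torch import nn

def active_device(module: nn.Module) -> str:
    """Report where a module's weights live; 'unknown' if it has no parameters."""
    return next((p.device.type for p in module.parameters()), "unknown")

print(active_device(nn.Linear(4, 2)))           # cpu
# print(active_device(nn.Linear(4, 2).cuda()))  # cuda, if a CUDA GPU is present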
