Spaces:
Sleeping
Sleeping
Upload 5 files
Browse files- Dockerfile +17 -0
- app.py +74 -0
- docker-compose.yml +23 -0
- dockerignore +11 -0
- requirements.txt +14 -0
Dockerfile
ADDED
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# CPU-only inference image for the Arabic zero-shot classifier.
FROM python:3.9-slim-buster

# PYTHONUNBUFFERED: stream logs straight to the container console.
# HF_HOME / TORCH_HOME: cache locations kept identical to the values
# docker-compose.yml exports, so the model baked in at build time is
# found again at runtime (the original TORCH_HOME=/app/cache disagreed
# with compose's /app/.cache/torch).
ENV PYTHONUNBUFFERED=1 \
    HF_HOME=/app/.cache/huggingface \
    TORCH_HOME=/app/.cache/torch \
    USE_CUDA=false

# gcc/python3-dev are needed to build native wheels (e.g. sentencepiece);
# drop the apt lists afterwards to keep the layer small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends gcc python3-dev \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy requirements first so the (slow) dependency + model-download layers
# are cached independently of source-code changes.
COPY requirements.txt .
RUN pip install --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt && \
    python -c "from transformers import pipeline; pipeline('zero-shot-classification', model='joeddav/xlm-roberta-large-xnli')"

COPY . .

CMD ["gunicorn", "--bind", "0.0.0.0:7860", "--workers", "1", "--threads", "4", "app:app"]
app.py
ADDED
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import logging
|
3 |
+
from flask import Flask, request, jsonify
|
4 |
+
from flask_cors import CORS
|
5 |
+
from transformers import pipeline
|
6 |
+
import torch
|
7 |
+
|
app = Flask(__name__)
CORS(app)

# Logging setup.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Predefined candidate labels (Arabic place/room categories) used for
# every zero-shot classification request.
PREDEFINED_LABELS = [
    "مطار", "مطعم", "سوق", "حديقة", "ملعب", "مسجد",
    "طريق", "مخبز", "صيدلية", "مستشفى", "مصنع",
    "محطة وقود", "جامعة", "مطبخ", "غرفة نوم",
    "حمام", "غرفة معيشة", "شرفة", "مكتب", "صف دراسي"
]

# Module-level globals populated by initialize() at import time.
MODEL = None            # transformers zero-shot-classification pipeline
DEVICE = None           # "cuda" or "cpu"
LABELS_ENCODED = None   # reserved for a cached label encoding (never assigned anywhere in this file)
def initialize():
    """Load the zero-shot-classification pipeline once at startup.

    Populates the module globals MODEL and DEVICE, then runs one warm-up
    classification with the full label set so the first real request does
    not pay the model-initialization cost.
    """
    global MODEL, DEVICE, LABELS_ENCODED
    # Honour the USE_CUDA env flag (set to "false" in the Docker image /
    # docker-compose) instead of unconditionally grabbing any visible GPU.
    # Default "true" preserves the original auto-detect behaviour when the
    # variable is absent.
    use_cuda = os.getenv("USE_CUDA", "true").lower() == "true"
    DEVICE = "cuda" if use_cuda and torch.cuda.is_available() else "cpu"
    logger.info(f"جار التحميل على الجهاز: {DEVICE}")

    MODEL = pipeline(
        task="zero-shot-classification",
        model="joeddav/xlm-roberta-large-xnli",
        device=DEVICE,
        # fp16 only makes sense on GPU; CPU inference stays in fp32.
        torch_dtype=torch.float16 if DEVICE == "cuda" else torch.float32,
    )

    # Warm-up: one dummy classification over the full label set
    # (result intentionally discarded).
    logger.info("جار تهيئة النموذج مع الـ Labels...")
    MODEL(
        "تهيئة النموذج",
        PREDEFINED_LABELS,
        multi_label=False
    )
    logger.info("تم تحميل النموذج والـ Labels بنجاح")

# Load the model eagerly at import time so gunicorn workers are warm
# before the first request arrives.
initialize()
@app.route('/classify', methods=['POST'])
def classify():
    """Classify the posted text against PREDEFINED_LABELS.

    Expects a JSON body {"text": "..."}. Returns the single best label
    and its score; 400 for a missing/empty text, 500 on unexpected errors.
    """
    try:
        # get_json(silent=True) returns None instead of raising on a
        # missing or malformed JSON body, so client mistakes produce a
        # clean 400 rather than an AttributeError-driven 500
        # (request.json is None when Content-Type is not application/json).
        payload = request.get_json(silent=True) or {}
        text = (payload.get('text') or '').strip()
        if not text:
            return jsonify({"error": "يجب تقديم نص للتصنيف"}), 400

        # Single-label classification over the fixed label set.
        result = MODEL(text, PREDEFINED_LABELS, multi_label=False)

        return jsonify({
            "prediction": {
                "label": result["labels"][0],
                "score": float(result["scores"][0])
            }
        })

    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception(f"خطأ في التصنيف: {str(e)}")
        return jsonify({"error": "حدث خطأ أثناء المعالجة"}), 500
if __name__ == '__main__':
    # Development entry point only; production serves via gunicorn
    # (see the Dockerfile CMD), which imports app:app directly.
    app.run(host='0.0.0.0', port=7860)
docker-compose.yml
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
version: '3.8'

services:
  classifier:
    build: .
    ports:
      - "7860:7860"
    environment:
      - USE_CUDA=false
      # Must match the Dockerfile ENV values so the model cached at build
      # time is found at runtime.
      - HF_HOME=/app/.cache/huggingface
      - TORCH_HOME=/app/.cache/torch
      # Tokenizer thread pools can deadlock after a forking server
      # (gunicorn) spawns workers; the tokenizers library recommends
      # disabling parallelism in that setup.
      - TOKENIZERS_PARALLELISM=false
    volumes:
      - ./logs:/app/logs
      # Persist downloaded model weights across container recreations.
      # NOTE(review): this mount shadows the cache baked into the image,
      # so the very first run on a fresh host re-downloads the model.
      - ./model_cache:/app/.cache
    restart: unless-stopped
    deploy:
      resources:
        limits:
          cpus: '4'
          memory: 16G
        reservations:
          memory: 8G
dockerignore
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.git
__pycache__
*.pyc
*.pyo
*.pyd
.DS_Store
.env
venv
logs/*
*.log
test.py
# Keep the host-side model cache and container build files out of the
# build context: docker-compose mounts ./model_cache over /app/.cache,
# and `COPY . .` would otherwise bake gigabytes of cached weights into
# the image and slow every build.
model_cache
docker-compose.yml
Dockerfile
.dockerignore
requirements.txt
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Web framework + CORS
flask==3.0.2
flask-cors==4.0.0

# Model inference stack (joeddav/xlm-roberta-large-xnli needs
# sentencepiece + protobuf for its tokenizer)
transformers==4.40.1
torch==2.1.0
accelerate==0.30.1
sentencepiece==0.2.0
huggingface-hub==0.23.1
protobuf==3.20.3
numpy==1.26.4

# Production WSGI server (see Dockerfile CMD)
gunicorn==21.2.0

# NOTE(review): onnxruntime, optimum, torchvision and cachetools were
# removed — nothing in this repository imports them and together they
# add hundreds of MB to the image. Restore if ONNX export is planned.