Hennassi commited on
Commit
32ce9ae
·
verified ·
1 Parent(s): f8879bc

Upload 5 files

Browse files
Files changed (5) hide show
  1. app.js +29 -0
  2. app.py +41 -0
  3. index.html +18 -0
  4. requirements.txt +6 -0
  5. style.css +5 -0
app.js ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// NOVA AI chat client: wires the input box and send button to the
// backend POST /chat endpoint and renders both sides of the conversation.
const chatBox = document.getElementById("chat-box");
const userInput = document.getElementById("user-input");
const sendBtn = document.getElementById("send-btn");

// Append one message line to the transcript and keep it scrolled to the
// bottom. textContent (not innerHTML) so model output cannot inject HTML.
const appendMessage = (text) => {
  const line = document.createElement("div");
  line.textContent = text;
  chatBox.appendChild(line);
  chatBox.scrollTop = chatBox.scrollHeight;
};

sendBtn.onclick = async () => {
  // Trim so whitespace-only input is ignored (original accepted " ").
  const message = userInput.value.trim();
  if (!message) return;

  // Show the user's message immediately (Arabic prefix: "You:").
  appendMessage("أنت: " + message);
  userInput.value = "";

  // Send to the API. Original bug: no error handling — a network or
  // server failure became an unhandled promise rejection and the user
  // got no feedback. Surface failures in the chat box instead.
  try {
    const response = await fetch("http://localhost:8000/chat", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ user_id: "user1", message: message })
    });
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}`);
    }
    const data = await response.json();
    appendMessage("NOVA AI: " + data.response);
  } catch (err) {
    appendMessage("NOVA AI: (error) " + err.message);
  }
};
app.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# NOVA AI chat backend: FastAPI service wrapping a Hugging Face causal LM.
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

app = FastAPI()

# Open-source model (original Arabic comment: "open-source model").
MODEL_NAME = "TheBloke/vicuna-7B-1.1-HF"
# Loaded once at import time. device_map="auto" lets accelerate place the
# layers on available devices; float16 halves the memory footprint.
# NOTE(review): this download/load is slow and needs several GB of
# RAM/VRAM — confirm the deployment target can hold a 7B model.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, device_map="auto", torch_dtype=torch.float16)

# Per-user conversation history: user_id -> list of (question, answer)
# tuples (original Arabic comment: "storing chat history"). In-memory
# only: lost on restart and not shared across multiple worker processes.
chat_histories = {}
15
+
16
class ChatRequest(BaseModel):
    """Request body for POST /chat."""
    # Selects the per-user history bucket in chat_histories.
    user_id: str
    # Raw user chat text to answer.
    message: str
19
+
20
@app.post("/chat")
def chat_endpoint(request: ChatRequest):
    """Generate one NOVA AI reply for a user message, keeping per-user history.

    Builds a plain-text transcript (persona line + recent turns + new
    message), generates up to 200 new tokens, stores the turn, and returns
    {"response": <reply>}.
    """
    user_id = request.user_id
    message = request.message

    if user_id not in chat_histories:
        chat_histories[user_id] = []

    # Persona preamble (Arabic: "I'm a comedian and Moroccan. I
    # understand anything.") followed by the stored turns.
    conversation = "NOVA AI: أنا كوميدي ومغربي. نفهم أي حاجة.\n"
    for q, a in chat_histories[user_id]:
        conversation += f"User: {q}\nNOVA AI: {a}\n"
    conversation += f"User: {message}\nNOVA AI:"

    inputs = tokenizer(conversation, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=200)

    # Decode only the newly generated tokens. Original bug: it decoded the
    # whole sequence and took split("NOVA AI:")[-1], which returns the
    # wrong text whenever the user's own message contains "NOVA AI:".
    prompt_len = inputs["input_ids"].shape[1]
    response = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    # A raw LM keeps writing the transcript; cut off any hallucinated
    # next "User:" turn so only NOVA's reply is returned.
    response = response.split("User:")[0].strip()

    chat_histories[user_id].append((message, response))
    # Bound memory: keep only the 10 most recent (question, answer) turns.
    if len(chat_histories[user_id]) > 10:
        chat_histories[user_id] = chat_histories[user_id][-10:]

    return {"response": response}
index.html ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!DOCTYPE html>
<!-- NOVA AI chat UI: static page; app.js binds the widgets below to the backend. -->
<html lang="ar">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>NOVA AI Chat</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<div class="container">
<h1>NOVA AI Chat 💡</h1>
<!-- Transcript: app.js appends one <div> per message here. -->
<div id="chat-box"></div>
<!-- Input and send button; both ids are looked up by app.js.
     Placeholder (Arabic): "Enter your question here..."; button: "Send". -->
<input type="text" id="user-input" placeholder="أدخل سؤالك هنا...">
<button id="send-btn">إرسال</button>
</div>
<script src="app.js"></script>
</body>
</html>
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ torch>=2.0
2
+ transformers
3
+ accelerate
4
+ safetensors
5
+ fastapi
6
+ uvicorn
style.css ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
/* Dark-theme styling for the NOVA AI chat page. */

/* Center the chat column in the full viewport. */
body {
  font-family: Arial, sans-serif;
  background: #121212;
  color: #fff;
  display: flex;
  justify-content: center;
  align-items: center;
  height: 100vh;
  margin: 0;
}

/* Fixed-width chat column. */
.container {
  width: 400px;
}

/* Scrollable message transcript. */
#chat-box {
  height: 300px;
  overflow-y: auto;
  border: 1px solid #555;
  padding: 10px;
  margin-bottom: 10px;
  background: #1e1e1e;
}

/* Text input takes most of the row... */
#user-input {
  width: 75%;
  padding: 10px;
}

/* ...and the cyan send button fills the rest. */
#send-btn {
  padding: 10px;
  width: 20%;
  background: #0ff;
  color: #000;
  border: none;
  cursor: pointer;
}