Update app.py
app.py CHANGED
@@ -23,6 +23,11 @@ MODEL = "nousresearch/deephermes-3-llama-3-8b-preview:free"
 
 app = FastAPI()
 
+# ✅ Greeting checker
+def is_greeting(text):
+    greetings = ["hi", "hello", "hey", "good morning", "good afternoon", "good evening"]
+    return any(g in text.lower() for g in greetings)
+
 # ✅ Context fetcher
 def get_context(query, top_k=5, threshold=0.3):
     query_vec = embed_model.encode([query])
@@ -43,16 +48,20 @@ def chat_fn(message, history):
 
     context = get_context(message)
 
+    # ✅ Allow fallback for greetings
     if not context.strip():
+        if is_greeting(message):
+            return "👋 Hello! How can I assist you today?"
         return "❌ I couldn’t find any relevant info to answer that. Please ask something else."
 
+    # ✅ Construct message payload
     messages = [
         {
             "role": "system",
             "content": (
-                "You are a
-                "
-                "
+                "You are a helpful, precise assistant. Use the provided context to answer when available."
+                " If no relevant context is found, reply politely or ask the user to rephrase."
+                "\n\nContext:\n" + context
             )
         }
     ]
@@ -66,7 +75,7 @@ def chat_fn(message, history):
     payload = {
         "model": MODEL,
         "messages": messages,
-        "max_tokens":
+        "max_tokens": 300,
     }
 
     try:
@@ -74,7 +83,7 @@ def chat_fn(message, history):
             "https://openrouter.ai/api/v1/chat/completions",
             headers=headers,
             json=payload,
-            timeout=30
+            timeout=30
         )
         response.raise_for_status()
         reply = response.json()["choices"][0]["message"]["content"]