# FastAPI + Gradio coding-assistant chat application (Hugging Face Space).
# --- Dependencies ----------------------------------------------------------
# Standard library
import json
import os
import re

# Third-party
import gradio as gr
import nest_asyncio
import requests
import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel

# --- API configuration -----------------------------------------------------
# OpenRouter credentials come from the environment; this is None when the
# variable is unset (the request would then carry "Bearer None").
API_KEY = os.environ.get("OPENROUTER_API_KEY")
MODEL = "deepseek/deepseek-chat-v3-0324:free"

app = FastAPI()
# --- Chat backend ----------------------------------------------------------
def chat_fn(message, history):
    """Send the conversation to OpenRouter and return the assistant's reply.

    Parameters
    ----------
    message : str
        The user's newest message.
    history : list[tuple[str, str]]
        Prior (user, assistant) exchanges, as produced by gr.ChatInterface.

    Returns
    -------
    str
        The stripped model reply, or a human-readable error string on failure.
    """
    # Fail fast with a clear message instead of sending "Bearer None".
    if not API_KEY:
        return "⚠️ API error: OPENROUTER_API_KEY is not set."

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }

    # System prompt pins the assistant persona and its fallback phrasing.
    # (Mojibake fixed: "I'm" was garbled in the original prompt text.)
    messages = [
        {
            "role": "system",
            "content": (
                "You are a coding assistant with a focus on programming tasks. "
                "You should provide clear, friendly, and concise responses to "
                "coding questions or tasks. If you're unsure, say: 'I'm not "
                "sure about that. Could you rephrase?'"
            ),
        }
    ]
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append(
        {"role": "user", "content": message + "\n\nReply in a natural tone."}
    )

    payload = {
        "model": MODEL,
        "messages": messages,
        "max_tokens": 16384,
        "temperature": 0.7,
        "top_p": 0.9,
    }
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=300,
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:  # network, HTTP, and JSON-shape errors surface here
        reply = f"⚠️ API error: {str(e)[:200]}"
    return reply.strip()
# --- /chat API endpoint -----------------------------------------------------
# Registered explicitly: without @app.post the comment-promised endpoint was
# never attached to the FastAPI app and the handler was dead code.
@app.post("/chat")
async def chat_api(request: Request):
    """JSON chat endpoint.

    Expects a body of the form {"message": str, "history": list}; returns
    {"response": str}. Empty messages get a warning instead of an API call.
    """
    body = await request.json()
    message = body.get("message", "").strip()
    history = body.get("history", [])
    if not message:
        return JSONResponse(content={"response": "⚠️ Please enter a valid message."})
    response = chat_fn(message, history)
    return JSONResponse(content={"response": response})
# --- Request model for the Monaco Editor integration ------------------------
class CodePayload(BaseModel):
    """Body schema for /code-check: the raw source code to analyze."""

    code: str
# Registered explicitly: without @app.post the "/code-check" endpoint promised
# for the Monaco Editor integration was never attached to the app.
@app.post("/code-check")
async def code_check(payload: CodePayload):
    """Ask the model to lint a code snippet and return structured suggestions.

    Returns {"suggestions": [...]} where each suggestion carries message,
    severity, fix, line, startColumn and endColumn keys (as requested from
    the model); on any failure returns an empty list plus an "error" field
    with HTTP 500.
    """
    code = payload.code
    if not code.strip():
        return JSONResponse(content={"suggestions": []})

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    prompt = (
        "Analyze the following code. Identify syntax issues or improvements.\n"
        "Return a JSON array of suggestions with these keys:\n"
        "- message\n"
        "- severity ('error' or 'hint')\n"
        "- fix (string to replace)\n"
        "- line (1-based)\n"
        "- startColumn, endColumn\n\n"
        f"Code:\n{code}\n\n"
        "Respond ONLY with the JSON array."
    )
    api_payload = {
        "model": MODEL,
        "messages": [
            {"role": "system", "content": "You are a code analysis tool."},
            {"role": "user", "content": prompt},
        ],
        "temperature": 0.3,
        "top_p": 0.9,
        "max_tokens": 2048,
    }
    try:
        # timeout added for consistency with chat_fn; a hung upstream call
        # would otherwise block the worker indefinitely.
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=api_payload,
            timeout=300,
        )
        response.raise_for_status()
        raw = response.json()["choices"][0]["message"]["content"]
        # The model may wrap the JSON in prose; extract the first JSON array.
        match = re.search(r"\[\s*{.*?}\s*\]", raw, re.DOTALL)
        suggestions = json.loads(match.group(0)) if match else []
        return JSONResponse(content={"suggestions": suggestions})
    except Exception as e:
        return JSONResponse(content={"suggestions": [], "error": str(e)}, status_code=500)
# --- Gradio interface -------------------------------------------------------
demo = gr.ChatInterface(
    fn=chat_fn,
    # Literal emoji instead of "\ud83d\udcac": Python does not combine a
    # surrogate-pair escape, leaving two unpaired surrogates that raise
    # UnicodeEncodeError as soon as the title is encoded to UTF-8.
    title="💬 CODEX MIRXA KAMRAN",
    description="Ask coding or general programming questions! Short, natural, and helpful responses.",
    theme="soft",
)

# Mount the Gradio UI at the FastAPI root; API routes remain alongside it.
app = gr.mount_gradio_app(app, demo, path="/")
# --- Local debug entry point (Hugging Face Spaces ignores this) -------------
if __name__ == "__main__":
    # nest_asyncio lets uvicorn start even when an event loop is already
    # running (e.g. inside a notebook); harmless in a plain interpreter.
    nest_asyncio.apply()
    uvicorn.run(app, host="0.0.0.0", port=7860)