Spaces:
Sleeping
Sleeping
File size: 4,256 Bytes
f5a644c 03021b9 1792ed8 03021b9 f5a644c 03021b9 43086b1 03021b9 1792ed8 f5a644c 05c0b76 03021b9 43086b1 03021b9 43086b1 03021b9 f5a644c 43086b1 03021b9 43086b1 03021b9 f5a644c 03021b9 43086b1 03021b9 1792ed8 03021b9 1792ed8 03021b9 f5a644c 43086b1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 |
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel
import gradio as gr
import os
import requests
import nest_asyncio
import uvicorn
import re
import json
# --- API config ---
API_KEY = os.environ.get("OPENROUTER_API_KEY")  # OpenRouter API key; None when the env var is unset
MODEL = "deepseek/deepseek-chat-v3-0324:free"   # free-tier DeepSeek chat model on OpenRouter
app = FastAPI()  # main ASGI app; Gradio UI is mounted onto it further below
# --- Chat function ---
def chat_fn(message, history):
    """Send the conversation to OpenRouter and return the assistant's reply.

    Parameters:
        message: the latest user message (str).
        history: prior turns as (user, assistant) pairs — the shape Gradio's
            ChatInterface passes.

    Returns:
        The reply text, stripped, or a short error string if the API call
        fails (keeps the UI alive instead of crashing).
    """
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }
    # System prompt pins the assistant persona and a fixed fallback phrase.
    # NOTE: the source had mojibake here ("Iβm"); restored to "I'm".
    messages = [
        {
            "role": "system",
            "content": (
                "You are a coding assistant with a focus on programming tasks. You should provide clear, friendly, "
                "and concise responses to coding questions or tasks. If you're unsure, say: 'I'm not sure about that. Could you rephrase?'"
            )
        }
    ]
    # Replay previous turns so the model sees the full conversation context.
    for user, assistant in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message + "\n\nReply in a natural tone."})
    payload = {
        "model": MODEL,
        "messages": messages,
        "max_tokens": 16384,
        "temperature": 0.7,
        "top_p": 0.9
    }
    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers=headers,
            json=payload,
            timeout=300
        )
        response.raise_for_status()
        reply = response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        # Best effort: surface a truncated error message rather than raising,
        # so the chat UI shows something useful on network/API failure.
        reply = f"⚠️ API error: {str(e)[:200]}"
    return reply.strip()
# --- /chat API endpoint ---
@app.post("/chat")
async def chat_api(request: Request):
body = await request.json()
message = body.get("message", "").strip()
history = body.get("history", [])
if not message:
return JSONResponse(content={"response": "β οΈ Please enter a valid message."})
response = chat_fn(message, history)
return JSONResponse(content={"response": response})
# --- New: /code-check endpoint for Monaco Editor integration ---
class CodePayload(BaseModel):
    """Request body for /code-check: the raw source code to analyze."""
    # code: the source text sent from the Monaco editor
    code: str
@app.post("/code-check")
async def code_check(payload: CodePayload):
code = payload.code
if not code.strip():
return JSONResponse(content={"suggestions": []})
headers = {
"Authorization": f"Bearer {API_KEY}",
"Content-Type": "application/json"
}
prompt = (
"Analyze the following code. Identify syntax issues or improvements.\n"
"Return a JSON array of suggestions with these keys:\n"
"- message\n"
"- severity ('error' or 'hint')\n"
"- fix (string to replace)\n"
"- line (1-based)\n"
"- startColumn, endColumn\n\n"
f"Code:\n{code}\n\n"
"Respond ONLY with the JSON array."
)
api_payload = {
"model": MODEL,
"messages": [
{"role": "system", "content": "You are a code analysis tool."},
{"role": "user", "content": prompt}
],
"temperature": 0.3,
"top_p": 0.9,
"max_tokens": 2048
}
try:
response = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=api_payload)
response.raise_for_status()
raw = response.json()["choices"][0]["message"]["content"]
# Try to extract JSON array from output
match = re.search(r"\[\s*{.*?}\s*\]", raw, re.DOTALL)
suggestions = json.loads(match.group(0)) if match else []
return JSONResponse(content={"suggestions": suggestions})
except Exception as e:
return JSONResponse(content={"suggestions": [], "error": str(e)}, status_code=500)
# --- Gradio interface ---
# Chat UI backed by chat_fn. The title previously used raw surrogate escapes
# ("\ud83d\udcac"), which produce a string Python cannot UTF-8-encode; replaced
# with the intended 💬 character.
demo = gr.ChatInterface(
    fn=chat_fn,
    title="💬 CODEX MIRXA KAMRAN",
    description="Ask coding or general programming questions! Short, natural, and helpful responses.",
    theme="soft"
)
# --- Mount Gradio on FastAPI ---
app = gr.mount_gradio_app(app, demo, path="/")
# --- Local debug (won't run on HF Spaces, but okay locally) ---
if __name__ == "__main__":
nest_asyncio.apply()
uvicorn.run(app, host="0.0.0.0", port=7860) |