# unimy/app.py
import json
import random
import string
import time
from typing import Any
from freeGPT import gpt3  # the handler below calls gpt3.Completion.create
from asyncio import run
from flask import Flask, request
from flask_cors import CORS
from g4f import ChatCompletion, Provider
app = Flask(__name__)
CORS(app)
@app.route("/")
def main():
return """Just iqai.ru for more!!!"""
@app.route("/chat/completions", methods=["POST"])
def chatcompletions():
    data = request.get_json()
    model = data.get("model", "gpt-3.5-turbo")
    stream = data.get("stream", False)
    messages = data.get("messages")
    if messages is None:
        return {"error": "No messages provided"}, 400

    # Forward the raw messages to the freeGPT gpt3 backend.
    response = gpt3.Completion.create(prompt=messages)

    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
    completion_timestamp = int(time.time())

    # Non-streaming: return a single OpenAI-style chat.completion object.
    if not stream:
        return {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response,
                    },
                    "finish_reason": "stop",
                }
            ],
            "usage": {
                "prompt_tokens": None,
                "completion_tokens": None,
                "total_tokens": None,
            },
        }

    # Streaming: yield OpenAI-style chat.completion.chunk objects as
    # server-sent events, one piece of the response at a time.
    def streaming():
        for chunk in response:
            completion_data = {
                "id": f"chatcmpl-{completion_id}",
                "object": "chat.completion.chunk",
                "created": completion_timestamp,
                "model": model,
                "choices": [
                    {
                        "index": 0,
                        "delta": {
                            "content": chunk,
                        },
                        "finish_reason": None,
                    }
                ],
            }
            content = json.dumps(completion_data, separators=(",", ":"))
            yield f"data: {content}\n\n"
            time.sleep(0.1)

        # Final chunk with an empty delta signals the end of the stream.
        end_completion_data: dict[str, Any] = {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion.chunk",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {},
                    "finish_reason": "stop",
                }
            ],
        }
        content = json.dumps(end_completion_data, separators=(",", ":"))
        yield f"data: {content}\n\n"

    return app.response_class(streaming(), mimetype="text/event-stream")
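

# Example: consuming the streaming endpoint. A sketch only, not part of the app;
# it assumes the server is running locally on port 7860 and that the `requests`
# package is installed.
#
#   import requests
#
#   with requests.post(
#       "http://localhost:7860/chat/completions",
#       json={"messages": [{"role": "user", "content": "Hello!"}], "stream": True},
#       stream=True,
#   ) as resp:
#       for line in resp.iter_lines():
#           if line.startswith(b"data: "):
#               print(line.removeprefix(b"data: ").decode())
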
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860, use_reloader=False)
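

# Example: a plain (non-streaming) request against the same endpoint. Again a
# sketch under the same assumptions (local server on port 7860, `requests`
# installed); the response mirrors the OpenAI-style chat.completion shape
# built above.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:7860/chat/completions",
#       json={"messages": [{"role": "user", "content": "Hello!"}], "stream": False},
#   )
#   print(resp.json()["choices"][0]["message"]["content"])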