import json
import random
import string
import time
from typing import Any
import freeGPT
from asyncio import run
from flask import Flask, request
from flask_cors import CORS
from g4f import ChatCompletion, Provider
# Flask application instance; CORS is enabled for all origins so
# browser-based clients on other domains can call this API directly.
app = Flask(__name__)
CORS(app)
@app.route("/")
def main():
    """Landing route: return a short plain-text pointer to the project site."""
    return "Just iqai.ru for more!!!"
@app.route("/chat/completions", methods=["POST"])
def chatcompletions():
    """OpenAI-compatible ``/chat/completions`` endpoint backed by g4f.

    Accepts the standard OpenAI request body (``model``, ``messages``,
    ``stream``) and returns either a complete ``chat.completion`` JSON
    object or a server-sent-event stream of ``chat.completion.chunk``
    events, mirroring the OpenAI wire format.
    """
    # silent=True avoids Flask raising on a missing/invalid JSON body;
    # we answer with our own JSON error instead.
    data = request.get_json(silent=True)
    if not data:
        return {"error": "Invalid or missing JSON body"}, 400

    model = data.get("model", "gpt-3.5-turbo")
    stream = data.get("stream", False)
    messages = data.get("messages")
    if messages is None:
        return {"error": "No messages provided"}, 400

    # BUG FIX: the original called the undefined name `gpt3`; the file
    # imports ChatCompletion from g4f, which is the intended backend.
    # Passing stream=stream makes the backend yield incremental chunks
    # when the client requested streaming (the original read `stream`
    # but never forwarded it).
    response = ChatCompletion.create(model=model, messages=messages, stream=stream)

    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
    completion_timestamp = int(time.time())

    if not stream:
        # Non-streaming: one chat.completion object. Keys follow the
        # OpenAI schema (snake_case: finish_reason, prompt_tokens, ...);
        # token usage is not tracked by the backend, hence None.
        return {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response,
                    },
                    "finish_reason": "stop",
                }
            ],
            "usage": {
                "prompt_tokens": None,
                "completion_tokens": None,
                "total_tokens": None,
            },
        }

    def streaming():
        # Server-sent events: each event is "data: <json>\n\n".
        # (The original emitted a literal "nn" — escapes lost in transit —
        # which breaks every SSE client.)
        for chunk in response:
            completion_data = {
                "id": f"chatcmpl-{completion_id}",
                "object": "chat.completion.chunk",
                "created": completion_timestamp,
                "model": model,
                "choices": [
                    {
                        "index": 0,
                        "delta": {
                            "content": chunk,
                        },
                        "finish_reason": None,
                    }
                ],
            }
            content = json.dumps(completion_data, separators=(",", ":"))
            yield f"data: {content}\n\n"
            # Small pacing delay between chunks, kept from the original.
            time.sleep(0.1)

        # Terminal chunk: empty delta with finish_reason "stop", followed
        # by the OpenAI-style [DONE] sentinel so standard clients stop reading.
        end_completion_data: dict[str, Any] = {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion.chunk",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {},
                    "finish_reason": "stop",
                }
            ],
        }
        content = json.dumps(end_completion_data, separators=(",", ":"))
        yield f"data: {content}\n\n"
        yield "data: [DONE]\n\n"

    return app.response_class(streaming(), mimetype="text/event-stream")
# Script entry point: run the Flask development server, listening on all
# interfaces. The reloader is disabled so the process is not forked.
# NOTE(review): port 7860 is presumably chosen for a hosted environment
# (it is the Hugging Face Spaces default) — confirm with deployment config.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860, use_reloader=False)