import json
import random
import string
import time
from typing import Any
from flask import Flask, request
from flask_cors import CORS
from g4f import ChatCompletion, Provider
app = Flask(__name__)
CORS(app)
@app.route("/")
def main():
return """Just GPT3.5 api created by xtekky<br>See <a href='https://huggingface.co/spaces/ddosxd/gpt-3.5-ponos/blob/main/README.md'> for examples</a>"""
@app.route("/chat/completions", methods=["POST"])
def chat_completions():
    model = request.get_json().get("model", "gpt-3.5-turbo")
    stream = request.get_json().get("stream", False)
    messages = request.get_json().get("messages")

    # Forward the request to the DeepAi provider via g4f
    response = ChatCompletion.create(model=model, stream=stream, messages=messages, provider=Provider.DeepAi)

    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
    completion_timestamp = int(time.time())

    if not stream:
        # Non-streaming: return a single OpenAI-style chat.completion object
        return {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response,
                    },
                    "finish_reason": "stop",
                }
            ],
            "usage": {
                "prompt_tokens": None,
                "completion_tokens": None,
                "total_tokens": None,
            },
        }

    def streaming():
        # Streaming: emit OpenAI-style chat.completion.chunk objects as server-sent events
        for chunk in response:
            completion_data = {
                "id": f"chatcmpl-{completion_id}",
                "object": "chat.completion.chunk",
                "created": completion_timestamp,
                "model": model,
                "choices": [
                    {
                        "index": 0,
                        "delta": {
                            "content": chunk,
                        },
                        "finish_reason": None,
                    }
                ],
            }

            content = json.dumps(completion_data, separators=(",", ":"))
            yield f"data: {content}\n\n"
            time.sleep(0.1)

        # Final chunk with an empty delta marks the end of the stream
        end_completion_data: dict[str, Any] = {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion.chunk",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {},
                    "finish_reason": "stop",
                }
            ],
        }
        content = json.dumps(end_completion_data, separators=(",", ":"))
        yield f"data: {content}\n\n"

    return app.response_class(streaming(), mimetype="text/event-stream")
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860, debug=False)
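
# A minimal client sketch for trying the endpoint above. It assumes the server is
# running locally on the port used in app.run() and that the body mirrors the
# OpenAI chat-completions shape this handler reads; adjust host/port as needed.
#
#   import requests
#
#   resp = requests.post(
#       "http://localhost:7860/chat/completions",
#       json={
#           "model": "gpt-3.5-turbo",
#           "stream": False,
#           "messages": [{"role": "user", "content": "Hello!"}],
#       },
#   )
#   print(resp.json()["choices"][0]["message"]["content"])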