MiklX committed on
Commit
68d114f
·
1 Parent(s): 4920ca4

Create copyapp.py

Browse files
Files changed (1) hide show
  1. copyapp.py +106 -0
copyapp.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import random
3
+ import string
4
+ import time
5
+ from typing import Any
6
+
7
+ #new
8
+ import freeGPT
9
+ from asyncio import run
10
+ #/new
11
+
12
+ from flask import Flask, request
13
+ from flask_cors import CORS
14
+
15
+ from g4f import ChatCompletion, Provider
16
+
17
# Flask application object; CORS is enabled for all routes so browser
# clients served from other origins can call this API directly.
app = Flask(__name__)
CORS(app)
19
+
20
@app.route("/")
def main():
    """Landing page: a plain-text pointer to the project site."""
    landing_text = """Just iqai.ru for more!!!"""
    return landing_text
23
+
24
@app.route("/chat/completions", methods=["POST"])
def chat_completions():
    """OpenAI-style ``/chat/completions`` endpoint backed by freeGPT.

    Expects an OpenAI-format JSON body with optional ``model`` (default
    ``gpt-3.5-turbo``), optional ``stream`` (default ``False``), and
    ``messages``.  Returns either a single ``chat.completion`` dict or,
    when ``stream`` is truthy, a ``text/event-stream`` response of
    ``chat.completion.chunk`` SSE events.
    """
    # Parse the request body once instead of calling request.get_json()
    # three separate times (the original re-parsed per field).
    payload = request.get_json()
    model = payload.get("model", "gpt-3.5-turbo")
    stream = payload.get("stream", False)
    messages = payload.get("messages")

    #old
    #response = ChatCompletion.create(model=model, messages=messages)
    #/old

    #new
    # NOTE(review): the raw OpenAI-style ``messages`` list is passed
    # straight to freeGPT here — confirm freeGPT's Completion.create
    # accepts message dicts rather than a single prompt string.
    response = freeGPT.gpt3.Completion().create(messages)
    #/new

    # Pseudo-random id in OpenAI's "chatcmpl-..." format; random (not
    # secrets) is fine because the id is cosmetic, not security-relevant.
    completion_id = "".join(random.choices(string.ascii_letters + string.digits, k=28))
    completion_timestamp = int(time.time())

    if not stream:
        return {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response,
                    },
                    "finish_reason": "stop",
                }
            ],
            # Token accounting is not provided by the freeGPT backend.
            "usage": {
                "prompt_tokens": None,
                "completion_tokens": None,
                "total_tokens": None,
            },
        }

    def streaming():
        """Yield SSE ``data:`` events, one delta per element of *response*."""
        # NOTE(review): if ``response`` is a plain str, iterating yields
        # one character per chunk — confirm that granularity is intended.
        for chunk in response:
            completion_data = {
                "id": f"chatcmpl-{completion_id}",
                "object": "chat.completion.chunk",
                "created": completion_timestamp,
                "model": model,
                "choices": [
                    {
                        "index": 0,
                        "delta": {
                            "content": chunk,
                        },
                        "finish_reason": None,
                    }
                ],
            }

            content = json.dumps(completion_data, separators=(",", ":"))
            yield f"data: {content}\n\n"
            time.sleep(0.1)  # throttle so clients see incremental output

        # Terminal chunk: empty delta with finish_reason "stop", per the
        # OpenAI streaming protocol.
        end_completion_data: dict[str, Any] = {
            "id": f"chatcmpl-{completion_id}",
            "object": "chat.completion.chunk",
            "created": completion_timestamp,
            "model": model,
            "choices": [
                {
                    "index": 0,
                    "delta": {},
                    "finish_reason": "stop",
                }
            ],
        }
        content = json.dumps(end_completion_data, separators=(",", ":"))
        yield f"data: {content}\n\n"

    return app.response_class(streaming(), mimetype="text/event-stream")
103
+
104
+
105
# Script entry point: serve the API on all interfaces, port 7860.
# NOTE(review): binding 0.0.0.0 exposes the service to the network —
# confirm this deployment is intentional.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=7860, debug=False)