Dooratre committed on
Commit efd2d4b · verified · 1 Parent(s): fff734a

Upload 4 files

Files changed (3)
  1. db_analysis.py +125 -0
  2. db_signals.py +125 -0
  3. db_system.py +125 -0
db_analysis.py ADDED
@@ -0,0 +1,125 @@
+ import requests
+ from bs4 import BeautifulSoup
+ import json
+
+ TOKEN = "_device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; _octo=GH1.1.1517954811.1753352111; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; ai_session=v+6N2XPmhlxugZWyisQ+ZD|1753646990500|1753646990500; saved_user_sessions=155741452%3ASnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; user_session=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; __Host-user_session_same_site=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; tz=Africa%2FTripoli; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; logged_in=yes; dotcom_user=omarnuwrar; _gh_sess=7FzMK5K8ffnmyyh5LKKv%2FOXiqZJR4qLXxTdaV66E844ZCPq5qw%2FClaVmXHNfu8oc61N461wsjEr7d8vhEwrs0N0X7ITUed9Zj01RnwHGT8mMRUn6oYSv94LpIh2FwmotPwp8jkSQkZ%2BotdEpdYtp3ZoJZKfiZOcpHBtT7g2VwIPgoW2Qx5RpnKNdI3Hq31C6IIPaSzAqqny7O7c6L8nWv1nfx%2FAbF4UFSo7UfW%2F9JLUYF5lVJ2kXdYoesKOL7c2KItGDTaZCwjYr9cHKlHWD4E9wLo22GjFveVKxrEz5dgIrNdAj8WxWXuY5Ou4eYmxaBn2ovIhvnFz8%2F6qLURX81YxLLZbymGERA3MaRzDDzY3yE76U8y8lLPve0Duqc0lr34R3XUiMKE5A3%2FNPQ273e36yNlLsgBGDyuYIEcsQ84XLq2IQygBxX4y%2B6WSPwXAgOku6MiEP8Ro9ihF6scOhbJRrVCPp0toSY3RmJToUy6XRmBF2B0oyJstKbqLPfmZI8p%2B2bQo8DBKARHWWUzTJdjF%2BfgZtm%2Flb3qijcKT5I6SPU%2BiLMH%2Fl2GwHw73d1OFGUNy4tdLT5SO5vCFrf1GIiV7qUhhQdA21dXsAeQ4qoF5LHiGftyhOUBHto3ZZB%2FJ87uqACflXOfbbTCQCAYNa2u4o8I9iKQp9r2ripVxqQF1oyVu12FSIN%2BS%2Fd4Rm%2FN7E1tOw3tcVgYcsFEcbsOViUZBXXmo1Qfd9H%2B4IGnbv3hZe%2FPeJqb33SxWeQpamEWhLjVJL2hMCbZ8v79azeUL93QzkLXuryStKTXOdoyrbD2n93V36z5Sxhzi9Ku6OxVK1PCZW0R7JiYtQOWoeMAMd4oe3Bqrxyc%2BdAdb0sW3L%2FOD8J2nbvJ5gGA%3D%3D--Ngvrt5zzlDZazWNi--k%2F8wjhX57aMmLOJc8i6L7w%3D%3D"
+ # Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
+ def fetch_authenticity_token_and_commit_oid():
+     url = "https://github.com/omarnuwrar/Trading/edit/main/analysis.json"
+
+     headers = {
+         "cookie": TOKEN,
+         "if-none-match": 'W/"2ff86bd1792cfee5ed79ee070b3b46de"',
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-github-target": "dotcom",
+         "x-react-router": "json",
+         "x-requested-with": "XMLHttpRequest",
+     }
+
+     response = requests.get(url, headers=headers)
+
+     if response.status_code == 200:
+         soup = BeautifulSoup(response.text, 'html.parser')
+         script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})
+
+         if script_tag:
+             try:
+                 json_data = json.loads(script_tag.string.strip())
+                 authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/analysis.json"]["post"]
+                 commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
+                 return authenticity_token, commit_oid
+             except (KeyError, json.JSONDecodeError) as e:
+                 print(f"Error: Failed to extract data. Details: {str(e)}")
+                 return None, None
+         else:
+             print("Error: Could not find the required <script> tag.")
+             return None, None
+     else:
+         print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
+         return None, None
+
+ # Step 2: Send the POST request to update the analysis.json file
+ def update_user_json_file(authenticity_token, commit_oid, new_content):
+     url = "https://github.com/omarnuwrar/Trading/tree-save/main/analysis.json"
+
+     headers = {
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-requested-with": "XMLHttpRequest",
+         "github-verified-fetch": "true",
+         "content-type": "application/x-www-form-urlencoded",
+         "cookie": TOKEN,
+     }
+
+     payload = {
+         "message": "Update analysis.json",
+         "placeholder_message": "Update analysis.json",
+         "description": "",
+         "commit-choice": "direct",
+         "target_branch": "main",
+         "quick_pull": "",
+         "guidance_task": "",
+         "commit": commit_oid,
+         "same_repo": "1",
+         "pr": "",
+         "content_changed": "true",
+         "filename": "analysis.json",
+         "new_filename": "analysis.json",
+         "value": new_content,
+         "authenticity_token": authenticity_token,
+     }
+
+     response = requests.post(url, headers=headers, data=payload)
+
+     if response.status_code == 200:
+         return {"success": True, "message": "analysis.json has been updated!"}
+     else:
+         return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}
+
+
+
+ # Function to fetch and extract the JSON data
+ def fetch_json_from_github():
+     # URL of the GitHub page
+     url = "https://github.com/omarnuwrar/Trading/blob/main/analysis.json"
+
+     # Custom headers
+     headers = {
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "Cookie": TOKEN
+     }
+
+     try:
+         # Fetch the HTML content of the page
+         response = requests.get(url, headers=headers)
+         response.raise_for_status()  # Raise an exception for HTTP errors
+
+         # Parse the HTML using BeautifulSoup
+         soup = BeautifulSoup(response.text, 'html.parser')
+
+         # Find the <script> tag with type="application/json" and `data-target="react-app.embeddedData"`
+         script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
+         if script_tag:
+             # Load the JSON content from the <script> tag
+             embedded_data = json.loads(script_tag.string)
+
+             # Navigate to the "blob" > "rawLines" key for the JSON in the file
+             raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
+             if raw_lines:
+                 # The JSON content is in the first element of the rawLines list
+                 json_content = raw_lines[0]
+
+                 # Parse the JSON content
+                 data = json.loads(json_content)
+
+                 # Return the extracted JSON data
+                 return {"success": True, "data": data}
+             else:
+                 return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
+         else:
+             return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
+     except requests.exceptions.RequestException as e:
+         return {"success": False, "message": f"Error fetching data: {e}"}
+     except json.JSONDecodeError as je:
+         return {"success": False, "message": f"Error parsing JSON: {je}"}
+
+
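A minimal driver sketch for this module, not part of the commit itself: it chains fetch_authenticity_token_and_commit_oid() and update_user_json_file() to overwrite analysis.json through GitHub's web UI. The payload passed as new_content is hypothetical; the schema expected by the rest of the project is not shown in this commit.

    import json
    from db_analysis import fetch_authenticity_token_and_commit_oid, update_user_json_file

    # Hypothetical example payload; the real shape of analysis.json is not part of this commit.
    new_content = json.dumps({"EURUSD": {"status": "pending"}})

    token, commit_oid = fetch_authenticity_token_and_commit_oid()
    if token and commit_oid:
        result = update_user_json_file(token, commit_oid, new_content)
        print(result["message"])
    else:
        print("Could not obtain authenticity_token / commitOid; aborting update.")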
db_signals.py ADDED
@@ -0,0 +1,125 @@
+ import requests
+ from bs4 import BeautifulSoup
+ import json
+
+ TOKEN = "_device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; _octo=GH1.1.1517954811.1753352111; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; ai_session=v+6N2XPmhlxugZWyisQ+ZD|1753646990500|1753646990500; saved_user_sessions=155741452%3ASnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; user_session=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; __Host-user_session_same_site=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; tz=Africa%2FTripoli; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; logged_in=yes; dotcom_user=omarnuwrar; _gh_sess=7FzMK5K8ffnmyyh5LKKv%2FOXiqZJR4qLXxTdaV66E844ZCPq5qw%2FClaVmXHNfu8oc61N461wsjEr7d8vhEwrs0N0X7ITUed9Zj01RnwHGT8mMRUn6oYSv94LpIh2FwmotPwp8jkSQkZ%2BotdEpdYtp3ZoJZKfiZOcpHBtT7g2VwIPgoW2Qx5RpnKNdI3Hq31C6IIPaSzAqqny7O7c6L8nWv1nfx%2FAbF4UFSo7UfW%2F9JLUYF5lVJ2kXdYoesKOL7c2KItGDTaZCwjYr9cHKlHWD4E9wLo22GjFveVKxrEz5dgIrNdAj8WxWXuY5Ou4eYmxaBn2ovIhvnFz8%2F6qLURX81YxLLZbymGERA3MaRzDDzY3yE76U8y8lLPve0Duqc0lr34R3XUiMKE5A3%2FNPQ273e36yNlLsgBGDyuYIEcsQ84XLq2IQygBxX4y%2B6WSPwXAgOku6MiEP8Ro9ihF6scOhbJRrVCPp0toSY3RmJToUy6XRmBF2B0oyJstKbqLPfmZI8p%2B2bQo8DBKARHWWUzTJdjF%2BfgZtm%2Flb3qijcKT5I6SPU%2BiLMH%2Fl2GwHw73d1OFGUNy4tdLT5SO5vCFrf1GIiV7qUhhQdA21dXsAeQ4qoF5LHiGftyhOUBHto3ZZB%2FJ87uqACflXOfbbTCQCAYNa2u4o8I9iKQp9r2ripVxqQF1oyVu12FSIN%2BS%2Fd4Rm%2FN7E1tOw3tcVgYcsFEcbsOViUZBXXmo1Qfd9H%2B4IGnbv3hZe%2FPeJqb33SxWeQpamEWhLjVJL2hMCbZ8v79azeUL93QzkLXuryStKTXOdoyrbD2n93V36z5Sxhzi9Ku6OxVK1PCZW0R7JiYtQOWoeMAMd4oe3Bqrxyc%2BdAdb0sW3L%2FOD8J2nbvJ5gGA%3D%3D--Ngvrt5zzlDZazWNi--k%2F8wjhX57aMmLOJc8i6L7w%3D%3D"
+ # Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
+ def fetch_authenticity_token_and_commit_oid():
+     url = "https://github.com/omarnuwrar/Trading/edit/main/signals.json"
+
+     headers = {
+         "cookie": TOKEN,
+         "if-none-match": 'W/"2ff86bd1792cfee5ed79ee070b3b46de"',
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-github-target": "dotcom",
+         "x-react-router": "json",
+         "x-requested-with": "XMLHttpRequest",
+     }
+
+     response = requests.get(url, headers=headers)
+
+     if response.status_code == 200:
+         soup = BeautifulSoup(response.text, 'html.parser')
+         script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})
+
+         if script_tag:
+             try:
+                 json_data = json.loads(script_tag.string.strip())
+                 authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/signals.json"]["post"]
+                 commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
+                 return authenticity_token, commit_oid
+             except (KeyError, json.JSONDecodeError) as e:
+                 print(f"Error: Failed to extract data. Details: {str(e)}")
+                 return None, None
+         else:
+             print("Error: Could not find the required <script> tag.")
+             return None, None
+     else:
+         print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
+         return None, None
+
+ # Step 2: Send the POST request to update the signals.json file
+ def update_user_json_file(authenticity_token, commit_oid, new_content):
+     url = "https://github.com/omarnuwrar/Trading/tree-save/main/signals.json"
+
+     headers = {
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-requested-with": "XMLHttpRequest",
+         "github-verified-fetch": "true",
+         "content-type": "application/x-www-form-urlencoded",
+         "cookie": TOKEN,
+     }
+
+     payload = {
+         "message": "Update signals.json",
+         "placeholder_message": "Update signals.json",
+         "description": "",
+         "commit-choice": "direct",
+         "target_branch": "main",
+         "quick_pull": "",
+         "guidance_task": "",
+         "commit": commit_oid,
+         "same_repo": "1",
+         "pr": "",
+         "content_changed": "true",
+         "filename": "signals.json",
+         "new_filename": "signals.json",
+         "value": new_content,
+         "authenticity_token": authenticity_token,
+     }
+
+     response = requests.post(url, headers=headers, data=payload)
+
+     if response.status_code == 200:
+         return {"success": True, "message": "signals.json has been updated!"}
+     else:
+         return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}
+
+
+
+ # Function to fetch and extract the JSON data
+ def fetch_json_from_github():
+     # URL of the GitHub page
+     url = "https://github.com/omarnuwrar/Trading/blob/main/signals.json"
+
+     # Custom headers
+     headers = {
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "Cookie": TOKEN
+     }
+
+     try:
+         # Fetch the HTML content of the page
+         response = requests.get(url, headers=headers)
+         response.raise_for_status()  # Raise an exception for HTTP errors
+
+         # Parse the HTML using BeautifulSoup
+         soup = BeautifulSoup(response.text, 'html.parser')
+
+         # Find the <script> tag with type="application/json" and `data-target="react-app.embeddedData"`
+         script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
+         if script_tag:
+             # Load the JSON content from the <script> tag
+             embedded_data = json.loads(script_tag.string)
+
+             # Navigate to the "blob" > "rawLines" key for the JSON in the file
+             raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
+             if raw_lines:
+                 # The JSON content is in the first element of the rawLines list
+                 json_content = raw_lines[0]
+
+                 # Parse the JSON content
+                 data = json.loads(json_content)
+
+                 # Return the extracted JSON data
+                 return {"success": True, "data": data}
+             else:
+                 return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
+         else:
+             return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
+     except requests.exceptions.RequestException as e:
+         return {"success": False, "message": f"Error fetching data: {e}"}
+     except json.JSONDecodeError as je:
+         return {"success": False, "message": f"Error parsing JSON: {je}"}
+
+
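A read-side sketch, again assuming db_signals.py is importable as a module: fetch_json_from_github() returns a dict with a success flag rather than raising, so callers branch on that flag. The len() call assumes signals.json holds a JSON object or array.

    from db_signals import fetch_json_from_github

    result = fetch_json_from_github()
    if result["success"]:
        signals = result["data"]  # parsed contents of signals.json (assumed dict or list)
        print(f"Loaded {len(signals)} top-level entries from signals.json")
    else:
        print(f"Fetch failed: {result['message']}")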
db_system.py ADDED
@@ -0,0 +1,125 @@
+ import requests
+ from bs4 import BeautifulSoup
+ import json
+
+ TOKEN = "_device_id=0038e28d4f7d4f9baf8f76b6b9fb8980; GHCC=Required:1-Analytics:1-SocialMedia:1-Advertising:1; MicrosoftApplicationsTelemetryDeviceId=c58113b4-9acb-4ba8-b9f2-4217bdef379a; MSFPC=GUID=79b87b010d464a8783fbf43e19eccddf&HASH=79b8&LV=202408&V=4&LU=1723654762596; _octo=GH1.1.1517954811.1753352111; cpu_bucket=lg; preferred_color_mode=dark; tz=Africa%2FTripoli; ai_session=v+6N2XPmhlxugZWyisQ+ZD|1753646990500|1753646990500; saved_user_sessions=155741452%3ASnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; user_session=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; __Host-user_session_same_site=SnvRnrrf0nAjVGrTz3q28Oda2y6wCt6rCADoDwrCx2M6pORt; tz=Africa%2FTripoli; color_mode=%7B%22color_mode%22%3A%22auto%22%2C%22light_theme%22%3A%7B%22name%22%3A%22light%22%2C%22color_mode%22%3A%22light%22%7D%2C%22dark_theme%22%3A%7B%22name%22%3A%22dark%22%2C%22color_mode%22%3A%22dark%22%7D%7D; logged_in=yes; dotcom_user=omarnuwrar; _gh_sess=7FzMK5K8ffnmyyh5LKKv%2FOXiqZJR4qLXxTdaV66E844ZCPq5qw%2FClaVmXHNfu8oc61N461wsjEr7d8vhEwrs0N0X7ITUed9Zj01RnwHGT8mMRUn6oYSv94LpIh2FwmotPwp8jkSQkZ%2BotdEpdYtp3ZoJZKfiZOcpHBtT7g2VwIPgoW2Qx5RpnKNdI3Hq31C6IIPaSzAqqny7O7c6L8nWv1nfx%2FAbF4UFSo7UfW%2F9JLUYF5lVJ2kXdYoesKOL7c2KItGDTaZCwjYr9cHKlHWD4E9wLo22GjFveVKxrEz5dgIrNdAj8WxWXuY5Ou4eYmxaBn2ovIhvnFz8%2F6qLURX81YxLLZbymGERA3MaRzDDzY3yE76U8y8lLPve0Duqc0lr34R3XUiMKE5A3%2FNPQ273e36yNlLsgBGDyuYIEcsQ84XLq2IQygBxX4y%2B6WSPwXAgOku6MiEP8Ro9ihF6scOhbJRrVCPp0toSY3RmJToUy6XRmBF2B0oyJstKbqLPfmZI8p%2B2bQo8DBKARHWWUzTJdjF%2BfgZtm%2Flb3qijcKT5I6SPU%2BiLMH%2Fl2GwHw73d1OFGUNy4tdLT5SO5vCFrf1GIiV7qUhhQdA21dXsAeQ4qoF5LHiGftyhOUBHto3ZZB%2FJ87uqACflXOfbbTCQCAYNa2u4o8I9iKQp9r2ripVxqQF1oyVu12FSIN%2BS%2Fd4Rm%2FN7E1tOw3tcVgYcsFEcbsOViUZBXXmo1Qfd9H%2B4IGnbv3hZe%2FPeJqb33SxWeQpamEWhLjVJL2hMCbZ8v79azeUL93QzkLXuryStKTXOdoyrbD2n93V36z5Sxhzi9Ku6OxVK1PCZW0R7JiYtQOWoeMAMd4oe3Bqrxyc%2BdAdb0sW3L%2FOD8J2nbvJ5gGA%3D%3D--Ngvrt5zzlDZazWNi--k%2F8wjhX57aMmLOJc8i6L7w%3D%3D"
+ # Step 1: Fetch the authenticity_token and commitOid from the GitHub edit page
+ def fetch_authenticity_token_and_commit_oid():
+     url = "https://github.com/omarnuwrar/Trading/edit/main/news.json"
+
+     headers = {
+         "cookie": TOKEN,
+         "if-none-match": 'W/"2ff86bd1792cfee5ed79ee070b3b46de"',
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-github-target": "dotcom",
+         "x-react-router": "json",
+         "x-requested-with": "XMLHttpRequest",
+     }
+
+     response = requests.get(url, headers=headers)
+
+     if response.status_code == 200:
+         soup = BeautifulSoup(response.text, 'html.parser')
+         script_tag = soup.find("script", {"type": "application/json", "data-target": "react-app.embeddedData"})
+
+         if script_tag:
+             try:
+                 json_data = json.loads(script_tag.string.strip())
+                 authenticity_token = json_data["payload"]["csrf_tokens"]["/omarnuwrar/Trading/tree-save/main/news.json"]["post"]
+                 commit_oid = json_data["payload"]["webCommitInfo"]["commitOid"]
+                 return authenticity_token, commit_oid
+             except (KeyError, json.JSONDecodeError) as e:
+                 print(f"Error: Failed to extract data. Details: {str(e)}")
+                 return None, None
+         else:
+             print("Error: Could not find the required <script> tag.")
+             return None, None
+     else:
+         print(f"Error: Failed to fetch the page. Status code: {response.status_code}")
+         return None, None
+
+ # Step 2: Send the POST request to update the news.json file
+ def update_user_json_file(authenticity_token, commit_oid, new_content):
+     url = "https://github.com/omarnuwrar/Trading/tree-save/main/news.json"
+
+     headers = {
+         "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "x-requested-with": "XMLHttpRequest",
+         "github-verified-fetch": "true",
+         "content-type": "application/x-www-form-urlencoded",
+         "cookie": TOKEN,
+     }
+
+     payload = {
+         "message": "Update news.json",
+         "placeholder_message": "Update news.json",
+         "description": "",
+         "commit-choice": "direct",
+         "target_branch": "main",
+         "quick_pull": "",
+         "guidance_task": "",
+         "commit": commit_oid,
+         "same_repo": "1",
+         "pr": "",
+         "content_changed": "true",
+         "filename": "news.json",
+         "new_filename": "news.json",
+         "value": new_content,
+         "authenticity_token": authenticity_token,
+     }
+
+     response = requests.post(url, headers=headers, data=payload)
+
+     if response.status_code == 200:
+         return {"success": True, "message": "news.json has been updated!"}
+     else:
+         return {"success": False, "message": f"Request failed with status code {response.status_code}", "details": response.text}
+
+
+
+ # Function to fetch and extract the JSON data
+ def fetch_json_from_github():
+     # URL of the GitHub page
+     url = "https://github.com/omarnuwrar/Trading/blob/main/news.json"
+
+     # Custom headers
+     headers = {
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+         "Cookie": TOKEN
+     }
+
+     try:
+         # Fetch the HTML content of the page
+         response = requests.get(url, headers=headers)
+         response.raise_for_status()  # Raise an exception for HTTP errors
+
+         # Parse the HTML using BeautifulSoup
+         soup = BeautifulSoup(response.text, 'html.parser')
+
+         # Find the <script> tag with type="application/json" and `data-target="react-app.embeddedData"`
+         script_tag = soup.find('script', {'type': 'application/json', 'data-target': 'react-app.embeddedData'})
+         if script_tag:
+             # Load the JSON content from the <script> tag
+             embedded_data = json.loads(script_tag.string)
+
+             # Navigate to the "blob" > "rawLines" key for the JSON in the file
+             raw_lines = embedded_data.get("payload", {}).get("blob", {}).get("rawLines", [])
+             if raw_lines:
+                 # The JSON content is in the first element of the rawLines list
+                 json_content = raw_lines[0]
+
+                 # Parse the JSON content
+                 data = json.loads(json_content)
+
+                 # Return the extracted JSON data
+                 return {"success": True, "data": data}
+             else:
+                 return {"success": False, "message": "JSON data not found in the 'rawLines' key."}
+         else:
+             return {"success": False, "message": "Could not find the <script> tag with embedded JSON data."}
+     except requests.exceptions.RequestException as e:
+         return {"success": False, "message": f"Error fetching data: {e}"}
+     except json.JSONDecodeError as je:
+         return {"success": False, "message": f"Error parsing JSON: {je}"}
+
+
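The three modules in this commit are identical except for the file they target (analysis.json, signals.json, news.json), so a caller could dispatch through a small mapping instead of importing each one directly. This is only a possible consolidation sketch, not code from the commit; it assumes the modules keep the function signatures shown above.

    # Hypothetical dispatch helper over the three per-file modules added in this commit.
    import db_analysis
    import db_signals
    import db_system

    MODULES = {
        "analysis.json": db_analysis,
        "signals.json": db_signals,
        "news.json": db_system,
    }

    def write_file(filename, new_content):
        """Route a web-UI commit to the module that targets `filename`."""
        mod = MODULES[filename]
        token, commit_oid = mod.fetch_authenticity_token_and_commit_oid()
        if not token or not commit_oid:
            return {"success": False, "message": "Could not obtain CSRF token or commit OID."}
        return mod.update_user_json_file(token, commit_oid, new_content)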