from flask import Flask, request, jsonify, Response
import requests
import json
import time
import pytz
import logging
import threading
import re
from datetime import datetime, timezone
# Flask setup
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# External APIs
MESSAGE_API_URL = "https://aoamrnuwara.pythonanywhere.com/api/send-message"
MESSAGE_API_KEY = "Seakp0683asppoit"
AI_API_URL = "https://corvo-ai-xx-gpt-5.hf.space/chat"
ALERT_API_URL = "https://dooratre-alert.hf.space/monitor"
TS_POINTS_API = "https://dooratre-tracker.hf.space/ts_points"
# New Chart Pro API
CHART_API_BASE = "https://corvo-ai-chart-pro.hf.space"
# Track API for scenarios
SCENARIO_TRACK_API = "https://dooratre-tracker.hf.space/track"
# Retries
MAX_RETRIES = 5
RETRY_DELAY = 30
# DB modules
import db_system
import db_signals
import db_analysis
import get_price
# ======= Globals (place near other globals) =======
analysis_cancel_flags = {} # key: session_id, value: True/False
# Indicator name to TradingView PUB ID map (extend this as needed)
INDICATOR_MAP = {
"FIBO": "STD;Auto%251Fib%251Retracement%251", # fixed
"ADX": "PUB;932",
"RSI": "STD;Divergence%251Indicator", # fixed
"VWAP": "STD;VWAP",
"EMA": "PUB;WzGi7PQBB1HQofcRJ0mq6vxEpIlsHWvw",
"BRCH": "PUB;8c2d234156044effa75d531d82b247b3",
# Add more mappings here...
}
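# Illustrative sketch (not executed at runtime): how an <img> request from the AI is
# resolved against INDICATOR_MAP by the parsing helpers further below. Tokens that match
# a key above are kept; unknown tokens are ignored, and EMA is used as the fallback.
#
#   <img><XAUUSD><EMA><RSI><15m></img>
#       -> parse_img_request(...) == {"symbol": "XAUUSD", "interval": "15m",
#                                     "indicators": ["EMA", "RSI"]}
#       -> indicators_to_pub_ids(["EMA", "RSI"]) == [INDICATOR_MAP["EMA"], INDICATOR_MAP["RSI"]]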
def get_session_id_from_request(req):
try:
data = req.get_json(silent=True) or {}
except Exception:
data = {}
sid = (
data.get("session_id")
or req.args.get("session_id")
or req.headers.get("X-Session-Id")
or "default"
)
return str(sid)
def mark_analysis_cancelled(session_id):
analysis_cancel_flags[session_id] = True
def is_analysis_cancelled(session_id):
return analysis_cancel_flags.get(session_id, False)
def clear_analysis_cancel(session_id):
if session_id in analysis_cancel_flags:
del analysis_cancel_flags[session_id]
def send_message_to_api(message, max_retries=5, retry_delay=10):
headers = {"Content-Type": "application/json", "X-API-Key": MESSAGE_API_KEY}
payload = {"message": message}
for attempt in range(1, max_retries + 1):
try:
response = requests.post(MESSAGE_API_URL, headers=headers, data=json.dumps(payload))
if response.status_code == 200:
logger.info(f"Message sent on attempt {attempt}")
return {"success": True, "response": response.json()}
else:
logger.warning(f"Attempt {attempt}: status {response.status_code}")
except requests.exceptions.RequestException as e:
logger.warning(f"Attempt {attempt}: request error: {e}")
if attempt < max_retries:
time.sleep(retry_delay)
logger.error("Max retries reached. Failed to send message.")
return {"success": False, "error": "Failed after multiple retries"}
def post_ts_points(tp_value, sl_value):
try:
payload = {"TP": str(tp_value), "SL": str(sl_value)}
r = requests.post(TS_POINTS_API, json=payload, timeout=20)
if r.status_code == 200:
return True, None
return False, f"status {r.status_code}"
except Exception as e:
return False, str(e)
def get_time_zones():
zones = {
"Greenwich": "UTC",
"London": "Europe/London",
"New York": "America/New_York",
"Tokyo": "Asia/Tokyo",
"Sydney": "Australia/Sydney"
}
times = {}
for city, zone in zones.items():
tz = pytz.timezone(zone)
current_time = datetime.now(tz)
times[city] = current_time.strftime("%Y-%m-%d %H:%M:%S %Z")
return times
def build_signal_timestamps():
tz_times = get_time_zones()
iso_utc = datetime.now(timezone.utc).isoformat()
return {"zones": tz_times, "iso_utc": iso_utc}
def get_live_prices_for_pairs():
pairs = ["XAUUSD"]
prices = {}
for p in pairs:
try:
data = get_price.get_live_rates_for_pair(p)
if data:
prices[p] = {
"bid": data.get("bid", "N/A"),
"ask": data.get("ask", "N/A"),
"difference": data.get("difference", "N/A")
}
else:
prices[p] = {"bid": "N/A", "ask": "N/A", "difference": "N/A"}
except Exception as e:
logger.warning(f"Price fetch failed for {p}: {e}")
prices[p] = {"bid": "N/A", "ask": "N/A", "difference": "N/A"}
return prices
def format_live_prices_text(prices_dict):
# Short, one-line per pair for attaching after image/user nudges
lines = []
for pair, obj in prices_dict.items():
bid = obj.get("bid", "N/A")
ask = obj.get("ask", "N/A")
diff = obj.get("difference", "N/A")
lines.append(f"{pair}: bid {bid}, ask {ask}, Δ {diff}")
return "\n".join(lines)
def to_float_safe(val):
try:
return float(str(val).strip())
except Exception:
return None
def parse_alert_block(alert_xml: str):
dur_match = re.search(r'<duration_min>(.*?)</duration_min>', alert_xml, re.DOTALL)
if not dur_match:
raise ValueError("duration_min missing in <Alert>")
try:
duration_minutes = int(str(dur_match.group(1)).strip())
if duration_minutes <= 0:
raise ValueError
except Exception:
raise ValueError("duration_min must be positive integer")
price_blocks = re.findall(r'<price>(.*?)</price>', alert_xml, re.DOTALL)
if not price_blocks:
raise ValueError("At least one <price>...</price> block is required")
price_messages = []
for block in price_blocks:
msg_match = re.search(r'<message>(.*?)</message>', block, re.DOTALL)
message = ""
if msg_match:
message = msg_match.group(1).strip()
price_text = re.sub(r'<message>.*?</message>', '', block, flags=re.DOTALL).strip()
price_val = to_float_safe(price_text)
if price_val is None:
raise ValueError(f"Invalid price value in <price> block: '{price_text}'")
price_messages.append({"price": price_val, "message": message})
return {"duration": duration_minutes, "price_messages": price_messages}
def build_monitor_payload_from_alert(parsed_alert: dict, symbol="XAUUSD"):
payload = {
"symbol": symbol,
"duration_minutes": parsed_alert["duration"],
"price_messages": parsed_alert["price_messages"]
}
return payload
def pair_number(val):
cleaned = re.sub(r'[^0-9\.\-]', '', val)
if cleaned in ('', '-', '.'):
return val.strip()
try:
num = float(cleaned)
s = f"{num:.2f}"
return s.rstrip('0').rstrip('.') if '.' in s else s
    except Exception:
return val.strip()
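# Examples (illustrative): pair_number rounds price-like strings to at most two decimals,
# strips trailing zeros, and returns the input unchanged when no usable number is found:
#   pair_number("3345.00")     -> "3345"
#   pair_number("SL: 3342.50") -> "3342.5"
#   pair_number("N/A")         -> "N/A"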
def count_xml_tags(ai_response):
counts = {
'scenario': len(re.findall(r'<scenario>', ai_response)),
'user_messages': len(re.findall(r'<send_group>', ai_response)),
'alerts': len(re.findall(r'<Alert>', ai_response)),
'edits': len(re.findall(r'<Edit>', ai_response)),
'final': len(re.findall(r'<final>', ai_response))
}
return counts
def save_latest_final_analysis(final_text):
"""
Save ONLY the latest final analysis to db_analysis as an array: [ { ... } ]
"""
try:
record = {
"timestamp_utc": datetime.now(timezone.utc).isoformat(),
"response": final_text
}
# Wrap as array for saving
payload_list = [record]
payload_text = json.dumps(payload_list, ensure_ascii=False)
auth_token, commit_oid = db_analysis.fetch_authenticity_token_and_commit_oid()
if auth_token and commit_oid:
result = db_analysis.update_user_json_file(auth_token, commit_oid, payload_text)
return result.get("success", False)
logger.error("Failed to fetch auth or commit OID for final analysis saving.")
return False
except Exception as e:
logger.error(f"Error saving final analysis: {e}")
return False
def get_chart_screenshot(symbol="XAUUSD", exchange="OANDA", interval="15m", indicators=None, width=1920, height=1080, full_page=False):
if indicators is None:
indicators = [INDICATOR_MAP["EMA"]] # default
payload = {
"symbol": symbol,
"exchange": exchange,
"interval": interval,
"indicators": indicators,
"theme": "dark",
"width": "3840",
"height": "2160",
"fullPage": full_page
}
url = f"{CHART_API_BASE}/api/screenshot"
resp = requests.post(url, json=payload, timeout=90)
resp.raise_for_status()
return resp.json()
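# Minimal usage sketch (assumes the chart-pro service is reachable and, as the calling code
# below expects, returns a JSON body containing an image URL such as {"imageUrl": "https://..."}):
#
#   shot = get_chart_screenshot(symbol="XAUUSD", interval="1h",
#                               indicators=[INDICATOR_MAP["RSI"]])
#   png_url = shot.get("imageUrl", "")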
def load_system_prompt_from_files(has_active_signal: bool, has_scenario: bool):
"""
Returns system prompt string based on current state:
- If has_active_signal: use prompt_signal.txt
- Else (no active signal): use prompt_scenario.txt
If files are missing, fall back to a minimal default in Arabic.
"""
prompt_file = "prompt_signal.txt" if has_active_signal else "prompt_scenario.txt"
try:
with open(prompt_file, "r", encoding="utf-8") as f:
text = f.read().strip()
if text:
return text
except Exception as e:
logger.warning(f"Failed to load system prompt from {prompt_file}: {e}")
# Fallbacks
if has_active_signal:
return "وضع متابعة الصفقة: لا تنشئ سيناريو جديد. حلّل الصفقة الحالية فقط ويمكنك استخدام <Edit> و<send_group> و<Alert>."
else:
return "وضع بناء السيناريو: حلّل وأنشئ سيناريو داخل <final> يتضمن <scenario> مع Buy/Sell و(@/SL/TP)."
def fetch_signals_raw():
"""
Returns:
{
"has_active_signal": bool,
"active_signal": list or None, # when active, it's a list with 1 object (normalized)
"has_scenario": bool,
"scenario": dict or None,
"raw": original
}
Accepts both legacy object and new array shapes, but normalizes in-memory to arrays when needed.
"""
out = {
"has_active_signal": False,
"active_signal": None,
"has_scenario": False,
"scenario": None,
"raw": None
}
try:
res = db_signals.fetch_json_from_github()
if res["success"] and res["data"]:
raw = res["data"][0]
out["raw"] = raw
# If array and first element has pair/type => active signal
if isinstance(raw, list) and raw and isinstance(raw[0], dict) and "pair" in raw[0] and "type" in raw[0]:
out["has_active_signal"] = True
out["active_signal"] = raw
# If object with "scenario" => scenario mode
elif isinstance(raw, dict) and "scenario" in raw:
out["has_scenario"] = True
out["scenario"] = raw["scenario"]
# Legacy: single signal object (not array) => treat as active signal
elif isinstance(raw, dict) and "pair" in raw and "type" in raw:
out["has_active_signal"] = True
out["active_signal"] = [raw]
except Exception as e:
logger.error(f"Error fetching signals/scenario: {e}")
return out
def save_scenario_object(scenario_obj):
"""
Save scenario to db_signals as an array: [ { "scenario": {...} } ]
"""
try:
payload_list = [{"scenario": scenario_obj}]
payload_text = json.dumps(payload_list, ensure_ascii=False)
auth_token, commit_oid = db_signals.fetch_authenticity_token_and_commit_oid()
if auth_token and commit_oid:
result = db_signals.update_user_json_file(auth_token, commit_oid, payload_text)
return result.get("success", False)
return False
except Exception as e:
logger.error(f"Error saving scenario: {e}")
return False
def post_scenario_to_tracker(buy_at, sell_at):
try:
payload = {"Buy": buy_at, "Sell": sell_at}
r = requests.post(SCENARIO_TRACK_API, json=payload, timeout=20)
if r.status_code == 200:
return True, None
return False, f"status {r.status_code}"
except Exception as e:
return False, str(e)
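# Shapes involved (for reference): save_scenario_object writes
#   [{"scenario": {"Buy": {"at": ..., "SL": ..., "TP": ...},
#                  "Sell": {...}, "timestamps": {...}}}]
# to db_signals, and post_scenario_to_tracker sends {"Buy": <buy @>, "Sell": <sell @>}
# to SCENARIO_TRACK_API.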
def build_initial_chat_history(alert_message=None):
chat_history = []
# Determine current state (active signal vs scenario/no state)
try:
state = fetch_signals_raw()
except Exception as e:
logger.error(f"Error determining state for system prompt: {e}")
state = {"has_active_signal": False, "has_scenario": False}
has_active = state.get("has_active_signal", False)
has_scen = state.get("has_scenario", False)
# Load system prompt from files based on state
try:
system_base_prompt = load_system_prompt_from_files(has_active, has_scen)
except Exception as e:
logger.error(f"Error loading system prompt: {e}")
system_base_prompt = "ابدأ التحليل وفق حالتك (صفقة نشطة أو سيناريو)."
# Fetch news summary from db_system and name it 'news'
news = ""
try:
system_data = db_system.fetch_json_from_github()
if system_data["success"] and system_data["data"]:
news = system_data["data"][0].get("response", "") or ""
except Exception as e:
logger.error(f"Error fetching news from db_system: {e}")
news = ""
# Build system turn (system prompt + time zones + news)
try:
times = get_time_zones()
time_info = "\n".join([f"{city}: {time}" for city, time in times.items()])
parts = [system_base_prompt, f"[Time Zones]\n{time_info}"]
if news.strip():
parts.append(f"[News]\n{news.strip()}")
system_full = "\n\n".join(parts)
chat_history.append({
"role": "system",
"content": system_full
})
except Exception as e:
logger.error(f"Error building system turn: {e}")
chat_history.append({
"role": "system",
"content": system_base_prompt
})
multipart_content = []
# Previous analysis (optional) - Read from db_analysis; supports array and legacy object
try:
analysis_data = db_analysis.fetch_json_from_github()
prev_text = ""
if analysis_data["success"] and analysis_data["data"]:
raw_obj = analysis_data["data"][0]
if isinstance(raw_obj, list) and raw_obj:
raw_text = raw_obj[-1].get("response", "")
elif isinstance(raw_obj, dict):
raw_text = raw_obj.get("response", "")
else:
raw_text = ""
prev_text = str(raw_text)[:1500]
if prev_text:
multipart_content.append({"type": "text", "text": f"LAST ANALYSIS HAPPEN :\n{prev_text}"})
except Exception as e:
logger.error(f"Error fetching previous analysis: {e}")
# Alert + current context (active signal or scenario or none)
try:
times = get_time_zones()
time_info = "\n".join([f"{city}: {time}" for city, time in times.items()])
prices_text = format_live_prices_text(get_live_prices_for_pairs())
message_content = ""
if alert_message:
message_content += f" ALERT MESSAGE: {alert_message}\n\n"
else:
message_content += "NO Any Message from ALERT\n\n"
if has_active:
sig = state["active_signal"][0]
message_content += (
"The user is currently in an active trade (one of the scenarios has been triggered):\n"
f"- Pair: {sig.get('pair','N/A')}\n"
f"- Type: {sig.get('type','N/A')}\n"
f"- Entry: {sig.get('entry','N/A')}\n"
f"- Stop Loss: {sig.get('stop_loss','N/A')}\n"
f"- Take Profit: {sig.get('take_profit','N/A')}\n"
"Important Instructions:\n"
"- Provide only ONE <img> at a time in the format:\n"
" <img><XAUUSD><id_for_IND><id_for_IND><id_for_IND>// you can use maxumum 3 indecators in same image<timeframe></img>\nExample <img><XAUUSD><XXX><XXX><XXX><15m></img>"
"- Use <final> only if you think the conversation already has enough information to conclude.\n\nYou need put 5 images i this conversation start with first one"
'''
Remeber :
<final>
<Analysis>here put your summrasion of the chat analysis</Analysis>
<wait>this is just using when you are Acually think no need to change anything in this trade</wait>
📊 Dynamic Trade Editing Feature
<Edit><stop_lose>...</stop_lose><take_profit>...</take_profit></Edit>
<send_group>what you put here gonna send to the tele group and it is not optional you need to use it every time you use final , to make the group in clear(need to be in arabic becasue all of members Arabic)</send_group>
<Alert>
<price>3...<message>here.......</message></price> //put more than 1 price as you want for next analysis , for messages put the saved messages in the prices for Auto send to user just if the price hit as + 50 pips ....etc of messages (all in arabic)
..... may more than one price
<duration_min>10(EXP)</duration_min> // that using for if the price still in same place and don't moving alot so the duration if end gonna back to you
</Alert>
</final>
'''
f"Current Time:\n{time_info}\n\n"
f"Live Prices:\n{prices_text}"
)
elif has_scen:
sc = state["scenario"]
buy = sc.get("Buy", {})
sell = sc.get("Sell", {})
message_content += (
"There is a previously saved scenario that hasn’t been triggered yet. Creating a new scenario will replace the old one:\n"
f"- Buy: @={buy.get('at','N/A')}, SL={buy.get('SL','N/A')}, TP={buy.get('TP','N/A')}\n"
f"- Sell: @={sell.get('at','N/A')}, SL={sell.get('SL','N/A')}, TP={sell.get('TP','N/A')}\n\n"
"Your Goal now Update scenarios or not that is it \n the Users NOT in any trade Now so we will wait your scenario"
"Continue analyzing. If you want to update the scenario, send a <final> with a new <scenario> to replace it. If no new scenario is created, we will wait for one of the scenarios to be triggered.\n\n"
"Important Instructions:\n"
"- Provide only ONE <img> at a time:\n"
" <img><XAUUSD><id_for_IND><id_for_IND><id_for_IND>// you can use maxumum 3 indecators in same image<timeframe></img>\nExample <img><XAUUSD><XXX><XXX><XXX><15m></img>"
"- Use <final> only if you believe there is enough information in the conversation.\n\nYou need put 5 images i this conversation start with first one\n\n"
'''
<final>
<Analysis>here put your summrasion of the chat analysis</Analysis>
<wait>this is just using when you are Acually added a scenario and you don't want to change it, put here the reason about why you don't want to change the scenario</wait>
<scenario>
<Buy><@>...</@><SL>...</SL><TP>...</TP></Buy> // make sure just put price number no text
<Sell><@>...</@><SL>...</SL><TP>...</TP></Sell> // make sure just put price number no text
</scenario>
<send_group>what you put here gonna send to the tele group and it is not optional you need to use it every time you use final , to make the group in clear(need to be in arabic becasue all of members Arabic)</send_group>
<Alert>
<price>3...<message>here.......</message></price> //put more than 1 price as you want for next analysis , for messages put the saved messages in the prices for Auto send for example prices for hit start scenarios etc... (all in arabic)
..... may more than one price
<duration_min>10</duration_min> // that using for if the price still in same place and don't moving alot so the duration if end gonna back to you
</Alert>
</final>
'''
f"Current Time:\n{time_info}\n\n"
f"Live Prices:\n{prices_text}"
)
else:
message_content += (
"No scenario or active trade exists (first run). Please analyze and create the first scenario within <final> when done.\n\n"
"Important Instructions:\n"
"- Provide only ONE <img> at a time:\n"
" <img><XAUUSD><id_for_IND><id_for_IND><id_for_IND>// you can use maxumum 3 indecators in same image<timeframe></img>\nExample <img><XAUUSD><XXX><XXX><XXX><15m></img>"
"- make SL from 3$ to 5$ and TP from 7$ to 10$\n\n You need put 5 images i this conversation start with first one\n\n"
'''
<final>
<Analysis>here put your summrasion of the chat analysis</Analysis>
<wait>this is just using when you are Acually added a scenario and you don't want to change it, put here the reason about why you don't want to change the scenario</wait>
<scenario>
<Buy><@>...</@><SL>...</SL><TP>...</TP></Buy> // make sure just put price number no text
<Sell><@>...</@><SL>...</SL><TP>...</TP></Sell> // make sure just put price number no text
</scenario>
<send_group>what you put here gonna send to the tele group and it is not optional you need to use it every time you use final , to make the group in clear(need to be in arabic becasue all of members Arabic)</send_group>
<Alert>
<price>3...<message>here.......</message></price> //put more than 1 price as you want for next analysis , for messages put the saved messages in the prices for Auto send for example prices for hit start scenarios etc... (all in arabic)
..... may more than one price
<duration_min>10</duration_min> // that using for if the price still in same place and don't moving alot so the duration if end gonna back to you
</Alert>
</final>
'''
f"Current Time:\n{time_info}\n\n"
f"Live Prices:\n{prices_text}"
)
multipart_content.append({"type": "text", "text": message_content})
except Exception as e:
logger.error(f"Error building initial user content: {e}")
if multipart_content:
chat_history.append({
"role": "user",
"type": "multipart",
"content": multipart_content
})
else:
chat_history.append({
"role": "user",
"content": "No additional context available."
})
return chat_history
def call_o1_ai_api(formatted_chat_history, timeout=600):
headers = {"Content-Type": "application/json"}
payload = {"chat_history": formatted_chat_history}
for attempt in range(MAX_RETRIES):
try:
response = requests.post(AI_API_URL, headers=headers, data=json.dumps(payload), timeout=timeout)
response.raise_for_status()
assistant_response = response.json().get("assistant_response", "No response received.")
formatted_chat_history.append({"role": "assistant", "content": assistant_response})
return assistant_response, formatted_chat_history
except requests.exceptions.Timeout:
logger.warning(f"AI timeout attempt {attempt+1}, retrying...")
time.sleep(RETRY_DELAY)
except Exception as e:
logger.warning(f"AI error attempt {attempt+1}: {e}, retrying...")
time.sleep(RETRY_DELAY)
return "Error processing request. Please try again.", formatted_chat_history
def parse_img_request(ai_text):
m = re.search(r'<img>([\s\S]*?)</img>', ai_text, re.IGNORECASE)
if not m:
return None
inner = m.group(1)
tokens = re.findall(r'<\s*([^<>]+?)\s*>', inner)
if not tokens:
return None
symbol = None
interval = None
indicators = []
# helpers
def is_timeframe(tok):
t = tok.strip().lower()
if t in ("d","w"):
return True
return bool(re.fullmatch(r'\d+[mh]', t)) # 1m,5m,15m,1h,4h
def normalize_timeframe(tok):
low = tok.strip().lower()
return low.upper() if low in ("d","w") else low
def is_symbol(tok):
return bool(re.fullmatch(r'[A-Z0-9_]{3,15}', tok.strip()))
# Pass 1: determine symbol as the FIRST token that looks like a symbol
for tok in tokens:
t = tok.strip()
if is_symbol(t):
symbol = t
break
# Default if none provided
if not symbol:
symbol = "XAUUSD"
# Pass 2: pick timeframe (first valid)
for tok in tokens:
t = tok.strip()
if is_timeframe(t):
interval = normalize_timeframe(t)
break
if not interval:
interval = "15m"
# Pass 3: indicators = tokens that are KNOWN in INDICATOR_MAP keys
known_inds = set(INDICATOR_MAP.keys())
for tok in tokens:
t = tok.strip()
# Skip if token is symbol or timeframe
if t == symbol or is_timeframe(t):
continue
# Only accept if token is a known indicator key
if t in known_inds:
indicators.append(t)
else:
logger.warning(f"Unknown token in <img>: '{t}' (ignored)")
# At least one indicator: if none valid, fallback to EMA
if not indicators:
indicators = ["EMA"]
return {"symbol": symbol, "interval": interval, "indicators": indicators}
def indicators_to_pub_ids(indicator_names):
ids = []
for name in indicator_names:
key = name.strip()
if key in INDICATOR_MAP:
ids.append(INDICATOR_MAP[key])
else:
logger.warning(f"Unknown indicator name '{key}', skipping.")
if not ids:
ids = [INDICATOR_MAP.get("EMA")]
# de-duplicate while preserving order
seen = set()
out = []
for i in ids:
if i and i not in seen:
out.append(i)
seen.add(i)
return out
def build_image_reply_user_turn(png_url):
prices_text = format_live_prices_text(get_live_prices_for_pairs())
# Add current times for all major zones
tz_times = get_time_zones()
time_info = "\n".join([f"{city}: {time}" for city, time in tz_times.items()])
content = [
{"type": "image", "url": png_url},
{"type": "text", "text": (
"📊 Your chart is ready for analysis.\n\n"
"⚠️ First: Analyze the image I just sent you before asking for any new one.\n"
"- If you need more confirmation → request **only ONE <img>** at a time for the next chart.\n"
" Example: <img><XAUUSD><id_for_IND><id_for_IND><id_for_IND><timeframe></img>\n"
"- If you already have enough information → finish with <final>.\n\n"
"🚫 Do NOT request multiple images at once.\n"
"🚫 If you use <final>, don’t request another <img> after it.\n\n"
"Be smart with your analysis – choose indicators and timeframes like a pro. Now, go ahead with your analysis and tell me what’s image you need? or that enough ?."
"Don't forget to tell group the summary after end all 5 images analysis okay using <send_group> it is soo important just make sure in the <final>"
)},
{"type": "text", "text": f"⏰ Current Time:\n{time_info}"},
{"type": "text", "text": f"💰 Live Prices:\n{prices_text}"}
]
return {"role": "user", "type": "multipart", "content": content}
def extract_final_block(ai_text):
m = re.search(r'<final>([\s\S]*?)</final>', ai_text)
if not m:
return None
return m.group(0), m.group(1)
def parse_scenario_from_final(final_inner):
# Extract scenario block and return structured dict or None
scen_match = re.search(r'<scenario>([\s\S]*?)</scenario>', final_inner)
if not scen_match:
return None
scen_inner = scen_match.group(1)
# Buy
buy_block = re.search(r'<Buy>([\s\S]*?)</Buy>', scen_inner)
sell_block = re.search(r'<Sell>([\s\S]*?)</Sell>', scen_inner)
def parse_side(block_text):
if not block_text:
return None
at_match = re.search(r'<@>(.*?)</@>', block_text, re.DOTALL)
sl_match = re.search(r'<SL>(.*?)</SL>', block_text, re.DOTALL)
tp_match = re.search(r'<TP>(.*?)</TP>', block_text, re.DOTALL)
at = at_match.group(1).strip() if at_match else ""
sl = pair_number(sl_match.group(1).strip()) if sl_match else ""
tp = pair_number(tp_match.group(1).strip()) if tp_match else ""
return {"at": at, "SL": sl, "TP": tp}
buy = parse_side(buy_block.group(1) if buy_block else None)
sell = parse_side(sell_block.group(1) if sell_block else None)
if not buy and not sell:
return None
scenario_obj = {
"Buy": buy or {"at": "", "SL": "", "TP": ""},
"Sell": sell or {"at": "", "SL": "", "TP": ""},
"timestamps": build_signal_timestamps()
}
return scenario_obj
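# Example (illustrative) of a <scenario> block inside <final> and the object it yields:
#
#   <scenario>
#   <Buy><@>3340</@><SL>3336</SL><TP>3349</TP></Buy>
#   <Sell><@>3325</@><SL>3329.5</SL><TP>3316</TP></Sell>
#   </scenario>
#
#   -> {"Buy":  {"at": "3340", "SL": "3336", "TP": "3349"},
#       "Sell": {"at": "3325", "SL": "3329.5", "TP": "3316"},
#       "timestamps": build_signal_timestamps()}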
def parse_and_execute_final(final_xml_full, final_inner):
actions_performed = []
# Save latest final text
try:
ok = save_latest_final_analysis(final_xml_full)
if ok:
actions_performed.append("✅ تم حفظ التحليل النهائي (آخر واحد فقط)")
else:
actions_performed.append("❌ فشل في حفظ التحليل النهائي")
except Exception as e:
logger.error(f"Error saving final analysis: {e}")
actions_performed.append("❌ خطأ في حفظ التحليل النهائي")
# If there is an active signal in DB, we should not replace it with scenario.
state = fetch_signals_raw()
active_signal_present = state["has_active_signal"]
# Process <Edit> if present (for active signal only)
if active_signal_present:
edit_matches = re.finditer(r'<Edit>(.*?)</Edit>', final_inner, re.DOTALL)
for edit_match in edit_matches:
try:
edit_xml = edit_match.group(1)
edit_data = {}
sl_match = re.search(r'<stop_lose>(.*?)</stop_lose>', edit_xml) or re.search(r'<stop_loss>(.*?)</stop_loss>', edit_xml)
if sl_match:
edit_data["stop_loss"] = pair_number(sl_match.group(1).strip())
tp_match = re.search(r'<take_profit>(.*?)</take_profit>', edit_xml)
if tp_match:
edit_data["take_profit"] = pair_number(tp_match.group(1).strip())
result = edit_existing_signal(edit_data) if edit_data else {"success": False, "error": "No changes to apply"}
if result.get("success"):
# Build a detailed message reflecting exactly what changed
parts = []
if "take_profit" in edit_data:
parts.append(f"💰 تم تغيير الهدف إلى: {edit_data['take_profit']} 💰")
if "stop_loss" in edit_data:
parts.append(f"🛑 تم تغيير وقف الخسارة إلى: {edit_data['stop_loss']} 🛑")
change_text = "\n".join(parts) if parts else "تم التنفيذ دون تغييرات واضحة."
# TS Points status message
ts_info = result.get("ts_points", {})
if ts_info.get("success"):
ts_msg = "✅ تم إرسال القيم الجديدة إلى نظام TS Points"
else:
err = ts_info.get("error") or "غير معروف"
ts_msg = f"⚠️ فشل إرسال TS Points: {err}"
send_message_to_api(f"🔄 تم تحديث الصفقة المفتوحة (SL/TP).\n{change_text}\n{ts_msg}")
actions_performed.append(f"✅ تم تحديث الصفقة المفتوحة | {ts_msg}")
else:
actions_performed.append(f"⚠️ لم يتم تحديث الصفقة: {result.get('error')}")
except Exception as e:
logger.error(f"Error processing Edit: {e}")
actions_performed.append(f"❌ خطأ في معالجة Edit: {str(e)}")
# Process <scenario> only if no active signal
if not active_signal_present:
scenario_obj = parse_scenario_from_final(final_inner)
if scenario_obj:
# Save scenario
try:
ok = save_scenario_object(scenario_obj)
if ok:
actions_performed.append("✅ تم حفظ السيناريو (استبدال أي سيناريو سابق)")
else:
actions_performed.append("❌ فشل حفظ السيناريو")
except Exception as e:
logger.error(f"Error saving scenario: {e}")
actions_performed.append(f"❌ خطأ حفظ السيناريو: {str(e)}")
# Post to tracker
buy_at = (scenario_obj.get("Buy") or {}).get("at", "")
sell_at = (scenario_obj.get("Sell") or {}).get("at", "")
ok, err = post_scenario_to_tracker(buy_at, sell_at)
if ok:
actions_performed.append("✅ تم إشعار نظام التتبع بالسيناريو")
else:
actions_performed.append(f"⚠️ فشل إشعار نظام التتبع: {err}")
else:
actions_performed.append("ℹ️ لا يوجد <scenario> في التحليل النهائي أو غير صالح")
# Process <send_group>
user_msg_matches = re.finditer(r'<send_group>(.*?)</send_group>', final_inner, re.DOTALL)
for user_msg_match in user_msg_matches:
try:
user_message = user_msg_match.group(1).strip()
if user_message:
send_result = send_message_to_api(user_message)
if send_result["success"]:
actions_performed.append("✅ تم إرسال رسالة للمستخدم")
else:
actions_performed.append("❌ فشل في إرسال رسالة للمستخدم")
else:
actions_performed.append("⚠️ رسالة فارغة تم تجاهلها")
except Exception as e:
logger.error(f"Error sending user message: {e}")
actions_performed.append(f"❌ خطأ في إرسال الرسالة: {str(e)}")
# Process <Alert>
alert_matches = re.finditer(r'<Alert>(.*?)</Alert>', final_inner, re.DOTALL)
for alert_match in alert_matches:
try:
alert_xml = alert_match.group(1)
try:
parsed = parse_alert_block(alert_xml)
except ValueError as ve:
actions_performed.append(f"❌ Alert parse error: {str(ve)}")
continue
alert_payload = build_monitor_payload_from_alert(parsed, symbol="XAUUSD")
try:
response = requests.post(ALERT_API_URL, json=alert_payload, timeout=30)
if response.status_code == 200:
alert_message = (
"⏰ تم تعيين منبه جديد 🔔\n\n"
f"الرمز: {alert_payload.get('symbol', 'XAUUSD')}\n"
f"⏱️ المدة: {alert_payload['duration_minutes']} دقيقة\n"
"📊 سيتم إرسال تنبيه عند أول مستوى يتم عبوره."
)
send_result = send_message_to_api(alert_message)
if send_result["success"]:
actions_performed.append("✅ تم إنشاء منبه جديد وإرسال الإشعار")
else:
actions_performed.append("⚠️ تم إنشاء المنبه لكن فشل إرسال الإشعار")
else:
actions_performed.append(f"❌ فشل في إنشاء المنبه (كود: {response.status_code})")
except Exception as req_e:
actions_performed.append(f"❌ خطأ اتصال أثناء إنشاء المنبه: {str(req_e)}")
except Exception as e:
logger.error(f"Error creating alert: {e}")
actions_performed.append(f"❌ خطأ في إنشاء المنبه: {str(e)}")
return actions_performed
def edit_existing_signal(edit_data):
"""
Edit the active signal (assumed stored as an array with a single signal object) and save back as an array.
"""
try:
signals_data = db_signals.fetch_json_from_github()
if not (signals_data["success"] and signals_data["data"]):
return {"success": False, "error": "No active signal found to edit"}
raw = signals_data["data"][0]
# Expecting current storage shape to be an array; ensure we handle both array and object safely
if isinstance(raw, list) and raw and isinstance(raw[0], dict):
current_signal = raw[0]
container_is_list = True
elif isinstance(raw, dict):
# Legacy/object format, normalize to a single-element list
current_signal = raw
container_is_list = False
logger.warning("Signals DB returned an object; normalizing to array on save.")
else:
return {"success": False, "error": "No active signal found to edit"}
updates_made = []
if "stop_loss" in edit_data:
old_sl = current_signal.get("stop_loss", "N/A")
current_signal["stop_loss"] = edit_data["stop_loss"]
updates_made.append(f"stop_loss: {old_sl} → {edit_data['stop_loss']}")
if "take_profit" in edit_data:
old_tp = current_signal.get("take_profit", "N/A")
current_signal["take_profit"] = edit_data["take_profit"]
updates_made.append(f"take_profit: {old_tp} → {edit_data['take_profit']}")
if not updates_made:
return {"success": False, "error": "No changes to apply"}
auth_token, commit_oid = db_signals.fetch_authenticity_token_and_commit_oid()
if auth_token and commit_oid:
# Always save as array
updated_signal_list = [current_signal]
updated_json = json.dumps(updated_signal_list, ensure_ascii=False)
result = db_signals.update_user_json_file(auth_token, commit_oid, updated_json)
if result.get("success"):
# Push new TP/SL to TS Points system
tp_val = current_signal.get("take_profit", "")
sl_val = current_signal.get("stop_loss", "")
# convert to float safely if possible
def to_num(x):
try:
return float(str(x).strip())
                    except Exception:
return None
tp_num = to_num(tp_val)
sl_num = to_num(sl_val)
ts_ok = None
ts_err = None
if tp_num is not None or sl_num is not None:
ts_ok, ts_err = post_ts_points(tp_num if tp_num is not None else "", sl_num if sl_num is not None else "")
if not ts_ok:
logger.warning(f"post_ts_points failed: {ts_err}")
return {"success": True, "updates": updates_made, "ts_points": {"success": bool(ts_ok), "error": ts_err}}
else:
return {"success": False, "error": "Failed to update signal"}
else:
return {"success": False, "error": "Failed to get auth tokens"}
except Exception as e:
logger.error(f"Error editing signal: {e}")
return {"success": False, "error": str(e)}
def get_current_active_tp_sl():
try:
signals_data = db_signals.fetch_json_from_github()
if signals_data["success"] and signals_data["data"]:
raw = signals_data["data"][0]
if isinstance(raw, list) and raw and isinstance(raw[0], dict):
tp = str(raw[0].get("take_profit", "")).strip()
sl = str(raw[0].get("stop_loss", "")).strip()
return {"TP": tp, "SL": sl, "found": True}
except Exception as e:
logger.error(f"Error fetching current TP/SL: {e}")
return {"TP": "", "SL": "", "found": False}
def run_multi_turn_analysis(chat_history, max_steps=10, session_id="default"):
"""
Multi-turn loop with cancellation support:
- Checks is_analysis_cancelled(session_id) at each step
- If cancelled, clears flag and returns immediately
- Otherwise continues as before
"""
steps = 0
last_ai_response = ""
while steps < max_steps:
# Cancellation check before each step
if is_analysis_cancelled(session_id):
clear_analysis_cancel(session_id)
return {
"success": True,
"message": "Analysis cancelled by stop_analysis",
"steps": steps,
"actions_performed": [],
"ai_response_preview": last_ai_response[:300]
}
steps += 1
ai_response, chat_history = call_o1_ai_api(chat_history)
last_ai_response = ai_response or ""
# If AI returned <final>, break and process
if re.search(r'<final>', last_ai_response):
final_full, final_inner = extract_final_block(last_ai_response)
if not final_full:
return {
"success": True,
"message": "AI final detected but malformed.",
"steps": steps,
"actions_performed": ["❌ final block malformed"],
"ai_response_preview": last_ai_response[:300]
}
# NEW: Print full chat history as roles before executing actions
try:
logger.info("=== Full Chat History (before final actions) ===")
for turn in chat_history:
role = turn.get("role", "unknown")
if turn.get("type") == "multipart":
logger.info(f"ROLE: {role}")
parts = turn.get("content", [])
for p in parts:
if p.get("type") == "text":
txt = str(p.get("text", ""))[:1000]
logger.info(f" [text] {txt}")
elif p.get("type") == "image":
logger.info(f" [image] {p.get('url','')}")
else:
logger.info(f" [part] {p}")
else:
content = turn.get("content", "")
content_preview = str(content)[:1000]
logger.info(f"ROLE: {role}\n{content_preview}")
logger.info("=== End Chat History ===")
except Exception as e:
logger.warning(f"Failed to print chat history: {e}")
actions = parse_and_execute_final(final_full, final_inner)
return {
"success": True,
"message": "Final actions executed",
"steps": steps,
"actions_performed": actions,
"ai_response_preview": last_ai_response[:300]
}
# Else check for <img> request
img_req = parse_img_request(last_ai_response)
if img_req:
try:
symbol = img_req["symbol"]
interval = img_req["interval"]
indicator_names = img_req["indicators"]
indicator_ids = indicators_to_pub_ids(indicator_names)
data = get_chart_screenshot(
symbol=symbol,
exchange="OANDA",
interval=interval,
indicators=indicator_ids,
width=1080,
height=1920,
full_page=False
)
png_url = data.get("imageUrl") or data.get("imageURL") or data.get("png") or "" or data.get("image_url", "")
if png_url:
user_turn = build_image_reply_user_turn(png_url)
chat_history.append(user_turn)
else:
prices_text = format_live_prices_text(get_live_prices_for_pairs())
chat_history.append({
"role": "user",
"content": f"تعذر الحصول على صورة من الخادم. اطلب صورة أخرى أو أرسل <final>.\nالأسعار الحية الآن:\n{prices_text}"
})
except Exception as e:
logger.error(f"Chart fetch error: {e}")
prices_text = format_live_prices_text(get_live_prices_for_pairs())
chat_history.append({
"role": "user",
"content": f"حدث خطأ أثناء جلب الصورة. اطلب صورة أخرى أو أرسل <final>.\nالأسعار الحية الآن:\n{prices_text}"
})
continue
# If neither <final> nor <img> was used, nudge AI and include price snapshot
prices_text = format_live_prices_text(get_live_prices_for_pairs())
chat_history.append({
"role": "user",
"content": f"يرجى طلب صورة باستخدام <img> أو إنهاء التحليل بإرسال <final>.\nالأسعار الحية الآن:\n{prices_text}"
})
# Max steps reached without final
return {
"success": True,
"message": "Maximum steps reached without final.",
"steps": steps,
"actions_performed": [],
"ai_response_preview": last_ai_response[:300]
}
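# Loop contract (summary): on each step the AI either requests ONE <img> (a chart
# screenshot is fetched and appended as a new user turn with live prices), returns
# <final> (its actions are executed and the loop ends), or is nudged with a price
# snapshot to do one of the two. A stop_analysis call cancels at the next step boundary.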
@app.route('/stop_analysis', methods=['POST'])
def stop_analysis():
try:
session_id = get_session_id_from_request(request)
mark_analysis_cancelled(session_id)
return jsonify({
"success": True,
"message": f"Stop signal received. Session '{session_id}' will be cancelled at the next safe point."
})
except Exception as e:
logger.error(f"Error in stop_analysis: {e}")
return jsonify({"success": False, "error": str(e)}), 500
@app.route('/analysis_now', methods=['POST'])
def analysis_now():
try:
data = request.get_json()
alert_message = data.get('message', '') if data else ''
logger.info(f"Received alert message: {alert_message}")
# Run background processing in separate thread
def background_task(alert_message):
try:
chat_history = build_initial_chat_history(alert_message)
result = run_multi_turn_analysis(chat_history)
tags = count_xml_tags(result.get("ai_response_preview", ""))
logger.info(f"Background analysis completed. Tags: {tags}")
except Exception as e:
logger.error(f"Error in background task: {e}")
threading.Thread(target=background_task, args=(alert_message,)).start()
# Immediately return 200 OK with no data
return Response(status=200)
except Exception as e:
logger.error(f"Error in analysis_now: {e}")
return Response(status=200) # still return 200 so client knows it arrived
@app.route('/start_analysis', methods=['GET'])
def start_analysis():
try:
logger.info("Starting initial analysis (multi-turn)...")
chat_history = build_initial_chat_history()
result = run_multi_turn_analysis(chat_history)
tags = count_xml_tags(result.get("ai_response_preview", ""))
return jsonify({
"success": True,
"message": "Initial analysis completed",
"xml_tags_found": tags,
"actions_performed": result.get("actions_performed", []),
"total_actions": len(result.get("actions_performed", [])),
"steps": result.get("steps", 0),
"ai_response_preview": result.get("ai_response_preview", "")
})
except Exception as e:
logger.error(f"Error in start_analysis: {e}")
return jsonify({"success": False, "error": str(e)}), 500
@app.route('/test_actions', methods=['POST'])
def test_actions():
"""
Test endpoint for providing a <final> block directly.
It will:
- Save only the latest final
    - Execute actions: scenario (if no active trade), Edit (if active trade), send_group, Alert
"""
try:
data = request.get_json()
test_response = data.get('test_response', '') if data else ''
if not test_response:
return jsonify({"success": False, "error": "Please provide test_response in the request body"}), 400
final_tuple = extract_final_block(test_response)
if not final_tuple:
return jsonify({
"success": False,
"error": "No <final> block found in test_response"
}), 400
final_full, final_inner = final_tuple
actions = parse_and_execute_final(final_full, final_inner)
tags = count_xml_tags(final_full)
return jsonify({
"success": True,
"message": "Test final processed",
"xml_tags_found": tags,
"actions_performed": actions,
"total_actions": len(actions),
"test_response_preview": final_full[:200] + "..." if len(final_full) > 200 else final_full
})
except Exception as e:
logger.error(f"Error in test_actions: {e}")
return jsonify({"success": False, "error": str(e)}), 500
@app.route('/health', methods=['GET'])
def health_check():
return jsonify({
"status": "healthy",
"timestamp": datetime.now().isoformat(),
"system": "XAUUSD Trading AI (multi-turn, chart-pro, scenario-mode)",
"execution_order": [
"1. Multi-turn image requests until <final>",
"2. Save only the single latest <final>",
"3. If active trade exists: allow Edit/send_user/Alert only (no scenario creation).",
"4. If no active trade: process <scenario>, save, notify tracker.",
"5. No <signal> creation by AI (signals will be created by external tracker when hit)."
]
})
@app.route('/', methods=['GET'])
def index():
return jsonify({
"message": "نظام الذكاء الاصطناعي لإشارات تداول XAUUSD (وضع السيناريو متعدد المراحل)",
"endpoints": {
"/start_analysis": "بدء التحليل متعدد المراحل (GET)",
"/analysis_now": "webhook للتحليل متعدد المراحل (POST: {message})",
"/test_actions": "اختبار معالجة <final> (POST: {test_response})",
"/health": "فحص حالة النظام (GET)"
},
"version": "4.0.0",
"notes": [
"استبدال <signal> بـ <scenario> في مخرجات الذكاء الاصطناعي",
"يتم حفظ السيناريو الأخير فقط في db_signals",
"يتم إشعار نظام التتبع بالـ @ لكل من Buy/Sell",
"في حال وجود صفقة نشطة، لا يتم إنشاء سيناريو جديد بل متابعة الصفقة فقط",
"بعد كل صورة نضيف الأسعار الحية لتتبع حركة السعر خلال المحادثة"
]
})
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=7860)