import os, json from datetime import datetime # import alpaca_trade_api as tradeapi import gspread from oauth2client.service_account import ServiceAccountCredentials from apscheduler.schedulers.background import BackgroundScheduler import gradio as gr from alpaca.data.historical import StockHistoricalDataClient from alpaca.data.requests import StockLatestBarRequest import pytz askjdhakjsdhkasj def parse_alpaca_timestamp(iso_ts: str) -> datetime: """ Parse Alpaca's nanosecond‐precision ISO timestamp into a datetime. """ # 1) Make it a valid fromisoformat string # e.g. '2025-04-09T05:00:07.357309954Z' -> '2025-04-09T05:00:07.357309954+00:00' ts = iso_ts.replace("Z", "+00:00") # 2) Truncate/pad fractional seconds to 6 digits if "." in ts: date_part, rest = ts.split(".", 1) # '2025-04-09T05:00:07', '357309954+00:00' frac, tz = rest.split("+", 1) # '357309954', '00:00' micro = (frac[:6]).ljust(6, "0") # take first 6 digits, pad if needed ts = f"{date_part}.{micro}+{tz}" # 3) Parse into datetime return datetime.fromisoformat(ts) # 1. Load credentials from Space secrets ALPACA_API_KEY = os.environ["ALPACA_API_KEY"] ALPACA_SECRET_KEY = os.environ["ALPACA_SECRET_KEY"] SHEET_KEY = os.environ["GOOGLE_SHEET_KEY"] json = { "type": "service_account", "project_id": "gen-lang-client-0977182810", "private_key_id": "cc563868e9761d1e4f71364a4250b44784da4bc4", "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCu/eyGj0GEjzUT\nXzkJva5hx88AMvOVSa6Lx/p5CbBXNOAZLH/4fAtxmHYA0q4UyYRHkAWxNh/KXig8\nfuXSfoGjhPoeEOZbtUm3Ve08AIULELPK19fQcmpMTv3xbIt5Ix/fwvpV2L2D4s0z\nr2uEJfowlDMKZf/UkR2+JMIr7y73umZaWplxyzvbAo3G6DEZRMQE9kpRRZ1ProOK\nI+T+X7c2aAC88inghb3KAUml0NS9d9ecZRED98tljQywihkTYVCkEEqdbRVA8sf3\n53U+K/CcNaT6jl+pr4FLRygw97Qtw1TlCuv5L+gOkapPOuXzZblO2neQVZxGkRRG\nffuZJN7lAgMBAAECggEAB4mb/HrOChPyEmd1s+qgnjPGrJNFRI8D2UpaoAflDt3n\nRjumt9ETXB1X/eE/9YNtnXiBv5hIJnByVWgWOwAkfOLk3tKAXam8Jw0TjClYkbld\nM+BNZTw8bGoy9t54+SrkZqrFxi4GZxh9RBvrABTzsc/RuAGx6JnR7mdebsPddyGq\nYPfxrsJ4Pduzdz8HhJh0tgs8YCXvdG7c/LLYqvT3wv6gmZixd1l4ZBB6GMDj/h19\nxKGDkQIsF2LgH2fjziJIVOqvG/do43cTwkcwMrwEJD4OxIqyRzibUtrmq5FIIg5K\njkz5NF/CB2JgYhCNBDYgYZr1X4VHJdpNzIBKDbfR2QKBgQDr1MHbHnuCLKF7EZQw\n0oeWWGiRL3PD68p9D6FR8iuErm/qbPaQ1+mXTJKaTHYCgmGhbq4lO18oHFPNT8HB\npTuvDvplq7naObsN3UJamdwLBI0ugnDiOKqEgTGk8yvZ1BqoVkFXJDaUlec0jsO7\nD6iMNRAV3CTK/fH/HpZSwskQnQKBgQC99SobhmRNrokYygFLOY3Vwt4Eo5YblopZ\n70LbT/MMtfUGhTBoysxAtdYNg3MSll0xHJol44nmWdCOkxrU8//opbPOnyTCp0ZO\noZdMmhcnTxskbUQtLxnXXLA4payfn3TaDxFzJ+E4SGQ6uGhwUvH8KXidYJwJ8Xf5\nu77zlbvA6QKBgCLLF/LBE43IEeb7V/bACNsInhzVeGnubsh2tuFFLmB77+0ROGt0\nP3VgCb0UDNEf9VGA59Z8ZPShAUgoPTjZrO2N8ylu5ZOpssw/2ZtOWeWSPVYgTpFB\ntP6ppO5mJhEUNv6hAzDdGeRgHWVmrr/5dm+BIKdo/FCqmLqAA8OgE1QNAoGAEC3n\njjyeInexOalK/utCB2j3nBGK4YKq+goE+cUPgI+m9Q03GFXdm81hE/mUPYsNGPfE\nsWF2B2ay+tHQwT6fF2O1su2fJe01cdEVn/Kn8jMsrqeX3PlcDBna8Jg74cSlIUWT\nSOmmpzKTRuUaECcgcUfe/BiUccxdWaoCE6E3QrECgYAWhqnAly3liVUAqhMQY8NE\nLVcTOiVrwdsJLa819a7AxWNdIlsVx58ZRLnnW0fdwhM/HHt6EbxzRvWrgVl/mtZp\nuyp9uaq3YL9QhcAZcIxoPTmeADxs7K/nZgs1yzECBFsU9AUQHmatoMZoUVYX1aqq\ndrDDXJwSPss1tSckWSMdUw==\n-----END PRIVATE KEY-----\n", "client_email": "getstock@gen-lang-client-0977182810.iam.gserviceaccount.com", "client_id": "110768409295534473868", "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://oauth2.googleapis.com/token", "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/getstock%40gen-lang-client-0977182810.iam.gserviceaccount.com", "universe_domain": "googleapis.com" } # Parse Google service account JSON google_creds = json #os.environ["GOOGLE_CREDENTIALS"]) scope = ["https://spreadsheets.google.com/feeds", "https://www.googleapis.com/auth/drive"] creds = ServiceAccountCredentials.from_json_keyfile_dict(google_creds, scope) gc = gspread.authorize(creds) sheet = gc.open_by_key(SHEET_KEY).sheet1 # 2. Alpaca REST client client = StockHistoricalDataClient(ALPACA_API_KEY, ALPACA_SECRET_KEY) header = ["Time (VN)", "Symbol", "Open", "High", "Low", "Close", "Volume", "Trade Count", "VWAP", "Current Trade Day"] sheet.insert_row(header, index=1) last_bar_value = None days = 0 n = 0 #count update_days = 0 def fetch_and_append(): global last_bar_value, days, n, update_days # 1) Fetch the latest bar for AAPL response = client.get_stock_latest_bar( StockLatestBarRequest(symbol_or_symbols=["AAPL"]) ) bar = response["AAPL"] # 2) Check if value is same as last fetched if bar.close == last_bar_value: n += 1 if n >= 180 and update_days != 1: #không update trong 3 phút -> ngày mới n = 0 update_days = 1 days += 1 return # Skip appending if no update update_days = 0 #if change -> new days n = 0 # 2) Use current time as unique ID bar_ts = datetime.now(pytz.utc) # or use datetime.utcnow().replace(tzinfo=pytz.utc) if needed # 3) Update our marker last_bar_value = bar.close # 4) Format the timestamp as a string (Vietnam time) vn_tz = pytz.timezone("Asia/Ho_Chi_Minh") bar_ts_vn = bar_ts.astimezone(vn_tz) readable_time = bar_ts_vn.strftime("%Y-%m-%d %H:%M:%S") # 5) Build the row to append row = [ readable_time, # timestamp (UTC) bar.symbol, # "AAPL" bar.open, # open price bar.high, # high price bar.low, # low price bar.close, # close price bar.volume, # total volume bar.trade_count, # number of trades bar.vwap, # volume‑weighted avg price days ] sheet.append_row(row, value_input_option="USER_ENTERED") # 3. Schedule the job every second scheduler = BackgroundScheduler() scheduler.add_job(fetch_and_append, 'interval', seconds=2) # background job every 5 sec :contentReference[oaicite:6]{index=6} scheduler.start() # 4. Minimal Gradio UI to keep the process alive def status(): return "✅ Fetching every second and appending to Google Sheets..." demo = gr.Interface(fn=status, inputs=None, outputs="text", live=True) demo.launch()