import krakenex
import pandas as pd
from datetime import datetime, timezone
import time
import os
from typing import Dict, List, Optional
import logging
from huggingface_hub import HfApi, login
from io import StringIO

# Set up logging to both a rotating-free log file and the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('kraken_data_collection.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)


class KrakenHuggingFaceCollector:
    """Collects ticker snapshots from Kraken and uploads them as CSV files
    to a Hugging Face dataset repository."""

    def __init__(self, kraken_key_path: str, hf_token: str, repo_id: str):
        """Initialize the Kraken client and log in to Hugging Face.

        Args:
            kraken_key_path: Path to the Kraken API key file (krakenex format).
            hf_token: Hugging Face access token with write permission.
            repo_id: Target dataset repo, e.g. "user/kraken-trading-data".

        Raises:
            Exception: Re-raised if either the key load or the HF login fails.
        """
        self.kraken_api = krakenex.API()
        try:
            self.kraken_api.load_key(kraken_key_path)
            logger.info("Successfully loaded Kraken API key")
        except Exception:
            logger.exception("Failed to load Kraken API key")
            raise

        try:
            login(token=hf_token)
            self.hf_api = HfApi()
            self.repo_id = repo_id
            logger.info("Successfully logged in to Hugging Face")
        except Exception:
            logger.exception("Failed to login to Hugging Face")
            raise

        # Trading pairs to collect data for (Kraken pair codes).
        self.pairs = [
            "XXBTZUSD",  # Bitcoin
            "XETHZUSD",  # Ethereum
            "XXRPZUSD",  # Ripple
            "ADAUSD",    # Cardano
            "DOGEUSD",   # Dogecoin
            "BNBUSD",    # Binance Coin
            "SOLUSD",    # Solana
            "DOTUSD",    # Polkadot
            "MATICUSD",  # Polygon
            "LTCUSD"     # Litecoin
        ]

    def fetch_ticker_data(self, pair: str) -> Optional[Dict]:
        """Fetch a single ticker snapshot for *pair*.

        Returns:
            A flat dict of floats/ints keyed by field name, or None on any
            API error (errors are logged, never raised, so one bad pair does
            not abort a collection run).
        """
        try:
            response = self.kraken_api.query_public('Ticker', {'pair': pair})
            if 'error' in response and response['error']:
                logger.error("Kraken API error for %s: %s", pair, response['error'])
                return None

            data = response['result']
            if not data:
                logger.error("Empty result for %s", pair)
                return None
            # Kraken keys the result by its canonical pair name, which may
            # differ from the requested alias — take the single entry.
            pair_data = next(iter(data.values()))

            return {
                # Timezone-aware UTC; same string format as the naive
                # utcnow() it replaces (utcnow() is deprecated).
                'timestamp': datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S'),
                'pair': pair,
                'price': float(pair_data['c'][0]),   # Last trade closed price
                'volume': float(pair_data['v'][0]),  # 24h volume
                'bid': float(pair_data['b'][0]),     # Best bid
                'ask': float(pair_data['a'][0]),     # Best ask
                'low': float(pair_data['l'][0]),     # 24h low
                'high': float(pair_data['h'][0]),    # 24h high
                'vwap': float(pair_data['p'][0]),    # 24h VWAP
                'trades': int(pair_data['t'][0])     # Number of trades
            }
        except Exception:
            logger.exception("Error fetching data for %s", pair)
            return None

    def upload_to_huggingface(self, df: pd.DataFrame, split: str) -> None:
        """Upload *df* as data/<split>/kraken_trades.csv in the dataset repo.

        Raises:
            Exception: Re-raised after logging if the upload fails.
        """
        try:
            csv_str = df.to_csv(index=False)
            path_in_repo = f"data/{split}/kraken_trades.csv"
            # upload_file reads the file object in binary mode, so a text
            # StringIO would break — pass UTF-8 encoded bytes instead.
            self.hf_api.upload_file(
                path_or_fileobj=csv_str.encode('utf-8'),
                path_in_repo=path_in_repo,
                repo_id=self.repo_id,
                repo_type="dataset"
            )
            logger.info("Successfully uploaded %s data to Hugging Face", split)
        except Exception:
            logger.exception("Error uploading to Hugging Face")
            raise

    def collect_and_upload(self, split: str, num_rows: int, delay: int = 2) -> None:
        """Collect *num_rows* snapshots per pair and upload them as one CSV.

        Args:
            split: Data split name ('training', 'validation', 'test').
            num_rows: Number of collection rounds (one snapshot per pair each).
            delay: Seconds to sleep between rounds (Kraken rate-limit guard).

        Raises:
            Exception: Re-raised after logging on any unrecoverable error.
        """
        try:
            records: List[Dict] = []
            for i in range(num_rows):
                logger.info("Collecting row %d/%d", i + 1, num_rows)
                for pair in self.pairs:
                    record = self.fetch_ticker_data(pair)
                    if record:
                        records.append(record)
                if i < num_rows - 1:  # Don't sleep after last iteration
                    time.sleep(delay)  # Respect API rate limits

            # Guard: if every fetch failed, df has no columns and the
            # summary below would raise KeyError — bail out explicitly.
            if not records:
                logger.warning("No records collected for %s split; skipping upload", split)
                return

            df = pd.DataFrame(records)
            self.upload_to_huggingface(df, split)

            logger.info("\nData Summary:")
            logger.info("Total records: %d", len(records))
            logger.info("Pairs collected: %d", df['pair'].nunique())
            logger.info("Time range: %s to %s", df['timestamp'].min(), df['timestamp'].max())
        except Exception:
            logger.exception("Error in data collection and upload")
            raise


def main():
    """Collect and upload data for each configured split."""
    try:
        collector = KrakenHuggingFaceCollector(
            kraken_key_path="kraken.key",
            hf_token="your_huggingface_token",  # Replace with your token
            repo_id="GotThatData/kraken-trading-data"  # Replace with your repo name
        )

        # Rows to collect per split.
        splits_config = {
            'training': 1000,    # 1000 rows for training
            'validation': 200,   # 200 rows for validation
            'test': 200          # 200 rows for test
        }

        for split, num_rows in splits_config.items():
            logger.info("\nCollecting and uploading %s data...", split)
            collector.collect_and_upload(split=split, num_rows=num_rows)

        logger.info("Data collection and upload completed successfully!")
    except Exception:
        logger.exception("Fatal error")
        raise


if __name__ == "__main__":
    main()