|
import krakenex |
|
import pandas as pd |
|
from datetime import datetime |
|
import time |
|
import os |
|
from typing import Dict, List, Optional |
|
import logging |
|
|
|
|
|
logging.basicConfig( |
|
level=logging.INFO, |
|
format='%(asctime)s - %(levelname)s - %(message)s', |
|
handlers=[ |
|
logging.FileHandler('kraken_data_collection.log'), |
|
logging.StreamHandler() |
|
] |
|
) |
|
logger = logging.getLogger(__name__) |
|
|
|
class KrakenDataCollector: |
|
"""Handles data collection from Kraken API""" |
|
|
|
def __init__(self, api_key_path: str): |
|
self.api = krakenex.API() |
|
try: |
|
self.api.load_key(api_key_path) |
|
logger.info("Successfully loaded Kraken API key") |
|
except Exception as e: |
|
logger.error(f"Failed to load API key: {e}") |
|
raise |
|
|
|
|
|
self.pairs = [ |
|
"XXBTZUSD", |
|
"XETHZUSD", |
|
"XXRPZUSD", |
|
"ADAUSD", |
|
"DOGEUSD", |
|
"BNBUSD", |
|
"SOLUSD", |
|
"DOTUSD", |
|
"MATICUSD", |
|
"LTCUSD" |
|
] |
|
|
|
def fetch_ticker_data(self, pair: str) -> Optional[Dict]: |
|
"""Fetch ticker data for a single pair""" |
|
try: |
|
response = self.api.query_public('Ticker', {'pair': pair}) |
|
|
|
if 'error' in response and response['error']: |
|
logger.error(f"Kraken API error for {pair}: {response['error']}") |
|
return None |
|
|
|
data = response['result'] |
|
pair_data = list(data.values())[0] |
|
|
|
return { |
|
'timestamp': datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), |
|
'pair': pair, |
|
'price': float(pair_data['c'][0]), |
|
'volume': float(pair_data['v'][0]), |
|
'bid': float(pair_data['b'][0]), |
|
'ask': float(pair_data['a'][0]), |
|
'low': float(pair_data['l'][0]), |
|
'high': float(pair_data['h'][0]), |
|
'vwap': float(pair_data['p'][0]), |
|
'trades': int(pair_data['t'][0]) |
|
} |
|
|
|
except Exception as e: |
|
logger.error(f"Error fetching data for {pair}: {e}") |
|
return None |
|
|
|
def create_data_directories(self) -> None: |
|
"""Create directory structure for data storage""" |
|
for split in ['training', 'validation', 'test']: |
|
directory = f'data/{split}' |
|
if not os.path.exists(directory): |
|
os.makedirs(directory) |
|
logger.info(f"Created directory: {directory}") |
|
|
|
def save_data_to_csv(self, split: str, num_rows: int, delay: int = 2) -> None: |
|
""" |
|
Collect and save data for all pairs |
|
|
|
Args: |
|
split: Data split type ('training', 'validation', 'test') |
|
num_rows: Number of data points to collect per pair |
|
delay: Delay between API calls in seconds |
|
""" |
|
try: |
|
records = [] |
|
|
|
for i in range(num_rows): |
|
logger.info(f"Collecting row {i+1}/{num_rows}") |
|
|
|
for pair in self.pairs: |
|
record = self.fetch_ticker_data(pair) |
|
if record: |
|
records.append(record) |
|
|
|
if i < num_rows - 1: |
|
time.sleep(delay) |
|
|
|
|
|
df = pd.DataFrame(records) |
|
file_path = f"data/{split}/kraken_trades.csv" |
|
|
|
|
|
os.makedirs(os.path.dirname(file_path), exist_ok=True) |
|
|
|
|
|
df.to_csv(file_path, index=False) |
|
logger.info(f"Successfully saved {len(records)} records to {file_path}") |
|
|
|
|
|
logger.info("\nData Summary:") |
|
logger.info(f"Total records: {len(records)}") |
|
logger.info(f"Pairs collected: {len(df['pair'].unique())}") |
|
logger.info(f"Time range: {df['timestamp'].min()} to {df['timestamp'].max()}") |
|
|
|
except Exception as e: |
|
logger.error(f"Error saving data: {e}") |
|
raise |
|
|
|
def main(): |
|
"""Main function to run data collection""" |
|
try: |
|
|
|
collector = KrakenDataCollector("kraken.key") |
|
|
|
|
|
collector.create_data_directories() |
|
|
|
|
|
splits_config = { |
|
'training': 1000, |
|
'validation': 200, |
|
'test': 200 |
|
} |
|
|
|
for split, num_rows in splits_config.items(): |
|
logger.info(f"\nCollecting {split} data...") |
|
collector.save_data_to_csv(split=split, num_rows=num_rows) |
|
|
|
logger.info("Data collection completed successfully!") |
|
|
|
except Exception as e: |
|
logger.error(f"Fatal error in data collection: {e}") |
|
raise |
|
|
|
if __name__ == "__main__": |
|
main() |