gauravlochab committed
Commit 144269c · Parent: d8345d8

adding fix for initial values

Files changed (2):
  1. app.py +10 -5
  2. initial_value_fixer.py +751 -0
app.py CHANGED
@@ -19,6 +19,7 @@ from typing import List, Dict, Any, Optional
 # from app_trans_new import create_transcation_visualizations,create_active_agents_visualizations
 # APR visualization functions integrated directly
 from fetch_and_preprocess_data import generate_continuous_random_data
+from initial_value_fixer import fix_apr_and_roi
 
 # Set up logging with appropriate verbosity
 logging.basicConfig(
@@ -180,6 +181,7 @@ def extract_apr_value(attr: Dict[str, Any]) -> Dict[str, Any]:
         apr = json_data.get("apr")
         adjusted_apr = json_data.get("adjusted_apr")  # Extract adjusted_apr if present
         timestamp = json_data.get("timestamp")
+        address = json_data.get("portfolio_snapshot", {}).get("portfolio", {}).get("address")
 
         # Extract ROI (f_i_ratio) from calculation_metrics if it exists
         roi = None
@@ -193,20 +195,22 @@ def extract_apr_value(attr: Dict[str, Any]) -> Dict[str, Any]:
         if timestamp:
             timestamp_dt = datetime.fromtimestamp(timestamp)
 
-        result = {
+        result = json_data.copy()  # Copy the original JSON data for logging
+        result.update({
             "apr": apr,
             "adjusted_apr": adjusted_apr,
             "roi": roi,
             "timestamp": timestamp_dt,
             "agent_id": agent_id,
-            "is_dummy": False
-        }
+            "is_dummy": False,
+            "address": address
+        })
         logger.debug(f"Agent {agent_id}: Extracted result: {result}")
         return result
     except (json.JSONDecodeError, KeyError, TypeError) as e:
         logger.error(f"Error parsing JSON value: {e} for agent_id: {attr.get('agent_id')}")
         logger.error(f"Problematic json_value: {attr.get('json_value')}")
-        return {"apr": None, "adjusted_apr": None, "roi": None, "timestamp": None, "agent_id": attr.get('agent_id'), "is_dummy": False}
+        return {"apr": None, "adjusted_apr": None, "roi": None, "timestamp": None, "agent_id": attr.get('agent_id'), "is_dummy": False, "address": None}
 
 def fetch_apr_data_from_db():
     """
@@ -779,8 +783,9 @@ def generate_apr_visualizations():
 
     # No longer generating dummy data
     # Set global_df for access by other functions
+    df = fix_apr_and_roi(df)  # Currently it assumes no investment has been made
     global_df = df
 
     # Save to CSV before creating visualizations
     csv_file = save_to_csv(df)
 
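With this change, `extract_apr_value` returns the full attribute payload rather than a handful of fields: `result` starts as a copy of `json_data`, so fields such as `portfolio_snapshot`, `calculation_metrics`, and `volume` survive into the DataFrame that `fix_apr_and_roi` later consumes. A sketch of the resulting row shape, using values from the sample row quoted in initial_value_fixer.py below (illustrative, not a literal return value):

    {
        "portfolio_snapshot": {...},   # carried over from json_data
        "calculation_metrics": {...},  # carried over from json_data
        "apr": -0.03,
        "adjusted_apr": 1.75,
        "roi": -0.0002765562977782299,
        "timestamp": datetime(2025, 5, 15, 21, 37, 27),
        "agent_id": 86,
        "is_dummy": False,
        "address": "0xAD588C11Ea73123fDe199C5C4F7F75C6e495C758",
    }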
initial_value_fixer.py ADDED
@@ -0,0 +1,751 @@
# Sample CSV values:
# apr,adjusted_apr,timestamp,portfolio_snapshot,calculation_metrics,roi,agent_id,is_dummy,address,agent_name,metric_type,first_investment_timestamp,agent_hash,volume,trading_type,selected_protocols
# -0.03,1.75,2025-05-15 21:37:27.000000,"{'portfolio': {'portfolio_value': 29.34506065817397, 'allocations': [{'chain': 'optimism', 'type': 'velodrome', 'id': '0xaF03f51DE7a0E62BF061F6Fc3931cF79166B0a29', 'assets': ['FRAX', 'alUSD'], 'apr': 11.9, 'details': 'Velodrome Pool', 'ratio': 100.0, 'address': '0xAD588C11Ea73123fDe199C5C4F7F75C6e495C758'}], 'portfolio_breakdown': [{'asset': 'FRAX', 'address': '0x2E3D870790dC77A83DD1d18184Acc7439A53f475', 'balance': 12.498312351563191, 'price': 0.999924, 'value_usd': 12.497362479824472, 'ratio': 0.425876}, {'asset': 'alUSD', 'address': '0xCB8FA9a76b8e203D8C3797bF438d8FB81Ea3326A', 'balance': 17.023792285753334, 'price': 0.989656, 'value_usd': 16.8476981783495, 'ratio': 0.574124}], 'address': '0xAD588C11Ea73123fDe199C5C4F7F75C6e495C758'}, 'positons': [{'chain': 'optimism', 'pool_address': '0xaF03f51DE7a0E62BF061F6Fc3931cF79166B0a29', 'dex_type': 'velodrome', 'token0': '0x2E3D870790dC77A83DD1d18184Acc7439A53f475', 'token1': '0xCB8FA9a76b8e203D8C3797bF438d8FB81Ea3326A', 'token0_symbol': 'FRAX', 'token1_symbol': 'alUSD', 'apr': 11.901789131732096, 'pool_id': '0xaF03f51DE7a0E62BF061F6Fc3931cF79166B0a29', 'is_stable': True, 'is_cl_pool': False, 'amount0': 12549523370531409633, 'amount1': 16972223462662011900, 'timestamp': 1747319387, 'status': 'open', 'tx_hash': '0xb487bb4a45bcd7bb3b9e9e3fabe76bf6594828091598ffab69704754b4c8bea8'}]}","{'initial_value': 29.353178464538146, 'final_value': 29.34506065817397, 'f_i_ratio': -0.0002765562977782299, 'last_investment_timestamp': 1747319387, 'time_ratio': 5380.753851502806}",-0.0002765562977782299,86,False,0xAD588C11Ea73123fDe199C5C4F7F75C6e495C758,nusus-tayar25,APR,,,,,

# Parse the optimus_apr_values.csv file
# Iterate on the rows: For each row:
#   Parse address, final_value
#   Compute initial_value using the parsed address similar to an Optimus function
#   Compute the APR and ROI similar to an Optimus function
#   Write the row with initial_value, APR, and ROI to a new CSV file
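
# A minimal sketch of that flow, assuming the CSV matches the sample header
# above (the output filename here is hypothetical):
#
#     import ast
#     import pandas as pd
#     df = pd.read_csv("optimus_apr_values.csv", parse_dates=["timestamp"])
#     df["calculation_metrics"] = df["calculation_metrics"].map(ast.literal_eval)
#     df = fix_apr_and_roi(df)
#     df.to_csv("optimus_apr_values_fixed.csv", index=False)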

from datetime import datetime
from decimal import Decimal
import json
import logging
import os
import time
from typing import Dict, Optional, Tuple

from pandas import DataFrame
import requests
from web3 import Web3

ETHERSCAN_API_KEY = ""
EXCLUDED_ADDRESSES = {  # Testnet agents of Gaurav, Divya, and Priyanshu
    "0x6aA9b180c1a4Ef43Ea540Da905f23BAfAEEB7DC8",  # agent_id 84
    "0x6aA9b180c1a4Ef43Ea540Da905f23BAfAEEB7CB5",  # agent_id 86
    "0x3B3AbC1604fAd139F841Da5c3Cad73a72621fee4",  # agent_id 102
}
COINGECKO_PRICE_API_URL = "https://api.coingecko.com/api/v3/coins/{coin_id}/history?date={date}"
WHITELISTED_TOKENS = {
    # Optimism tokens - stablecoins
    "0x0b2c639c533813f4aa9d7837caf62653d097ff85": ("USDC", 6),
    "0x01bff41798a0bcf287b996046ca68b395dbc1071": ("USDT0", 6),
    "0x94b008aa00579c1307b0ef2c499ad98a8ce58e58": ("USDT", 6),
    "0x7f5c764cbc14f9669b88837ca1490cca17c31607": ("USDC.e", 6),
    "0x8ae125e8653821e851f12a49f7765db9a9ce7384": ("DOLA", 18),
    "0xc40f949f8a4e094d1b49a23ea9241d289b7b2819": ("LUSD", 18),
    "0xda10009cbd5d07dd0cecc66161fc93d7c9000da1": ("DAI", 18),
    "0x087c440f251ff6cfe62b86dde1be558b95b4bb9b": ("BOLD", 18),
    "0x2e3d870790dc77a83dd1d18184acc7439a53f475": ("FRAX", 18),
    "0x2218a117083f5b482b0bb821d27056ba9c04b1d3": ("sDAI", 18),
    "0x1217bfe6c773eec6cc4a38b5dc45b92292b6e189": ("oUSDT", 6),
    "0x4f604735c1cf31399c6e711d5962b2b3e0225ad3": ("USDGLO", 18),
}
COIN_ID_MAPPING = {
    "usdc": "usd-coin",
    "alusd": "alchemix-usd",
    "usdt0": "usdt0",
    "usdt": "bridged-usdt",
    "usdc.e": "bridged-usd-coin-optimism",
    "usx": "token-dforce-usd",
    "dola": "dola-usd",
    "lusd": "liquity-usd",
    "dai": "makerdao-optimism-bridged-dai-optimism",
    "bold": "liquity-bold",
    "frax": "frax",
    "sdai": "savings-dai",
    "usd+": "overnight-fi-usd-optimism",
    "ousdt": "openusdt",
    "usdglo": "glo-dollar",
}

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

w3 = Web3(Web3.HTTPProvider("https://rpc-gate.autonolas.tech/optimism-rpc/"))

def get_coin_id_from_symbol(symbol: str, chain: str) -> Optional[str]:
    """Map token symbol to CoinGecko ID."""
    if chain == "optimism":
        coin_id_map = {
            "USDC": "usd-coin",
            "ALUSD": "alchemix-usd",
            "USDT0": "usdt0",
            "USDT": "bridged-usdt",
            "MSUSD": None,
            "USDC.E": "bridged-usd-coin-optimism",
            "USX": "token-dforce-usd",
            "DOLA": "dola-usd",
            "LUSD": "liquity-usd",
            "DAI": "makerdao-optimism-bridged-dai-optimism",
            "BOLD": "liquity-bold",
            "FRAX": "frax",
            "SDAI": "savings-dai",
            "USD+": "overnight-fi-usd-optimism",
            "OUSDT": "openusdt",
            "USDGLO": "glo-dollar",
            "ETH": "ethereum",
            "WETH": "ethereum",
            "WBTC": "wrapped-bitcoin",
        }
        return coin_id_map.get(symbol.upper())
    return None

def load_cache(name: str) -> Dict:
    """Load price cache from JSON file."""
    cache_file = f"{name}_cache.json"
    if os.path.exists(cache_file):
        try:
            with open(cache_file, 'r') as f:
                return json.load(f)
        except json.JSONDecodeError:
            logger.warning("Cache file corrupted, creating new cache")
            return {}
    return {}

def save_cache(name: str, cache: Dict):
    """Save price cache to JSON file."""
    cache_file = f"{name}_cache.json"
    with open(cache_file, 'w') as f:
        json.dump(cache, f, indent=2)

def get_cached_price(date_str: str, token_symbol: str) -> Optional[float]:
    """Get price from cache if available."""
    cache = load_cache(name="price")
    return cache.get(date_str, {}).get(token_symbol)

def update_price_cache(date_str: str, token_symbol: str, price: float):
    """Update price cache with new value."""
    cache = load_cache(name="price")
    if date_str not in cache:
        cache[date_str] = {}
    cache[date_str][token_symbol] = price
    save_cache(name="price", cache=cache)

def get_cached_request(cache_key: str) -> Optional[Dict]:
    """Get cached request response if available."""
    cache = load_cache(name="request")
    return cache.get(cache_key)

def update_request_cache(cache_key: str, response: Dict):
    """Update request cache with new response."""
    cache = load_cache(name="request")
    cache[cache_key] = response
    save_cache(name="request", cache=cache)
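
# The price cache written above is keyed by dd-mm-yyyy date strings and then
# by token symbol, e.g. price_cache.json (illustrative values):
#
#     {"15-05-2025": {"ETH": 2521.33, "FRAX": 0.9999}}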

def fetch_historical_eth_price(date_str: str) -> float:
    """Fetch historical ETH price from CoinGecko with caching."""
    # Check cache first
    cached_price = get_cached_price(date_str, "ETH")
    if cached_price is not None:
        return cached_price

    try:
        url = "https://api.coingecko.com/api/v3/coins/ethereum/history"
        params = {"date": date_str, "localization": "false"}

        # Add delay to respect rate limits
        time.sleep(1.2)

        response = requests.get(url, params=params)
        response.raise_for_status()

        data = response.json()
        if "market_data" in data and "current_price" in data["market_data"]:
            price = data["market_data"]["current_price"]["usd"]
            # Update cache
            update_price_cache(date_str, "ETH", price)
            return price

        return 0.0

    except Exception as e:
        print(f"Error fetching ETH price for {date_str}: {str(e)}")
        return 0.0

def fetch_historical_token_price(coin_id: str, date_str: str, token_symbol: str) -> float:
    """Fetch historical token price from CoinGecko with caching."""
    # Check cache first
    cached_price = get_cached_price(date_str, token_symbol)
    if cached_price is not None:
        return cached_price

    try:
        success, data = request_with_retries(
            endpoint=f"https://api.coingecko.com/api/v3/coins/{coin_id}/history",
            params={"date": date_str, "localization": "false"},
        )
        if not success:
            logger.error(f"Failed to fetch historical price for {coin_id} on {date_str}")
            return 0.0

        # Add delay to respect rate limits
        time.sleep(1.2)

        if "market_data" in data and "current_price" in data["market_data"]:
            price = data["market_data"]["current_price"]["usd"]
            # Update cache
            update_price_cache(date_str, token_symbol, price)
            return price

        return 0.0

    except Exception as e:
        print(f"Error fetching price for {coin_id} on {date_str}: {str(e)}")
        return 0.0

def get_block_at_timestamp(
    timestamp: int,
    chain: str = "optimism"
) -> Optional[int]:
    """Resolve the latest block at or before the given timestamp via Etherscan."""
    success, res = request_with_retries(
        endpoint=f"https://api-optimistic.etherscan.io/api?module=block&action=getblocknobytime&timestamp={timestamp}&closest=before&apikey={ETHERSCAN_API_KEY}",
    )
    if success and res.get("status") == "1" and "result" in res:
        return int(res.get("result"))
    else:
        logger.error(f"Failed to fetch block at timestamp {timestamp} for {chain}: {res.get('message', 'Unknown error')}")
        return None

def fetch_eth_balance(address: str, timestamp: float) -> float:
    """Fetch the ETH balance of an address at the block closest to the timestamp, with caching."""
    key = "eth_balance"
    cache = load_cache(name=key)
    if f"{address}_{timestamp}" in cache:
        return cache[f"{address}_{timestamp}"] / (10 ** 18)

    # Note: if get_block_at_timestamp returns None, web3 falls back to the latest block
    balance = w3.eth.get_balance(
        account=Web3.to_checksum_address(address),
        block_identifier=get_block_at_timestamp(int(timestamp))
    )

    cache[f"{address}_{timestamp}"] = balance
    save_cache(name=key, cache=cache)
    return balance / (10 ** 18)

def fetch_token_balance(
    address: str,
    token_address: str,
    timestamp: int,
    decimals: int = 18
) -> Optional[float]:
    """Fetch an ERC-20 balanceOf(address) at the block closest to the timestamp, with caching."""
    contract = w3.eth.contract(
        address=Web3.to_checksum_address(token_address),
        abi=[
            {
                "constant": True,
                "inputs": [{"name": "_owner", "type": "address"}],
                "name": "balanceOf",
                "outputs": [{"name": "", "type": "uint256"}],
                "payable": False,
                "stateMutability": "view",
                "type": "function",
            }
        ]
    )
    try:
        cache_key = f"token_balance_{address}_{token_address}_{timestamp}"
        cache = load_cache(name="token_balance")
        if cache_key in cache:
            return cache[cache_key] / (10 ** decimals)

        balance = contract.functions.balanceOf(address).call(block_identifier=get_block_at_timestamp(int(timestamp)))
        cache[cache_key] = balance
        save_cache(name="token_balance", cache=cache)
        return balance / (10 ** decimals) if balance else 0.0
    except Exception as e:
        logger.error(f"Error fetching token balance for {address} at {timestamp}: {e}")
        return None

def get_datetime_from_timestamp(timestamp: str) -> Optional[datetime]:
    """Convert timestamp string to datetime object."""
    try:
        return datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
    except (ValueError, TypeError):
        logger.warning(f"Invalid timestamp format: {timestamp}")
        return None

def request_with_retries(
    endpoint: str,
    params: Dict = None,
    headers: Dict = None,
    method: str = "GET",
    body: Dict = None,
    rate_limited_code: int = 429,
    retry_wait: int = 5,
    max_retries: int = 3
) -> Tuple[bool, Dict]:
    """Issue a GET/POST request with response caching, rate-limit handling, and retries."""
    for attempt in range(max_retries):
        try:
            if method.upper() == "POST":
                cache_key = f"POST_{endpoint}_{str(body or {})}"
                cached_response = get_cached_request(cache_key)
                if cached_response is not None:
                    return len(cached_response) > 0, cached_response

                response = requests.post(endpoint, headers=headers, json=body)

                if response.ok:
                    update_request_cache(cache_key, response.json())
            else:
                # Check cache first for GET requests
                cache_key = f"{endpoint}_{str(params or {})}"
                cached_response = get_cached_request(cache_key)
                if cached_response is not None:
                    return len(cached_response) > 0, cached_response

                response = requests.get(endpoint, headers=headers, params=params or {})

                # Cache successful responses
                if response.status_code == 200:
                    update_request_cache(cache_key, response.json())
                elif response.status_code == 404:
                    update_request_cache(cache_key, {})

            if response.status_code == rate_limited_code:
                logger.warning(f"Rate limited. Waiting {retry_wait} seconds...")
                time.sleep(retry_wait)
                continue

            if response.status_code != 200:
                logger.error(f"Request failed with status {response.status_code}")
                return False, {}

            return True, response.json()

        except Exception as e:
            logger.error(f"Request failed: {str(e)}")
            if attempt < max_retries - 1:
                time.sleep(retry_wait)
                continue
            return False, {}

    return False, {}

def should_include_transfer_optimism(
    from_address: str
) -> bool:
    """Determine if an Optimism transfer should be included based on from address type."""
    if not from_address:
        return False

    # Exclude zero address
    if from_address.lower() in [
        "0x0000000000000000000000000000000000000000",
        "0x0",
        "",
    ]:
        return False

    try:
        # Use Optimism RPC to check if address is a contract
        payload = {
            "jsonrpc": "2.0",
            "method": "eth_getCode",
            "params": [from_address, "latest"],
            "id": 1,
        }

        success, result = request_with_retries(
            endpoint="https://mainnet.optimism.io",
            method="POST",
            headers={"Content-Type": "application/json"},
            body=payload,
            rate_limited_code=429,
            retry_wait=5,
        )

        if not success:
            logger.error("Failed to check contract code")
            return False

        code = result.get("result", "0x")

        # If code is '0x', it's an EOA
        if code == "0x":
            return True

        # If it has code, check if it's a GnosisSafe
        safe_check_url = f"https://safe-transaction-optimism.safe.global/api/v1/safes/{from_address}/"
        success, _ = request_with_retries(
            endpoint=safe_check_url,
            headers={"Accept": "application/json"},
            rate_limited_code=429,
            retry_wait=5,
        )

        if success:
            return True

        logger.info(
            f"Excluding transfer from contract: {from_address}"
        )
        return False

    except Exception as e:
        logger.error(f"Error checking address {from_address}: {e}")
        return False

def fetch_optimism_transfers(
    address: str,
    last_timestamp: int
) -> Dict:
    """Fetch incoming transfers for a Safe on Optimism, grouped by date."""
    base_url = "https://safe-transaction-optimism.safe.global/api/v1"
    all_transfers_by_date = {}

    try:
        logger.info("Fetching Optimism transfers using SafeGlobal API...")

        last_date = datetime.fromtimestamp(last_timestamp).strftime("%Y-%m-%d")

        # Fetch incoming transfers
        transfers_url = f"{base_url}/safes/{address}/incoming-transfers/"

        processed_count = 0
        while True:
            success, response_json = request_with_retries(
                endpoint=transfers_url,
                headers={"Accept": "application/json"},
                rate_limited_code=429,
                retry_wait=5
            )

            if not success:
                logger.error("Failed to fetch Optimism transfers")
                break

            transfers = response_json.get("results", [])
            if not transfers:
                break

            for transfer in transfers:
                # Parse timestamp
                timestamp = transfer.get("executionDate")
                if not timestamp:
                    continue

                tx_datetime = get_datetime_from_timestamp(timestamp)
                tx_date = tx_datetime.strftime("%Y-%m-%d") if tx_datetime else None

                if not tx_date:
                    continue

                if tx_datetime.timestamp() > last_timestamp:
                    continue

                # Process the transfer
                from_address = transfer.get("from", address)
                transfer_type = transfer.get("type", "")

                if from_address.lower() == address.lower():
                    continue

                # Initialize date in transfers dict if not exists
                if tx_date not in all_transfers_by_date:
                    all_transfers_by_date[tx_date] = []

                should_include = should_include_transfer_optimism(
                    from_address
                )
                if not should_include:
                    continue

                # Process different transfer types
                if transfer_type == "ERC20_TRANSFER":
                    # Token transfer
                    token_info = transfer.get("tokenInfo", {})
                    token_address = transfer.get("tokenAddress", "")

                    if not token_info:
                        if not token_address:
                            continue
                        # You might want to add token decimal and symbol fetching here
                        symbol = "Unknown"
                        decimals = 18
                    else:
                        symbol = token_info.get("symbol", "Unknown")
                        decimals = int(token_info.get("decimals", 18) or 18)

                    if symbol.lower() != "usdc":
                        continue

                    value_raw = int(transfer.get("value", "0") or "0")
                    amount = value_raw / (10**decimals)

                    transfer_data = {
                        "from_address": from_address,
                        "amount": amount,
                        "token_address": token_address,
                        "symbol": symbol,
                        "timestamp": timestamp,
                        "tx_hash": transfer.get("transactionHash", ""),
                        "type": "token"
                    }

                elif transfer_type == "ETHER_TRANSFER":
                    # ETH transfer
                    try:
                        value_wei = int(transfer.get("value", "0") or "0")
                        amount_eth = value_wei / 10**18

                        if amount_eth <= 0:
                            continue
                    except (ValueError, TypeError):
                        logger.warning(f"Skipping transfer with invalid value: {transfer.get('value')}")
                        continue

                    transfer_data = {
                        "from_address": from_address,
                        "amount": amount_eth,
                        "token_address": "",
                        "symbol": "ETH",
                        "timestamp": timestamp,
                        "tx_hash": transfer.get("transactionHash", ""),
                        "type": "eth"
                    }

                else:
                    # Skip other transfer types
                    continue

                all_transfers_by_date[tx_date].append(transfer_data)
                processed_count += 1

                # Show progress
                if processed_count % 100 == 0:
                    logger.info(f"Processed {processed_count} Optimism transfers...")

            # Check for next page and follow the pagination cursor
            cursor = response_json.get("next")
            if not cursor:
                break
            transfers_url = cursor

        logger.info(f"Completed Optimism transfers: {processed_count} found")
        return all_transfers_by_date

    except Exception as e:
        logger.error(f"Error fetching Optimism transfers: {e}")
        return {}

def calculate_initial_investment_value_from_funding_events(
    transfers: Dict,
    chain: str,
) -> float:
    """Sum the USD value of funding transfers, priced at their historical transfer dates."""
    total_investment = 0.0

    if not transfers:
        print(f"No transfers found for {chain} chain")
        return 0.0

    if chain == "optimism":
        print("Using Optimism-specific transfer processing")
        for date, date_transfers in transfers.items():
            for transfer in date_transfers:
                try:
                    amount = transfer.get("amount", 0)
                    token_symbol = transfer.get("symbol", "").upper()

                    if amount <= 0:
                        continue

                    # Get historical price for the transfer date
                    date_str = datetime.strptime(date, "%Y-%m-%d").strftime("%d-%m-%Y")

                    if token_symbol == "ETH":  # nosec B105
                        price = fetch_historical_eth_price(date_str)
                    else:
                        coingecko_id = get_coin_id_from_symbol(token_symbol, chain)
                        if coingecko_id:
                            price = fetch_historical_token_price(
                                coingecko_id, date_str, token_symbol
                            )
                        else:
                            price = None

                    # Skip transfers whose price could not be resolved
                    if not price:
                        print(f"No price available for {token_symbol} on {date}, skipping transfer")
                        continue

                    transfer_value = amount * price
                    total_investment += transfer_value

                    print(f"Processed transfer on {date}: {amount} {token_symbol} @ ${price} = ${transfer_value}")

                except Exception as e:
                    print(f"Error processing transfer: {str(e)}")
                    continue
    else:
        print(f"Unsupported chain: {chain}, skipping")
        return 0.0

    print(f"Total initial investment from {chain} chain: ${total_investment}")
    return total_investment if total_investment > 0 else 0.0

def calculate_initial_value_from_address_and_timestamp(
    address: str,
    final_timestamp: int,
) -> Tuple[float, int]:
    """Compute the initial investment and its earliest funding timestamp for an address."""
    # First fetch the transfers
    transfers = fetch_optimism_transfers(address, final_timestamp)
    initial_timestamp = final_timestamp
    for _transfers in transfers.values():
        for _transfer in _transfers:
            if "timestamp" not in _transfer:
                continue

            transfer_timestamp = datetime.fromisoformat(_transfer["timestamp"].replace('Z', '+00:00')).timestamp()
            if transfer_timestamp < initial_timestamp:
                initial_timestamp = int(transfer_timestamp)

    # Then calculate initial investment
    initial_investment = calculate_initial_investment_value_from_funding_events(
        transfers=transfers,
        chain="optimism",
    )

    return initial_investment, int(initial_timestamp)
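
# Worked example (illustrative numbers): two qualifying inbound transfers,
# 10 USDC at $1.00 and 0.01 ETH at $2,500, give an initial value of
# 10 * 1.00 + 0.01 * 2500 = $35.00, and initial_timestamp is the earlier
# of the two transfer times.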

def calculate_final_value_from_address_and_timestamp(
    address: str,
    timestamp: int,
) -> float:
    """Value the address at the timestamp: ETH balance plus whitelisted token balances, in USD."""
    eth_balance = fetch_eth_balance(address, timestamp)
    eth_price = fetch_historical_eth_price(
        datetime.utcfromtimestamp(timestamp).strftime("%d-%m-%Y")
    )

    final_value = eth_balance * eth_price
    for token_address, (symbol, decimals) in WHITELISTED_TOKENS.items():
        token_balance = fetch_token_balance(
            address=address,
            token_address=token_address,
            decimals=decimals,
            timestamp=timestamp,
        )
        token_price = fetch_historical_token_price(
            coin_id=COIN_ID_MAPPING.get(symbol.lower(), symbol.lower()),
            date_str=datetime.utcfromtimestamp(timestamp).strftime("%d-%m-%Y"),
            token_symbol=symbol
        )
        if token_balance is not None and token_price is not None:
            token_value = token_balance * token_price
            if token_value > 0:
                final_value += token_value

    return final_value

def _calculate_adjusted_apr(
    apr: float,
    initial_timestamp: int,
    final_timestamp: int
) -> float:
    """Adjust APR for the change in ETH price between the initial and final timestamps."""
    if apr is None or apr == 0:
        return 0.0

    initial_eth_price = fetch_historical_eth_price(datetime.utcfromtimestamp(initial_timestamp).strftime("%d-%m-%Y"))
    final_eth_price = fetch_historical_eth_price(datetime.utcfromtimestamp(final_timestamp).strftime("%d-%m-%Y"))

    # fetch_historical_eth_price returns 0.0 on failure, so check truthiness
    # rather than `is not None` to avoid dividing by zero below
    if final_eth_price and initial_eth_price:
        adjustment_factor = Decimal("1") - (
            Decimal(str(final_eth_price)) / Decimal(str(initial_eth_price))
        )
        adjusted_apr = round(
            float(apr)
            + float(adjustment_factor * Decimal("100")),
            2,
        )
        return adjusted_apr
    else:
        logger.warning(
            f"Could not fetch ETH prices for timestamps {initial_timestamp} and {final_timestamp}. Returning original APR: {apr}"
        )
        return apr
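
# Worked example (illustrative numbers): with apr = 10.0, an initial ETH price
# of $2,000 and a final ETH price of $2,200, the adjustment factor is
# 1 - 2200/2000 = -0.1, so adjusted_apr = 10.0 + (-0.1 * 100) = 0.0.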

def calculate_apr_and_roi(
    initial_value: float,
    final_value: float,
    initial_timestamp: int,
    final_timestamp: int
) -> Tuple[float, float, float]:
    """Compute (apr, adjusted_apr, roi) from the initial and final portfolio values."""
    if final_value <= 0:
        logger.warning("Final value is non-positive, returning 0.0 for APR and ROI.")
        return 0.0, 0.0, 0.0

    # Calculate ROI (Return on Investment)
    roi = ((final_value / initial_value) - 1) * 100

    # Calculate hours since investment
    hours = max(1, (final_timestamp - int(initial_timestamp)) / 3600)

    # Calculate time ratio (hours in a year / hours since investment)
    hours_in_year = 8760
    time_ratio = hours_in_year / hours

    # Calculate APR (annualized ROI); negative APRs are left unannualized
    apr = float(roi * time_ratio)
    if apr < 0:
        apr = roi

    adjusted_apr = _calculate_adjusted_apr(
        apr=apr,
        initial_timestamp=initial_timestamp,
        final_timestamp=final_timestamp
    )

    return float(round(apr, 2)), float(round(adjusted_apr, 2)), float(round(roi, 2))
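
# Worked example (illustrative numbers): initial_value = 100, final_value = 105
# after 30 days (720 hours) gives roi = 5.0%, time_ratio = 8760 / 720 ≈ 12.17,
# and apr ≈ 5.0 * 12.17 ≈ 60.83% before the ETH-price adjustment.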

def fix_apr_and_roi(df: DataFrame) -> DataFrame:
    """Recompute initial/final values, APR, adjusted APR, and ROI for each non-dummy row."""
    # Remove rows with excluded addresses
    df = df[~df['address'].isin(EXCLUDED_ADDRESSES)]

    # Remove rows with timestamps before 2025-06-06
    df = df[df['timestamp'] >= '2025-06-06 00:00:00.000000']

    for idx, row in df.iterrows():
        if row['is_dummy']:
            continue

        final_timestamp = int(row['timestamp'].timestamp())
        calculation_metrics = row['calculation_metrics']
        initial_value, initial_timestamp = calculate_initial_value_from_address_and_timestamp(row['address'], final_timestamp)
        final_value = calculate_final_value_from_address_and_timestamp(row['address'], final_timestamp)
        if row["volume"] > 0:
            final_value += row["volume"]

        if initial_value <= 0:
            logger.warning(f"Initial value for address {row['address']} is non-positive, skipping row.")
            df = df.drop(idx)
            continue

        calculation_metrics['initial_value'] = initial_value
        calculation_metrics['final_value'] = final_value
        df.at[idx, 'calculation_metrics'] = calculation_metrics

        apr, adjusted_apr, roi = calculate_apr_and_roi(
            initial_value=initial_value,
            final_value=final_value,
            initial_timestamp=initial_timestamp,
            final_timestamp=final_timestamp
        )
        df.at[idx, 'apr'] = apr
        df.at[idx, 'adjusted_apr'] = adjusted_apr
        df.at[idx, 'roi'] = roi

    return df

if __name__ == "__main__":
    test_address = "0xa11417aeBF3932ee895008eDE8eA95616f488bCf"
    test_final_timestamp = 1749567654

    v = calculate_initial_value_from_address_and_timestamp(
        test_address,
        test_final_timestamp
    )
    print(v)
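
    # A possible extension of this smoke test: value the same address at the
    # same timestamp and recompute APR/ROI with the functions defined above
    # (variable names here are illustrative).
    initial_value, initial_ts = v
    if initial_value > 0:
        final_value = calculate_final_value_from_address_and_timestamp(
            test_address, test_final_timestamp
        )
        print(calculate_apr_and_roi(
            initial_value=initial_value,
            final_value=final_value,
            initial_timestamp=initial_ts,
            final_timestamp=test_final_timestamp,
        ))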