@@ -1,6 +1,5 @@
 import warnings
 
-
 warnings.filterwarnings("ignore")
 
 import json
@@ -15,7 +14,6 @@
 from pycoingecko import CoinGeckoAPI
 from web3 import Web3
 
-
 # Configure _logger
 _logger = setup_logger(__name__)
 
@@ -46,6 +44,8 @@
 _aggregators_cache = None
 _historical_data_cache = None
 
+# Error list
+errors = []
 
 def get_coin_list():
     global _coin_list_cache
@@ -56,7 +56,7 @@ def get_coin_list():
             response.raise_for_status()
             _coin_list_cache = response.json()
         except requests.RequestException as e:
-            _logger.error(f"Failed to fetch coin list: {e}")
+            errors.append(f"Failed to fetch coin list: {e}")
             _coin_list_cache = []
     return _coin_list_cache
 
@@ -72,7 +72,7 @@ def fetch_token_id(symbol):
         if coin["symbol"].lower() == symbol:
             return coin["id"]
 
-    _logger.error(f"Failed to fetch id for coin with symbol: {symbol}")
+    errors.append(f"Failed to fetch id for coin with symbol: {symbol}")
     return None
 
 
@@ -86,7 +86,8 @@ def fetch_historical_data(limit: int = 720):
         url = f"https://us-central1-stu-dashboard-a0ba2.cloudfunctions.net/getV2AggregatorHistoricalData?last_time={one_month_ago_ms}&limit={limit}"
         response = requests.get(url)
         if response.status_code != 200:
-            raise Exception("Failed to fetch historical data from STURDY API.")
+            errors.append("Failed to fetch historical data from STURDY API.")
+            return None
         _historical_data_cache = response.json()
     return _historical_data_cache
 
@@ -166,12 +167,12 @@ def fetch_aggregators() -> List[Dict[str, Any]]:
             response.raise_for_status()
             result = response.json()
             if "errors" in result:
-                _logger.error(f"REST API Errors: {result['errors']}")
+                errors.append(f"REST API Errors: {result['errors']}")
                 _aggregators_cache = []
             else:
                 _aggregators_cache = result
         except requests.RequestException as e:
-            _logger.error(f"REST API request failed: {e}")
+            errors.append(f"REST API request failed: {e}")
             _aggregators_cache = []
     return _aggregators_cache
 
@@ -200,7 +201,7 @@ def filter_aggregators(
             filtered_aggregators.append(aggregator)
 
     if not filtered_aggregators:
-        _logger.error("No suitable aggregator found.")
+        errors.append("No suitable aggregator found.")
         return []
 
     # If very few aggregators, return them directly
@@ -226,7 +227,7 @@ def filter_aggregators(
         scored_aggregators.append(aggregator)
 
     if not scored_aggregators:
-        _logger.error("No suitable aggregator found after scoring.")
+        errors.append("No suitable aggregator found after scoring.")
         return []
 
     score_threshold = np.percentile(
@@ -239,7 +240,7 @@ def filter_aggregators(
     filtered_scored_aggregators.sort(key=lambda x: x["score"], reverse=True)
 
     if not filtered_scored_aggregators:
-        _logger.error("No suitable aggregator found after score threshold.")
+        errors.append("No suitable aggregator found after score threshold.")
         return []
 
     # Limit to top 10 scored pools if more than 10
@@ -256,12 +257,16 @@ def calculate_il_risk_score_for_lending(
     time_period: int = 90,
 ) -> float:
     if not asset_token_1 or not asset_token_2:
-        _logger.error(
+        errors.append(
             "Tokens are required. Cannot calculate IL risk score without asset tokens"
         )
-        return float("nan")
+        return None
 
-    cg = CoinGeckoAPI(demo_api_key=coingecko_api_key)
+    is_pro = is_pro_api_key(coingecko_api_key)
+    if is_pro:
+        cg = CoinGeckoAPI(api_key=coingecko_api_key)
+    else:
+        cg = CoinGeckoAPI(demo_api_key=coingecko_api_key)
     to_timestamp = int(datetime.now().timestamp())
     from_timestamp = int((datetime.now() - timedelta(days=time_period)).timestamp())
 
@@ -279,15 +284,15 @@ def calculate_il_risk_score_for_lending(
             to_timestamp=to_timestamp,
         )
     except Exception as e:
-        _logger.error(f"Error fetching price data: {e}")
-        return float("nan")
-
+        errors.append(f"Error fetching price data: Incorrect Coingecko API Key")
+        return None
+
     prices_1_data = np.array([x[1] for x in prices_1["prices"]])
     prices_2_data = np.array([x[1] for x in prices_2["prices"]])
 
     min_length = min(len(prices_1_data), len(prices_2_data))
     if min_length == 0:
-        return float("nan")
+        return None
 
     prices_1_data = prices_1_data[:min_length]
     prices_2_data = prices_2_data[:min_length]
@@ -319,7 +324,7 @@ def get_token_id(symbol):
 
     token_0_id = get_token_id(token0_symbol)
     if not token_0_id:
-        return float("nan")
+        return None
 
     for silo in silos:
         token_1_symbol = silo["collateral"].lower()
@@ -329,12 +334,15 @@ def get_token_id(symbol):
             il_risk_score = calculate_il_risk_score_for_lending(
                 token_0_id, token_1_id, coingecko_api_key
             )
+            if not il_risk_score:
+                return None
+
             il_risk_scores.append(il_risk_score)
         else:
-            _logger.error(f"Failed to fetch token IDs for silo: {silo['collateral']}")
+            errors.append(f"Failed to fetch token IDs for silo: {silo['collateral']}")
 
     if not il_risk_scores:
-        return float("nan")
+        return None
 
     return sum(il_risk_scores) / len(il_risk_scores)
 
@@ -355,6 +363,7 @@ def analyze_vault_liquidity(aggregator):
             break
 
     if not tvl or not total_assets:
+        errors.append("Could not retrieve depth score and maximum position size.")
         return float("nan"), float("nan")
 
     depth_score = (
@@ -389,15 +398,19 @@ def get_best_opportunities(
 ) -> List[Dict[str, Any]]:
     data = fetch_aggregators()
     if not data:
-        return []
+        errors.append("Failed to fetch aggregators.")
+        return {"error": errors}
 
     filtered_aggregators = filter_aggregators(
         chains, data, lending_asset, current_positions
     )
     if not filtered_aggregators:
-        return []
+        errors.append("No suitable aggregators found.")
+        return {"error": errors}
 
     historical_data = fetch_historical_data()
+    if historical_data is None:
+        return {"error": errors}
 
     for aggregator in filtered_aggregators:
         silos = aggregator.get("whitelistedSilos", [])
@@ -427,6 +440,9 @@ def calculate_metrics(
         coingecko_api_key,
     )
     historical_data = fetch_historical_data()
+    if historical_data is None:
+        return {"error": errors}
+
     sharpe_ratio = get_sharpe_ratio_for_address(
         historical_data, position["pool_address"]
     )
@@ -439,10 +455,31 @@ def calculate_metrics(
     }
 
 
+def is_pro_api_key(coingecko_api_key: str) -> bool:
+    """
+    Check if the provided CoinGecko API key is a pro key.
+    """
+    # Try using the key as a pro API key
+    cg_pro = CoinGeckoAPI(api_key=coingecko_api_key)
+    try:
+        response = cg_pro.get_coin_market_chart_range_by_id(
+            id="bitcoin",
+            vs_currency="usd",
+            from_timestamp=0,
+            to_timestamp=0
+        )
+        if response:
+            return True
+    except Exception:
+        return False
+
+    return False
+
 def run(*_args, **kwargs) -> Any:
     missing = check_missing_fields(kwargs)
     if missing:
-        return {"error": f"Required kwargs {missing} were not provided."}
+        errors.append(f"Required kwargs {missing} were not provided.")
+        return {"error": errors}
 
     required_fields = list(REQUIRED_FIELDS)
     get_metrics = kwargs.get("get_metrics", False)
@@ -452,9 +489,14 @@ def run(*_args, **kwargs) -> Any:
     kwargs = remove_irrelevant_fields(kwargs, required_fields)
 
     if get_metrics:
-        return calculate_metrics(**kwargs)
+        metrics = calculate_metrics(**kwargs)
+        if metrics is None:
+            errors.append("Failed to calculate metrics.")
+        return {"error": errors} if errors else metrics
     else:
         result = get_best_opportunities(**kwargs)
+        if isinstance(result, dict) and "error" in result:
+            errors.append(result["error"])
         if not result:
-            return {"error": "No suitable aggregators found"}
-        return result
+            errors.append("No suitable aggregators found")
+        return {"error": errors} if errors else {"result": result}
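
A minimal usage sketch of the new return contract (not part of this change set): failures are accumulated in the module-level errors list and surfaced as {"error": [...]}, while the success path returns {"result": [...]} or the metrics dict. Only get_metrics and coingecko_api_key appear as kwargs in this diff; the remaining keyword arguments below are placeholders.

# Hypothetical caller; assumes run() is imported from the strategy module patched above.
outcome = run(
    coingecko_api_key="CG-xxxx",  # demo or pro key; is_pro_api_key() selects the client
    get_metrics=False,            # False -> get_best_opportunities() path
    # ...remaining required kwargs (e.g. chains, lending_asset, current_positions)
)

if isinstance(outcome, dict) and "error" in outcome:
    for message in outcome["error"]:       # accumulated error messages, not raised exceptions
        print("strategy error:", message)
else:
    for opportunity in outcome["result"]:  # success path: {"result": [...]}
        print(opportunity)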