File size: 2,684 Bytes
b4a0040
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import logging
from utils import get_question, current_answer, RPC, measure_execution_time
from markets import (
    etl as mkt_etl,
    DEFAULT_FILENAME as MARKETS_FILENAME,
)
from get_mech_info import (
    get_mech_events_since_last_run,
    update_json_files,
)
from pull_data import DATA_DIR, update_json_files, updating_timestamps
from tools import DEFAULT_FILENAME as TOOLS_FILENAME, generate_tools_file
from profitability import (
    run_profitability_analysis,
    analyse_all_traders,
    label_trades_by_staking,
)
from update_tools_accuracy import compute_tools_accuracy
import pandas as pd

# Configure root logging once at module import so every pipeline step below emits INFO-level logs.
logging.basicConfig(level=logging.INFO)


def prepare_live_metrics(
    tools_filename="new_tools.parquet", trades_filename="new_fpmmTrades.parquet"
):
    """Compute daily live trader metrics and persist them to ``daily_info.parquet``.

    Reads the freshly generated tools and trades parquet files from DATA_DIR,
    runs the per-trader analysis in daily mode, labels trades with staking
    information, and writes the result to a separate daily file.

    :param tools_filename: parquet file with the newly parsed tools data.
    :param trades_filename: parquet file with the newly fetched FPMM trades.
    """
    fpmmTrades = pd.read_parquet(DATA_DIR / trades_filename)
    tools = pd.read_parquet(DATA_DIR / tools_filename)

    # Use logging instead of print for consistency with the rest of the module.
    logging.info("Analysing trades...")
    all_trades_df = analyse_all_traders(fpmmTrades, tools, daily_info=True)

    # Staking label — assumed to annotate all_trades_df in place, since the
    # return value is ignored and the frame is saved afterwards. TODO confirm.
    label_trades_by_staking(all_trades_df)

    # Save into a separate file so daily metrics don't touch the historical data.
    all_trades_df.to_parquet(DATA_DIR / "daily_info.parquet", index=False)


@measure_execution_time
def daily_analysis():
    """Run the daily analysis pipeline for the FPMMS project.

    Steps, in order: markets ETL, incremental mech-event fetch, tools ETL,
    profitability analysis over the new data, JSON file merging, tools
    timestamp update (best-effort), and tools accuracy computation.
    Returns early if no mech events could be retrieved.
    """
    rpc = RPC

    # Run markets ETL
    logging.info("Running markets ETL")
    mkt_etl(MARKETS_FILENAME)
    logging.info("Markets ETL completed")

    # Fetch only the mech events created since the previous run.
    latest_timestamp = get_mech_events_since_last_run()
    if latest_timestamp is None:  # fixed: identity check instead of `== None`
        logging.error("Error while getting the mech events")
        return
    logging.info(f"Finished generating the mech json files from {latest_timestamp}")

    # Run tools ETL over the new data only.
    logging.info("Generate and parse the tools content")
    generate_tools_file("new_tools_info.json", "new_tools.parquet")
    logging.info("Tools ETL completed")

    # Run profitability analysis on the new files and merge with history.
    logging.info("Computing trading metrics")
    run_profitability_analysis(
        rpc=rpc,
        tools_filename="new_tools.parquet",
        trades_filename="new_fpmmTrades.parquet",
        from_timestamp=latest_timestamp,
        merge=True,
    )
    logging.info("Profitability analysis completed")

    # Merge new json files with the old json files.
    # NOTE(review): `update_json_files` is imported from both `get_mech_info`
    # and `pull_data`; the `pull_data` import shadows the first — confirm
    # which implementation is intended.
    update_json_files()

    # Best-effort timestamp refresh: a failure here should not abort the run.
    try:
        updating_timestamps(rpc, TOOLS_FILENAME)
    except Exception:
        logging.exception("Error while updating timestamps of tools")

    compute_tools_accuracy()


if __name__ == "__main__":
    # Entry point: run the full daily pipeline first, then compute the
    # lightweight live metrics from the files it produced.
    daily_analysis()
    prepare_live_metrics()