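"""Label trader addresses by Olas staking program.

Maintains a pickled map of service ids to safe/owner addresses read from the
Gnosis service registry, and uses it to tag trades as quickstart, pearl,
non_staking or non_Olas.
"""
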
import json
import sys
from typing import Any, List, Optional
from utils import RPC, DATA_DIR, TMP_DIR
import requests
from tqdm import tqdm
from web3 import Web3
import pandas as pd
import pickle
import os
from concurrent.futures import ThreadPoolExecutor, as_completed

NUM_WORKERS = 10
DEPRECATED_STAKING_PROGRAMS = {
    "quickstart_alpha_everest": "0x5add592ce0a1B5DceCebB5Dcac086Cd9F9e3eA5C",
    "quickstart_alpha_alpine": "0x2Ef503950Be67a98746F484DA0bBAdA339DF3326",
    "quickstart_alpha_coastal": "0x43fB32f25dce34EB76c78C7A42C8F40F84BCD237",
}
STAKING_PROGRAMS_QS = {
    "quickstart_beta_hobbyist": "0x389B46c259631Acd6a69Bde8B6cEe218230bAE8C",
    "quickstart_beta_hobbyist_2": "0x238EB6993b90a978ec6AAD7530d6429c949C08DA",
    "quickstart_beta_expert": "0x5344B7DD311e5d3DdDd46A4f71481bD7b05AAA3e",
    "quickstart_beta_expert_2": "0xb964e44c126410df341ae04B13aB10A985fE3513",
    "quickstart_beta_expert_3": "0x80faD33Cadb5F53f9D29F02Db97D682E8b101618",
    "quickstart_beta_expert_4": "0xaD9d891134443B443D7F30013c7e14Fe27F2E029",
    "quickstart_beta_expert_5": "0xE56dF1E563De1B10715cB313D514af350D207212",
    "quickstart_beta_expert_6": "0x2546214aEE7eEa4bEE7689C81231017CA231Dc93",
    "quickstart_beta_expert_7": "0xD7A3C8b975f71030135f1a66e9e23164d54fF455",
    "quickstart_beta_expert_8": "0x356C108D49C5eebd21c84c04E9162de41933030c",
    "quickstart_beta_expert_9": "0x17dBAe44BC5618Cc254055b386A29576b4F87015",
    "quickstart_beta_expert_10": "0xB0ef657b8302bd2c74B6E6D9B2b4b39145b19c6f",
    "quickstart_beta_expert_11": "0x3112c1613eAC3dBAE3D4E38CeF023eb9E2C91CF7",
    "quickstart_beta_expert_12": "0xF4a75F476801B3fBB2e7093aCDcc3576593Cc1fc",
}

STAKING_PROGRAMS_PEARL = {
    "pearl_alpha": "0xEE9F19b5DF06c7E8Bfc7B28745dcf944C504198A",
    "pearl_beta": "0xeF44Fb0842DDeF59D37f85D61A1eF492bbA6135d",
    "pearl_beta_2": "0x1c2F82413666d2a3fD8bC337b0268e62dDF67434",
    "pearl_beta_3": "0xBd59Ff0522aA773cB6074ce83cD1e4a05A457bc1",
    "pearl_beta_4": "0x3052451e1eAee78e62E169AfdF6288F8791F2918",
    "pearl_beta_5": "0x4Abe376Fda28c2F43b84884E5f822eA775DeA9F4",
}


SERVICE_REGISTRY_ADDRESS = "0x9338b5153AE39BB89f50468E608eD9d764B755fD"


def _get_contract(address: str) -> Any:
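    """Build a web3 contract instance for the given address, using the ABI
    fetched from Blockscout."""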
    w3 = Web3(Web3.HTTPProvider(RPC))
    abi = _get_abi(address)
    contract = w3.eth.contract(address=Web3.to_checksum_address(address), abi=abi)
    return contract


def _get_abi(address: str) -> List:
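    """Fetch the ABI of a verified contract from the Gnosis Blockscout API."""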
    contract_abi_url = (
        "https://gnosis.blockscout.com/api/v2/smart-contracts/{contract_address}"
    )
    response = requests.get(
        contract_abi_url.format(contract_address=address), timeout=60
    ).json()

    if "result" in response:
        result = response["result"]
        try:
            abi = json.loads(result)
        except json.JSONDecodeError:
            print("Error: Failed to parse 'result' field as JSON")
            sys.exit(1)
    else:
        abi = response.get("abi")

    return abi if abi else []


def get_service_safe(service_id: int) -> str:
    """Gets the service Safe"""
    service_registry = _get_contract(SERVICE_REGISTRY_ADDRESS)
    service_safe_address = service_registry.functions.getService(service_id).call()[1]
    return service_safe_address


def list_contract_functions(contract):
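    """Return the names of all functions declared in the contract ABI."""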
    function_names = []
    for item in contract.abi:
        if item.get("type") == "function":
            function_names.append(item.get("name"))
    return function_names


def get_service_data(service_registry: Any, service_id: int) -> Optional[dict]:
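    """Fetch the safe address, state and owner of a service from the registry.

    Returns a single-entry {service_id: data} dict (empty if the service has
    no safe deployed), or None when the owner lookup fails.
    """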
    tmp_map = {}
    data = service_registry.functions.getService(service_id).call()
    try:
        owner_data = service_registry.functions.ownerOf(service_id).call()
    except Exception as e:
        tqdm.write(f"Error: no owner data for service id {service_id}: {e}")
        return None
    address = data[1]
    state = data[-1]
    # skip services that never deployed a safe (zero address)
    if address != "0x0000000000000000000000000000000000000000":
        tmp_map[service_id] = {
            "safe_address": address,
            "state": state,
            "owner_address": owner_data,
        }
    return tmp_map


def update_service_map(start: int = 1, end: int = 2000):
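    """Fetch data for service ids in [start, end) in parallel and merge it
    into the pickled service map."""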
    if os.path.exists(DATA_DIR / "service_map.pkl"):
        with open(DATA_DIR / "service_map.pkl", "rb") as f:
            service_map = pickle.load(f)
    else:
        service_map = {}
    print(f"updating service map from service id={start}")
    # we do not know which is the last service id right now
    service_registry = _get_contract(SERVICE_REGISTRY_ADDRESS)
    with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
        futures = []
        for service_id in range(start, end):
            futures.append(
                executor.submit(
                    get_service_data,
                    service_registry,
                    service_id,
                )
            )

        for future in tqdm(
            as_completed(futures),
            total=len(futures),
            desc=f"Fetching all service data from contracts",
        ):
            partial_dict = future.result()
            if partial_dict:
                service_map.update(partial_dict)

    with open(DATA_DIR / "service_map.pkl", "wb") as f:
        pickle.dump(service_map, f)


def check_owner_staking_contract(owner_address: str) -> str:
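    """Classify an owner address as "quickstart", "pearl" or "non_staking".

    Deprecated (legacy) staking programs are labeled as quickstart.
    """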
    staking = "non_staking"
    owner_address = owner_address.lower()
    # check quickstart staking contracts
    qs_list = [x.lower() for x in STAKING_PROGRAMS_QS.values()]
    if owner_address in qs_list:
        return "quickstart"

    # check pearl staking contracts
    pearl_list = [x.lower() for x in STAKING_PROGRAMS_PEARL.values()]
    if owner_address in pearl_list:
        return "pearl"

    # check legacy staking contracts
    deprec_list = [x.lower() for x in DEPRECATED_STAKING_PROGRAMS.values()]
    if owner_address in deprec_list:
        return "quickstart"

    return staking


def get_trader_address_staking(trader_address: str, service_map: dict) -> str:
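    """Return the staking label for a trader address, or "non_Olas" when no
    service safe matches it."""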
    # check if there is any service id linked with that trader address

    found_key = -1
    for key, value in service_map.items():
        if value["safe_address"].lower() == trader_address.lower():
            # found a service
            found_key = key
            break

    if found_key == -1:
        return "non_Olas"
    owner = service_map[found_key]["owner_address"]
    return check_owner_staking_contract(owner_address=owner)


def label_trades_by_staking(
    trades_df: pd.DataFrame, start: Optional[int] = None
) -> pd.DataFrame:
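    """Refresh the service map, then add a "staking" column to trades_df by
    matching each trader address against known service safes."""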
    with open(DATA_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    # get the last service id
    keys = service_map.keys()
    if start is None:
        last_key = max(keys)
    else:
        last_key = start
    print(f"last service key = {last_key}")
    update_service_map(start=last_key)
    # reload the map so newly fetched services are included
    with open(DATA_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    all_traders = trades_df.trader_address.unique()
    trades_df["staking"] = ""
    for trader in tqdm(all_traders, desc="Labeling traders by staking", unit="trader"):
        staking_label = get_trader_address_staking(trader, service_map)
        if staking_label:
            trades_df.loc[trades_df["trader_address"] == trader, "staking"] = (
                staking_label
            )
    return trades_df


def generate_retention_activity_file():
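    """Label the tools data by staking program and write the weekly
    retention_activity.parquet file."""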
    tools = pd.read_parquet(TMP_DIR / "tools.parquet")
    tools["request_time"] = pd.to_datetime(tools["request_time"])
    tools["request_date"] = tools["request_time"].dt.date
    tools = tools.sort_values(by="request_time", ascending=True)
    reduced_tools_df = tools[
        ["trader_address", "request_time", "market_creator", "request_date"]
    ]
    print(f"length of reduced tools before labeling = {len(reduced_tools_df)}")
    reduced_tools_df = label_trades_by_staking(trades_df=reduced_tools_df)
    print(f"length of reduced tools after labeling = {len(reduced_tools_df)}")
    reduced_tools_df = reduced_tools_df.sort_values(by="request_time", ascending=True)
    reduced_tools_df["month_year_week"] = (
        pd.to_datetime(tools["request_time"])
        .dt.to_period("W")
        .dt.start_time.dt.strftime("%b-%d-%Y")
    )
    reduced_tools_df.to_parquet(TMP_DIR / "retention_activity.parquet")
    return True


def check_list_addresses(address_list: list):
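    """Print, for each address in the list, whether it is the safe address
    of a known Olas service ("Olas") or not ("non_Olas")."""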
    with open(DATA_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    # check whether each address is the safe of any service id on the map
    mapping = {}
    print(f"length of service map={len(service_map)}")
    keys = service_map.keys()
    last_key = max(keys)

    print(f"last service key = {last_key}")
    update_service_map(start=last_key)
    # reload the map so newly fetched services are included
    with open(DATA_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    for trader_address in address_list:
        found_key = -1  # reset for each address
        for key, value in service_map.items():
            if value["safe_address"].lower() == trader_address.lower():
                # found a service
                found_key = key
                mapping[trader_address] = "Olas"
                break

        if found_key == -1:
            mapping[trader_address] = "non_Olas"
    print("mapping")
    print(mapping)


def check_service_map():
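    """Print the service ids missing from the pickled service map."""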
    with open(DATA_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    # look for gaps in the sequence of service ids
    print(f"length of service map={len(service_map)}")
    keys = service_map.keys()
    last_key = max(keys)
    print(f"last key = {last_key}")
    missing_keys = 0
    for i in range(1, last_key):
        if i not in keys:
            missing_keys += 1
            print(f"missing key = {i}")
    print(f"total missing keys = {missing_keys}")


if __name__ == "__main__":
    # create_service_map()
    # trades_df = pd.read_parquet(TMP_DIR / "all_trades_df.parquet")
    # trades_df = trades_df.loc[trades_df["is_invalid"] == False]

    # trades_df = label_trades_by_staking(trades_df=trades_df, start=8)
    # print(trades_df.staking.value_counts())
    # trades_df.to_parquet(TMP_DIR / "result_staking.parquet", index=False)
    # generate_retention_activity_file()
    a_list = [
        "0x027592700fafc4db3221bb662d7bdc7f546a2bb5",
        "0x0845f4ad01a2f41da618848c7a9e56b64377965e",
    ]
    # check_list_addresses(address_list=a_list)
    # update_service_map()
    # check_service_map()
    unknown_traders = pd.read_parquet(DATA_DIR / "unknown_traders.parquet")
    unknown_traders = label_trades_by_staking(trades_df=unknown_traders)
    unknown_traders.to_parquet(DATA_DIR / "unknown_traders.parquet", index=False)