rosacastillo
committed on
Commit
·
7652a7b
1
Parent(s):
23fcc73
Refactoring queries and new logic about live markets info
Browse files- live_data/analysis_of_markets_data.ipynb +0 -0
- live_data/markets_live_data.parquet +3 -0
- live_data/markets_live_data_old.parquet +3 -0
- scripts/live_markets_data.py +276 -0
- scripts/live_traders_data.py +153 -0
- scripts/live_utils.py +14 -0
- scripts/markets.py +13 -31
- scripts/profitability.py +4 -105
- scripts/queries.py +240 -0
- scripts/utils.py +23 -0
live_data/analysis_of_markets_data.ipynb
ADDED
The diff for this file is too large to render.
See raw diff
|
|
live_data/markets_live_data.parquet
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:797e30e861c7468a5cf840a23b560d66bb9a1103d5bdae2cd86a485a43592e6e
|
3 |
+
size 21197
|
live_data/markets_live_data_old.parquet
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:362255753caee81b4453f2d9c0fcf5870c770c3cf32d09c15ae6a571692a6874
|
3 |
+
size 19052
|
scripts/live_markets_data.py
ADDED
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
# ------------------------------------------------------------------------------
|
3 |
+
#
|
4 |
+
# Copyright 2024 Valory AG
|
5 |
+
#
|
6 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
7 |
+
# you may not use this file except in compliance with the License.
|
8 |
+
# You may obtain a copy of the License at
|
9 |
+
#
|
10 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11 |
+
#
|
12 |
+
# Unless required by applicable law or agreed to in writing, software
|
13 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
14 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
15 |
+
# See the License for the specific language governing permissions and
|
16 |
+
# limitations under the License.
|
17 |
+
#
|
18 |
+
# ------------------------------------------------------------------------------
|
19 |
+
|
20 |
+
import functools
|
21 |
+
import warnings
|
22 |
+
from typing import Optional, Generator, Callable
|
23 |
+
import os
|
24 |
+
import pandas as pd
|
25 |
+
from datetime import datetime, timedelta, UTC
|
26 |
+
import requests
|
27 |
+
from tqdm import tqdm
|
28 |
+
from typing import List, Dict
|
29 |
+
from live_traders_data import add_trading_info
|
30 |
+
from utils import SUBGRAPH_API_KEY, measure_execution_time
|
31 |
+
from live_utils import OMEN_SUBGRAPH_URL, CREATOR, BATCH_SIZE, DATA_DIR
|
32 |
+
from queries import (
|
33 |
+
FPMMS_WITH_TOKENS_QUERY,
|
34 |
+
ID_FIELD,
|
35 |
+
DATA_FIELD,
|
36 |
+
ANSWER_FIELD,
|
37 |
+
ANSWER_TIMESTAMP_FIELD,
|
38 |
+
QUERY_FIELD,
|
39 |
+
TITLE_FIELD,
|
40 |
+
OUTCOMES_FIELD,
|
41 |
+
OPENING_TIMESTAMP_FIELD,
|
42 |
+
CREATION_TIMESTAMP_FIELD,
|
43 |
+
LIQUIDITY_FIELD,
|
44 |
+
LIQUIDIY_MEASURE_FIELD,
|
45 |
+
TOKEN_AMOUNTS_FIELD,
|
46 |
+
ERROR_FIELD,
|
47 |
+
QUESTION_FIELD,
|
48 |
+
FPMMS_FIELD,
|
49 |
+
)
|
50 |
+
|
51 |
+
|
52 |
+
ResponseItemType = List[Dict[str, str]]
|
53 |
+
SubgraphResponseType = Dict[str, ResponseItemType]
|
54 |
+
|
55 |
+
|
56 |
+
class RetriesExceeded(Exception):
    """Exception to raise when retries are exceeded during data-fetching."""

    def __init__(
        self, msg="Maximum retries were exceeded while trying to fetch the data!"
    ):
        """Initialise with the default (or a caller-supplied) message."""
        super().__init__(msg)
63 |
+
|
64 |
+
|
65 |
+
def hacky_retry(func: Callable, n_retries: int = 3) -> Callable:
    """Create a hacky retry strategy.

    Unfortunately, we cannot use `requests.packages.urllib3.util.retry.Retry`,
    because the subgraph does not return the appropriate status codes in case of failure.
    Instead, it always returns code 200. Thus, we raise exceptions manually inside `make_request`,
    catch those exceptions in the hacky retry decorator and try again.
    Finally, if the allowed number of retries is exceeded, we raise a custom `RetriesExceeded` exception.

    :param func: the input request function.
    :param n_retries: the maximum allowed number of retries.
    :return: The request method with the hacky retry strategy applied.
    """

    @functools.wraps(func)
    def wrapper_hacky_retry(*args, **kwargs) -> SubgraphResponseType:
        """The wrapper for the hacky retry.

        :return: a response dictionary.
        """
        # attempt 0 is the initial call; up to `n_retries` retries may follow
        for attempt in range(n_retries + 1):
            try:
                if attempt > 0:
                    warnings.warn(f"Retrying {attempt}/{n_retries}...")

                return func(*args, **kwargs)
            except (ValueError, ConnectionError) as exc:
                warnings.warn(exc.args[0])

        raise RetriesExceeded()

    return wrapper_hacky_retry
|
100 |
+
|
101 |
+
|
102 |
+
@hacky_retry
def query_subgraph(url: str, query: str, key: str) -> SubgraphResponseType:
    """Query a subgraph.

    Args:
        url: the subgraph's URL.
        query: the query to be used.
        key: the key to use in order to access the required data.

    Returns:
        a response dictionary.
    """
    request_headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }
    response = requests.post(url, json={QUERY_FIELD: query}, headers=request_headers)

    # the subgraph frequently answers 200 even on failure; non-200 is a hard error
    if response.status_code != 200:
        raise ConnectionError(
            "Something went wrong while trying to communicate with the subgraph "
            f"(Error: {response.status_code})!\n{response.text}"
        )

    payload = response.json()
    if ERROR_FIELD in payload:
        raise ValueError(f"The given query is not correct: {payload[ERROR_FIELD]}")

    result = payload.get(DATA_FIELD, {}).get(key, None)
    if result is None:
        raise ValueError(f"Unknown error encountered!\nRaw response: \n{payload}")

    return result
|
136 |
+
|
137 |
+
|
138 |
+
def fpmms_fetcher(current_timestamp: int) -> Generator[ResponseItemType, int, None]:
    """An indefinite fetcher for the FPMMs.

    Two-step generator protocol: the caller `send()`s the id to paginate from,
    then receives the corresponding batch from the following `yield`.
    """
    subgraph_url = OMEN_SUBGRAPH_URL.substitute(subgraph_api_key=SUBGRAPH_API_KEY)
    print(f"omen_subgraph = {subgraph_url}")
    while True:
        cursor_id = yield
        substitutions = {
            "creator": CREATOR,
            "fpmm_id": cursor_id,
            "current_timestamp": current_timestamp,
            "fpmms_field": FPMMS_FIELD,
            "first": BATCH_SIZE,
            "id_field": ID_FIELD,
            "answer_timestamp_field": ANSWER_TIMESTAMP_FIELD,
            "question_field": QUESTION_FIELD,
            "outcomes_field": OUTCOMES_FIELD,
            "title_field": TITLE_FIELD,
            "opening_timestamp_field": OPENING_TIMESTAMP_FIELD,
            "creation_timestamp_field": CREATION_TIMESTAMP_FIELD,
            "liquidity_field": LIQUIDITY_FIELD,
            "liquidity_measure_field": LIQUIDIY_MEASURE_FIELD,
            "token_amounts_field": TOKEN_AMOUNTS_FIELD,
        }
        rendered_query = FPMMS_WITH_TOKENS_QUERY.substitute(**substitutions)
        print(f"Executing query {rendered_query}")
        yield query_subgraph(subgraph_url, rendered_query, FPMMS_FIELD)
|
163 |
+
|
164 |
+
|
165 |
+
def fetch_fpmms(current_timestamp: int) -> pd.DataFrame:
    """Fetch all the fpmms of the creator, paginating by market id."""
    print("Fetching all markets")
    cursor = ""
    collected = []
    fetcher = fpmms_fetcher(current_timestamp)
    for _ in tqdm(fetcher, unit="fpmms", unit_scale=BATCH_SIZE):
        batch = fetcher.send(cursor)
        if not batch:
            print("no data")
            break

        # TODO Add the incremental batching system from market creator
        # prev_fpmms is the previous local file with the markets
        # for fpmm in batch:
        #     if fpmm["id"] not in fpmms or "trades" not in prev_fpmms[fpmm["id"]]:
        #         prev_fpmms[fpmm["id"]] = fpmm
        print(f"length of the data received = {len(batch)}")
        cursor = batch[-1].get(ID_FIELD, "")
        if cursor == "":
            raise ValueError(f"Unexpected data format retrieved: {batch}")

        collected.extend(batch)

    print("Finished collecting data")
    return pd.DataFrame(collected)
|
191 |
+
|
192 |
+
|
193 |
+
def get_answer(fpmm: pd.Series) -> str:
    """Resolve the textual answer of an FPMM row from its stored answer index."""
    outcomes = fpmm[QUESTION_FIELD][OUTCOMES_FIELD]
    answer_index = fpmm[ANSWER_FIELD]
    return outcomes[answer_index]
|
196 |
+
|
197 |
+
|
198 |
+
def get_first_token_perc(row):
    """Percentage (2 decimals) of the first outcome token in the pool; 0 if empty."""
    total = row["total_tokens"]
    if total == 0.0:
        return 0
    return round((row["token_first_amount"] / total) * 100, 2)
|
202 |
+
|
203 |
+
|
204 |
+
def get_second_token_perc(row):
    """Percentage (2 decimals) of the second outcome token in the pool; 0 if empty."""
    total = row["total_tokens"]
    if total == 0.0:
        return 0
    return round((row["token_second_amount"] / total) * 100, 2)
|
208 |
+
|
209 |
+
|
210 |
+
def transform_fpmms(fpmms: pd.DataFrame, filename: str, current_timestamp: int) -> pd.DataFrame:
    """Transform an FPMMS dataframe and merge it with previously stored samples.

    :param fpmms: freshly fetched markets; mutated in place with the new columns.
    :param filename: parquet file inside DATA_DIR holding older samples (may not exist).
    :param current_timestamp: UNIX timestamp at which the token amounts were sampled.
    :return: the combined dataframe (old samples with updated open flag + new samples),
        or the new samples alone when there is no previous file.
    """
    # Tag every fresh sample with the collection timestamp; these markets are open.
    fpmms["tokens_timestamp"] = current_timestamp
    fpmms["open"] = True

    # Token distribution: percentage of each of the two outcome tokens in the pool.
    fpmms["token_first_amount"] = fpmms.outcomeTokenAmounts.apply(lambda x: int(x[0]))
    fpmms["token_second_amount"] = fpmms.outcomeTokenAmounts.apply(lambda x: int(x[1]))
    fpmms["total_tokens"] = fpmms.apply(
        lambda x: x.token_first_amount + x.token_second_amount, axis=1
    )
    fpmms["first_token_perc"] = fpmms.apply(get_first_token_perc, axis=1)
    fpmms["second_token_perc"] = fpmms.apply(get_second_token_perc, axis=1)
    fpmms.drop(
        columns=["token_first_amount", "token_second_amount", "total_tokens"],
        inplace=True,
    )

    # previous file to update?
    old_fpmms = None
    if os.path.exists(DATA_DIR / filename):
        old_fpmms = pd.read_parquet(DATA_DIR / filename)

    if old_fpmms is not None:
        # Markets absent from the fresh batch are not open anymore.
        # BUGFIX: the flag lives in the "open" column (created above for every new
        # row); the original wrote to a non-existent "status" column, leaving the
        # "open" flag of stale markets untouched.
        open_markets = list(fpmms.id.unique())
        print("Updating market status of old markets")
        open_mask = old_fpmms["id"].isin(open_markets)
        old_fpmms.loc[~open_mask, "open"] = False

        # now concatenate
        print("Appending new data to previous data")
        # BUGFIX: the concatenated frame was previously assigned to the local name
        # and discarded (the function returned None); return it so callers can use it.
        return pd.concat([old_fpmms, fpmms], ignore_index=True)

    return fpmms
|
248 |
+
|
249 |
+
|
250 |
+
@measure_execution_time
def compute_distributions(filename: Optional[str]) -> pd.DataFrame:
    """Fetch, process, store and return the markets as a Dataframe.

    :param filename: parquet file (inside DATA_DIR) to persist to; skipped if falsy.
    """
    print("fetching new markets information")
    sample_timestamp = int(datetime.now(UTC).timestamp())
    markets_df = fetch_fpmms(sample_timestamp)
    print(markets_df.head())

    print("transforming and updating previous data")

    transform_fpmms(markets_df, filename, sample_timestamp)
    print(markets_df.head())

    # WIP
    # print("Adding trading information")
    add_trading_info(markets_df)
    print("saving the data")
    print(markets_df.info())
    if filename:
        markets_df.to_parquet(DATA_DIR / filename, index=False)

    return markets_df
|
273 |
+
|
274 |
+
|
275 |
+
# Script entry point: collect live-market data and persist it to the default file.
if __name__ == "__main__":
    compute_distributions("markets_live_data.parquet")
|
scripts/live_traders_data.py
ADDED
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import requests
|
2 |
+
import os
|
3 |
+
from datetime import datetime, timedelta, UTC
|
4 |
+
import pandas as pd
|
5 |
+
from collections import defaultdict
|
6 |
+
from typing import Any, Optional
|
7 |
+
from tqdm import tqdm
|
8 |
+
from live_utils import OMEN_SUBGRAPH_URL, CREATOR, BATCH_SIZE, DATA_DIR
|
9 |
+
from utils import SUBGRAPH_API_KEY, _to_content
|
10 |
+
from queries import omen_trader_votes_query
|
11 |
+
|
12 |
+
|
13 |
+
headers = {
|
14 |
+
"Accept": "application/json, multipart/mixed",
|
15 |
+
"Content-Type": "application/json",
|
16 |
+
}
|
17 |
+
|
18 |
+
|
19 |
+
def _query_omen_xdai_subgraph(
    fpmm_id: str,
) -> dict[str, Any]:
    """Query the subgraph for all trades of one market, paginating by trade id.

    :param fpmm_id: id of the market whose trades are collected.
    :return: dict shaped like the subgraph response, with all pages merged under
        ``data.fpmmTrades``.
    """
    omen_subgraph = OMEN_SUBGRAPH_URL.substitute(subgraph_api_key=SUBGRAPH_API_KEY)
    print(f"omen_subgraph = {omen_subgraph}")
    grouped_results = defaultdict(list)
    id_gt = ""

    while True:
        query = omen_trader_votes_query.substitute(
            fpmm_creator=CREATOR.lower(),
            first=BATCH_SIZE,
            id_gt=id_gt,
            fpmm_id=fpmm_id,
        )
        print(f"query for the omen to collect trades {query}")
        content_json = _to_content(query)

        res = requests.post(omen_subgraph, headers=headers, json=content_json)
        result_json = res.json()
        user_trades = result_json.get("data", {}).get("fpmmTrades", [])

        if not user_trades:
            break

        for trade in user_trades:
            # BUGFIX: use a dedicated local name for the trade's market id; the
            # original reassigned the `fpmm_id` parameter here, clobbering the
            # filter used by the query on subsequent loop iterations.
            trade_market_id = trade.get("fpmm", {}).get("id")
            grouped_results[trade_market_id].append(trade)

        # paginate from the last trade id seen
        id_gt = user_trades[-1]["id"]

    all_results = {
        "data": {
            "fpmmTrades": [
                trade
                for trades_list in grouped_results.values()
                for trade in trades_list
            ]
        }
    }

    return all_results
|
63 |
+
|
64 |
+
|
65 |
+
def transform_trades(trades_json: dict) -> pd.DataFrame:
    """Flatten the raw ``fpmmTrades`` JSON into a dataframe."""
    print("transforming trades")
    trades_df = pd.DataFrame(trades_json["data"]["fpmmTrades"])
    if len(trades_df) == 0:
        print("No trades for this market")
        return trades_df

    # the trade creator comes as an object; keep only its address
    trades_df["trade_creator"] = trades_df["creator"].apply(lambda creator: creator["id"])

    # flatten the nested fpmm object into "fpmm."-prefixed columns
    fpmm_columns = pd.json_normalize(trades_df["fpmm"])
    fpmm_columns.columns = [f"fpmm.{name}" for name in fpmm_columns.columns]
    trades_df = pd.concat([trades_df, fpmm_columns], axis=1)

    # the nested object itself is no longer needed
    trades_df.drop(["fpmm"], axis=1, inplace=True)

    # outcome index arrives as a string; make it numeric (NaN on bad values)
    trades_df.outcomeIndex = pd.to_numeric(trades_df.outcomeIndex, errors="coerce")
    return trades_df
|
89 |
+
|
90 |
+
|
91 |
+
def compute_from_timestamp_value(
    fpmm_id: str, opening_timestamp: int, fpmms: pd.DataFrame
) -> Optional[int]:
    """Find the latest timestamp registered for a specific market.

    :param fpmm_id: id of the market.
    :param opening_timestamp: the market's opening timestamp, used as fallback.
    :param fpmms: dataframe with one row per (market, sample) pair.
    :return: the previous sample's timestamp, the opening timestamp when this is
        the first sample, or None on any error.
    """
    try:
        market_data = fpmms.loc[fpmms["id"] == fpmm_id]
        # how many previous samples do we have?
        if len(market_data) == 1:
            # take the opening Timestamp of the Market
            return opening_timestamp
        # BUGFIX: ndarray.sort() sorts in place and returns None, which made the
        # original always raise here and fall through to the except branch; use
        # sorted() which returns the sorted sequence.
        timestamps = sorted(market_data.tokens_timestamp.values)
        # the last value is the current timestamp so we need to take the previous one
        return timestamps[-2]
    except Exception:
        print(
            f"Error when trying to get the from timestamp value of the market id {fpmm_id}"
        )
        return None
|
109 |
+
|
110 |
+
|
111 |
+
def compute_votes_distribution(market_trades: pd.DataFrame):
    """Compute the (first, second) outcome vote percentages for a market's trades."""
    nr_trades = len(market_trades)
    print(f"The total number of trades is {nr_trades}")
    # outcomeIndex is assumed to be 0 or 1, so summing counts the index-1 votes
    votes_for_index_1 = sum(market_trades.outcomeIndex)
    print(f"The total number of votes for index 1 is {votes_for_index_1}")
    second_outcome_perc = round((votes_for_index_1 / nr_trades) * 100, 2)
    return (100 - second_outcome_perc), second_outcome_perc
|
120 |
+
|
121 |
+
|
122 |
+
def add_trading_info(fpmms: pd.DataFrame) -> None:
    """Attach the per-market votes distribution to `fpmms`, in place."""
    # Iterate over the markets
    print("Adding votes distribution per market")
    fpmms["votes_first_outcome_perc"] = 0.0
    fpmms["votes_second_outcome_perc"] = 0.0
    for _, market in tqdm(fpmms.iterrows(), total=len(fpmms), desc="Analysing trades"):
        # read trades from latest read timestamp
        market_id = market["id"]
        print(f"Adding information for the market {market_id}")
        trades_json = _query_omen_xdai_subgraph(
            fpmm_id=market_id,
        )
        trades_df = transform_trades(trades_json)
        if len(trades_df) == 0:
            # no trades yet for this market; keep the 0.0 defaults
            continue
        # to compute the votes distribution
        print("Computing the votes distribution")
        first_outcome, second_outcome = compute_votes_distribution(trades_df)
        print(
            f"first outcome votes ={first_outcome}, second outcome votes = {second_outcome}"
        )
        market_mask = fpmms["id"] == market_id
        fpmms.loc[market_mask, "votes_first_outcome_perc"] = first_outcome
        fpmms.loc[market_mask, "votes_second_outcome_perc"] = second_outcome
    print("Dataset after adding trading info")
    print(fpmms.head())
    return
|
150 |
+
|
151 |
+
|
152 |
+
# Script entry point. NOTE(review): only prints a banner — the collection call
# itself is not wired up here yet (presumably WIP); confirm intended behavior.
if __name__ == "__main__":
    print("collecting votes distribution")
|
scripts/live_utils.py
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from string import Template
from pathlib import Path


# Address of the market creator whose FPMMs are tracked.
CREATOR = "0x89c5cc945dd550BcFfb72Fe42BfF002429F46Fec"
# Page size used when paginating subgraph queries.
BATCH_SIZE = 1000
# OMEN_SUBGRAPH = "https://api.thegraph.com/subgraphs/name/protofire/omen-xdai"
# Omen xDai subgraph served through The Graph gateway; requires an API key.
OMEN_SUBGRAPH_URL = Template(
    """https://gateway-arbitrum.network.thegraph.com/api/${subgraph_api_key}/subgraphs/id/9fUVQpFwzpdWS9bq5WkAnmKbNNcoBwatMR4yZq81pbbz"""
)
SCRIPTS_DIR = Path(__file__).parent
ROOT_DIR = SCRIPTS_DIR.parent
# Live-market parquet files are read from / written to this directory.
DATA_DIR = ROOT_DIR / "live_data"
MAX_UINT_HEX = "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
|
scripts/markets.py
CHANGED
@@ -29,6 +29,18 @@ from tqdm import tqdm
|
|
29 |
from typing import List, Dict
|
30 |
from pathlib import Path
|
31 |
from utils import SUBGRAPH_API_KEY
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
|
33 |
ResponseItemType = List[Dict[str, str]]
|
34 |
SubgraphResponseType = Dict[str, ResponseItemType]
|
@@ -40,42 +52,12 @@ BATCH_SIZE = 1000
|
|
40 |
OMEN_SUBGRAPH_URL = Template(
|
41 |
"""https://gateway-arbitrum.network.thegraph.com/api/${subgraph_api_key}/subgraphs/id/9fUVQpFwzpdWS9bq5WkAnmKbNNcoBwatMR4yZq81pbbz"""
|
42 |
)
|
43 |
-
|
44 |
-
QUERY_FIELD = "query"
|
45 |
-
ERROR_FIELD = "errors"
|
46 |
-
DATA_FIELD = "data"
|
47 |
-
ID_FIELD = "id"
|
48 |
-
ANSWER_FIELD = "currentAnswer"
|
49 |
-
QUESTION_FIELD = "question"
|
50 |
-
OUTCOMES_FIELD = "outcomes"
|
51 |
-
TITLE_FIELD = "title"
|
52 |
MAX_UINT_HEX = "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
|
53 |
DEFAULT_FILENAME = "fpmms.parquet"
|
54 |
SCRIPTS_DIR = Path(__file__).parent
|
55 |
ROOT_DIR = SCRIPTS_DIR.parent
|
56 |
DATA_DIR = ROOT_DIR / "data"
|
57 |
-
FPMMS_QUERY = Template(
|
58 |
-
"""
|
59 |
-
{
|
60 |
-
${fpmms_field}(
|
61 |
-
where: {
|
62 |
-
creator: "${creator}",
|
63 |
-
id_gt: "${fpmm_id}",
|
64 |
-
isPendingArbitration: false
|
65 |
-
},
|
66 |
-
orderBy: ${id_field}
|
67 |
-
first: ${first}
|
68 |
-
){
|
69 |
-
${id_field}
|
70 |
-
${answer_field}
|
71 |
-
${question_field} {
|
72 |
-
${outcomes_field}
|
73 |
-
}
|
74 |
-
${title_field}
|
75 |
-
}
|
76 |
-
}
|
77 |
-
"""
|
78 |
-
)
|
79 |
|
80 |
|
81 |
class RetriesExceeded(Exception):
|
|
|
29 |
from typing import List, Dict
|
30 |
from pathlib import Path
|
31 |
from utils import SUBGRAPH_API_KEY
|
32 |
+
from queries import (
|
33 |
+
FPMMS_QUERY,
|
34 |
+
ID_FIELD,
|
35 |
+
DATA_FIELD,
|
36 |
+
ANSWER_FIELD,
|
37 |
+
QUERY_FIELD,
|
38 |
+
TITLE_FIELD,
|
39 |
+
OUTCOMES_FIELD,
|
40 |
+
ERROR_FIELD,
|
41 |
+
QUESTION_FIELD,
|
42 |
+
FPMMS_FIELD,
|
43 |
+
)
|
44 |
|
45 |
ResponseItemType = List[Dict[str, str]]
|
46 |
SubgraphResponseType = Dict[str, ResponseItemType]
|
|
|
52 |
OMEN_SUBGRAPH_URL = Template(
|
53 |
"""https://gateway-arbitrum.network.thegraph.com/api/${subgraph_api_key}/subgraphs/id/9fUVQpFwzpdWS9bq5WkAnmKbNNcoBwatMR4yZq81pbbz"""
|
54 |
)
|
55 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
56 |
MAX_UINT_HEX = "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
|
57 |
DEFAULT_FILENAME = "fpmms.parquet"
|
58 |
SCRIPTS_DIR = Path(__file__).parent
|
59 |
ROOT_DIR = SCRIPTS_DIR.parent
|
60 |
DATA_DIR = ROOT_DIR / "data"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
61 |
|
62 |
|
63 |
class RetriesExceeded(Exception):
|
scripts/profitability.py
CHANGED
@@ -30,20 +30,9 @@ import numpy as np
|
|
30 |
import os
|
31 |
from pathlib import Path
|
32 |
from get_mech_info import DATETIME_60_DAYS_AGO
|
33 |
-
from utils import SUBGRAPH_API_KEY, wei_to_unit, convert_hex_to_int
|
34 |
-
|
35 |
-
|
36 |
-
"openai-text-davinci-002",
|
37 |
-
"openai-text-davinci-003",
|
38 |
-
"openai-gpt-3.5-turbo",
|
39 |
-
"openai-gpt-4",
|
40 |
-
"stabilityai-stable-diffusion-v1-5",
|
41 |
-
"stabilityai-stable-diffusion-xl-beta-v2-2-2",
|
42 |
-
"stabilityai-stable-diffusion-512-v2-1",
|
43 |
-
"stabilityai-stable-diffusion-768-v2-1",
|
44 |
-
"deepmind-optimization-strong",
|
45 |
-
"deepmind-optimization",
|
46 |
-
]
|
47 |
QUERY_BATCH_SIZE = 1000
|
48 |
DUST_THRESHOLD = 10000000000000
|
49 |
INVALID_ANSWER = -1
|
@@ -52,7 +41,7 @@ DEFAULT_FROM_DATE = "1970-01-01T00:00:00"
|
|
52 |
DEFAULT_TO_DATE = "2038-01-19T03:14:07"
|
53 |
DEFAULT_FROM_TIMESTAMP = 0
|
54 |
DEFAULT_60_DAYS_AGO_TIMESTAMP = (DATETIME_60_DAYS_AGO).timestamp()
|
55 |
-
DEFAULT_TO_TIMESTAMP = 2147483647
|
56 |
WXDAI_CONTRACT_ADDRESS = "0xe91D153E0b41518A2Ce8Dd3D7944Fa863463a97d"
|
57 |
DEFAULT_MECH_FEE = 0.01
|
58 |
DUST_THRESHOLD = 10000000000000
|
@@ -153,96 +142,6 @@ headers = {
|
|
153 |
}
|
154 |
|
155 |
|
156 |
-
omen_xdai_trades_query = Template(
|
157 |
-
"""
|
158 |
-
{
|
159 |
-
fpmmTrades(
|
160 |
-
where: {
|
161 |
-
type: Buy,
|
162 |
-
fpmm_: {
|
163 |
-
creator: "${fpmm_creator}"
|
164 |
-
creationTimestamp_gte: "${fpmm_creationTimestamp_gte}",
|
165 |
-
creationTimestamp_lt: "${fpmm_creationTimestamp_lte}"
|
166 |
-
},
|
167 |
-
creationTimestamp_gte: "${creationTimestamp_gte}",
|
168 |
-
creationTimestamp_lte: "${creationTimestamp_lte}"
|
169 |
-
id_gt: "${id_gt}"
|
170 |
-
}
|
171 |
-
first: ${first}
|
172 |
-
orderBy: id
|
173 |
-
orderDirection: asc
|
174 |
-
) {
|
175 |
-
id
|
176 |
-
title
|
177 |
-
collateralToken
|
178 |
-
outcomeTokenMarginalPrice
|
179 |
-
oldOutcomeTokenMarginalPrice
|
180 |
-
type
|
181 |
-
creator {
|
182 |
-
id
|
183 |
-
}
|
184 |
-
creationTimestamp
|
185 |
-
collateralAmount
|
186 |
-
collateralAmountUSD
|
187 |
-
feeAmount
|
188 |
-
outcomeIndex
|
189 |
-
outcomeTokensTraded
|
190 |
-
transactionHash
|
191 |
-
fpmm {
|
192 |
-
id
|
193 |
-
outcomes
|
194 |
-
title
|
195 |
-
answerFinalizedTimestamp
|
196 |
-
currentAnswer
|
197 |
-
isPendingArbitration
|
198 |
-
arbitrationOccurred
|
199 |
-
openingTimestamp
|
200 |
-
condition {
|
201 |
-
id
|
202 |
-
}
|
203 |
-
}
|
204 |
-
}
|
205 |
-
}
|
206 |
-
"""
|
207 |
-
)
|
208 |
-
|
209 |
-
|
210 |
-
conditional_tokens_gc_user_query = Template(
|
211 |
-
"""
|
212 |
-
{
|
213 |
-
user(id: "${id}") {
|
214 |
-
userPositions(
|
215 |
-
first: ${first}
|
216 |
-
where: {
|
217 |
-
id_gt: "${userPositions_id_gt}"
|
218 |
-
}
|
219 |
-
orderBy: id
|
220 |
-
) {
|
221 |
-
balance
|
222 |
-
id
|
223 |
-
position {
|
224 |
-
id
|
225 |
-
conditionIds
|
226 |
-
}
|
227 |
-
totalBalance
|
228 |
-
wrappedBalance
|
229 |
-
}
|
230 |
-
}
|
231 |
-
}
|
232 |
-
"""
|
233 |
-
)
|
234 |
-
|
235 |
-
|
236 |
-
def _to_content(q: str) -> dict[str, Any]:
|
237 |
-
"""Convert the given query string to payload content, i.e., add it under a `queries` key and convert it to bytes."""
|
238 |
-
finalized_query = {
|
239 |
-
"query": q,
|
240 |
-
"variables": None,
|
241 |
-
"extensions": {"headers": None},
|
242 |
-
}
|
243 |
-
return finalized_query
|
244 |
-
|
245 |
-
|
246 |
def _query_omen_xdai_subgraph(
|
247 |
from_timestamp: float,
|
248 |
to_timestamp: float,
|
|
|
30 |
import os
|
31 |
from pathlib import Path
|
32 |
from get_mech_info import DATETIME_60_DAYS_AGO
|
33 |
+
from utils import SUBGRAPH_API_KEY, wei_to_unit, convert_hex_to_int, _to_content
|
34 |
+
from queries import omen_xdai_trades_query, conditional_tokens_gc_user_query
|
35 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
36 |
QUERY_BATCH_SIZE = 1000
|
37 |
DUST_THRESHOLD = 10000000000000
|
38 |
INVALID_ANSWER = -1
|
|
|
41 |
DEFAULT_TO_DATE = "2038-01-19T03:14:07"
|
42 |
DEFAULT_FROM_TIMESTAMP = 0
|
43 |
DEFAULT_60_DAYS_AGO_TIMESTAMP = (DATETIME_60_DAYS_AGO).timestamp()
|
44 |
+
DEFAULT_TO_TIMESTAMP = 2147483647 # around year 2038
|
45 |
WXDAI_CONTRACT_ADDRESS = "0xe91D153E0b41518A2Ce8Dd3D7944Fa863463a97d"
|
46 |
DEFAULT_MECH_FEE = 0.01
|
47 |
DUST_THRESHOLD = 10000000000000
|
|
|
142 |
}
|
143 |
|
144 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
145 |
def _query_omen_xdai_subgraph(
|
146 |
from_timestamp: float,
|
147 |
to_timestamp: float,
|
scripts/queries.py
ADDED
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
# ------------------------------------------------------------------------------
|
3 |
+
#
|
4 |
+
# Copyright 2024 Valory AG
|
5 |
+
#
|
6 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
7 |
+
# you may not use this file except in compliance with the License.
|
8 |
+
# You may obtain a copy of the License at
|
9 |
+
#
|
10 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11 |
+
#
|
12 |
+
# Unless required by applicable law or agreed to in writing, software
|
13 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
14 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
15 |
+
# See the License for the specific language governing permissions and
|
16 |
+
# limitations under the License.
|
17 |
+
#
|
18 |
+
# ------------------------------------------------------------------------------
|
19 |
+
|
20 |
+
from string import Template
|
21 |
+
|
22 |
+
# Field / key names shared by the GraphQL queries below and by the code
# that parses their JSON responses.
FPMMS_FIELD = "fixedProductMarketMakers"
QUERY_FIELD = "query"
ERROR_FIELD = "errors"
DATA_FIELD = "data"
ID_FIELD = "id"
ANSWER_FIELD = "currentAnswer"
QUESTION_FIELD = "question"
OUTCOMES_FIELD = "outcomes"
TITLE_FIELD = "title"
ANSWER_TIMESTAMP_FIELD = "currentAnswerTimestamp"
OPENING_TIMESTAMP_FIELD = "openingTimestamp"
RESOLUTION_TIMESTAMP_FIELD = "resolutionTimestamp"
CREATION_TIMESTAMP_FIELD = "creationTimestamp"
LIQUIDITY_FIELD = "liquidityParameter"
# NOTE(review): constant name has a typo ("LIQUIDIY") — kept as-is because
# other modules may already import it by this name.
LIQUIDIY_MEASURE_FIELD = "liquidityMeasure"
TOKEN_AMOUNTS_FIELD = "outcomeTokenAmounts"
|
38 |
+
|
39 |
+
# Query: fixed-product market makers created by ${creator}, paginated by
# id (id_gt + first), excluding markets pending arbitration. Returns id,
# current answer, question outcomes and title.
FPMMS_QUERY = Template(
    """
    {
      ${fpmms_field}(
        where: {
          creator: "${creator}",
          id_gt: "${fpmm_id}",
          isPendingArbitration: false
        },
        orderBy: ${id_field}
        first: ${first}
      ){
        ${id_field}
        ${answer_field}
        ${question_field} {
          ${outcomes_field}
        }
        ${title_field}
      }
    }
    """
)
|
61 |
+
|
62 |
+
# Query: live (unanswered) markets by ${creator} that are still open
# (openingTimestamp_gt current time), including liquidity figures and
# outcome token amounts. Paginated by id (id_gt + first).
FPMMS_WITH_TOKENS_QUERY = Template(
    """
    {
      ${fpmms_field}(
        where: {
          creator: "${creator}",
          id_gt: "${fpmm_id}",
          isPendingArbitration: false
          currentAnswer: null
          openingTimestamp_gt:${current_timestamp}
        },
        orderBy: ${id_field}
        orderDirection: asc
        first: ${first}
      ){
        ${id_field}
        ${question_field} {
          ${outcomes_field}
          ${answer_timestamp_field}
          answers{
            answer
          }
        }
        ${title_field}
        ${opening_timestamp_field}
        ${creation_timestamp_field}
        ${liquidity_field}
        ${liquidity_measure_field}
        ${token_amounts_field}
      }
    }
    """
)
|
95 |
+
# Query: Buy trades on markets created by ${fpmm_creator}, filtered both by
# the market's creation window and by the trade's own creation window,
# paginated by trade id (id_gt + first).
# NOTE(review): the ${fpmm_creationTimestamp_lte} placeholder fills a
# `creationTimestamp_lt` (strict) filter — confirm callers intend an
# exclusive upper bound for the market window.
omen_xdai_trades_query = Template(
    """
    {
        fpmmTrades(
            where: {
                type: Buy,
                fpmm_: {
                    creator: "${fpmm_creator}"
                    creationTimestamp_gte: "${fpmm_creationTimestamp_gte}",
                    creationTimestamp_lt: "${fpmm_creationTimestamp_lte}"
                },
                creationTimestamp_gte: "${creationTimestamp_gte}",
                creationTimestamp_lte: "${creationTimestamp_lte}"
                id_gt: "${id_gt}"
            }
            first: ${first}
            orderBy: id
            orderDirection: asc
        ) {
            id
            title
            collateralToken
            outcomeTokenMarginalPrice
            oldOutcomeTokenMarginalPrice
            type
            creator {
                id
            }
            creationTimestamp
            collateralAmount
            collateralAmountUSD
            feeAmount
            outcomeIndex
            outcomeTokensTraded
            transactionHash
            fpmm {
                id
                outcomes
                title
                answerFinalizedTimestamp
                currentAnswer
                isPendingArbitration
                arbitrationOccurred
                openingTimestamp
                condition {
                    id
                }
            }
        }
    }
    """
)
|
147 |
+
|
148 |
+
# Query: Buy trades ("votes") on one specific market (${fpmm_id}) created by
# ${fpmm_creator}, paginated by trade id (id_gt + first). Returns a reduced
# fpmm sub-selection compared to omen_xdai_trades_query (no answer/arbitration
# fields).
omen_trader_votes_query = Template(
    """
    {
        fpmmTrades(
            where: {
                type: Buy,
                fpmm_: {
                    creator: "${fpmm_creator}",
                    id: "${fpmm_id}",
                },
                id_gt: "${id_gt}"
            }
            first: ${first}
            orderBy: id
            orderDirection: asc
        ) {
            id
            title
            collateralToken
            outcomeTokenMarginalPrice
            oldOutcomeTokenMarginalPrice
            type
            creator {
                id
            }
            creationTimestamp
            collateralAmount
            collateralAmountUSD
            feeAmount
            outcomeIndex
            outcomeTokensTraded
            transactionHash
            fpmm {
                id
                outcomes
                title
                condition {
                    id
                }
            }
        }
    }
    """
)
|
192 |
+
|
193 |
+
|
194 |
+
# Query (conditional-tokens subgraph): positions held by one user (${id}),
# paginated by userPosition id (userPositions_id_gt + first). Returns the
# position's condition ids plus balance / totalBalance / wrappedBalance.
conditional_tokens_gc_user_query = Template(
    """
    {
        user(id: "${id}") {
            userPositions(
                first: ${first}
                where: {
                    id_gt: "${userPositions_id_gt}"
                }
                orderBy: id
            ) {
                balance
                id
                position {
                    id
                    conditionIds
                }
                totalBalance
                wrappedBalance
            }
        }
    }
    """
)
|
218 |
+
|
219 |
+
|
220 |
+
TRADES_QUERY = """
|
221 |
+
query fpmms_query($fpmm: String, $id_gt: ID) {
|
222 |
+
fpmmTrades(
|
223 |
+
where: {fpmm: $fpmm, id_gt: $id_gt, type: Buy}
|
224 |
+
orderBy: id
|
225 |
+
orderDirection: asc
|
226 |
+
first: 1000
|
227 |
+
) {
|
228 |
+
collateralAmount
|
229 |
+
outcomeIndex
|
230 |
+
outcomeTokensTraded
|
231 |
+
id
|
232 |
+
oldOutcomeTokenMarginalPrice
|
233 |
+
outcomeTokenMarginalPrice
|
234 |
+
type
|
235 |
+
collateralAmountUSD
|
236 |
+
creationTimestamp
|
237 |
+
feeAmount
|
238 |
+
}
|
239 |
+
}
|
240 |
+
"""
|
scripts/utils.py
CHANGED
@@ -4,6 +4,7 @@ import os
|
|
4 |
import time
|
5 |
from tqdm import tqdm
|
6 |
from typing import List, Any, Optional, Union
|
|
|
7 |
import pandas as pd
|
8 |
import gc
|
9 |
import re
|
@@ -348,3 +349,25 @@ def convert_hex_to_int(x: Union[str, float]) -> Union[int, float]:
|
|
348 |
def wei_to_unit(wei: int) -> float:
    """Convert an amount expressed in wei into whole currency units."""
    one_unit_in_wei = 10**18
    return wei / one_unit_in_wei
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4 |
import time
|
5 |
from tqdm import tqdm
|
6 |
from typing import List, Any, Optional, Union
|
7 |
+
import numpy as np
|
8 |
import pandas as pd
|
9 |
import gc
|
10 |
import re
|
|
|
349 |
def wei_to_unit(wei: int) -> float:
|
350 |
"""Converts wei to currency unit."""
|
351 |
return wei / 10**18
|
352 |
+
|
353 |
+
|
354 |
+
def measure_execution_time(func):
    """Decorator that prints the wall-clock execution time of each call.

    The wrapped function behaves exactly like the original (same arguments,
    same return value); the elapsed time is printed to stdout after the call.
    """
    import functools  # local import keeps the decorator self-contained

    # functools.wraps preserves func's __name__/__doc__ on the wrapper,
    # which the original version clobbered.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        execution_time = time.time() - start_time
        print(f"Execution time: {execution_time:.6f} seconds")
        return result

    return wrapper
|
364 |
+
|
365 |
+
|
366 |
+
def _to_content(q: str) -> dict[str, Any]:
|
367 |
+
"""Convert the given query string to payload content, i.e., add it under a `queries` key and convert it to bytes."""
|
368 |
+
finalized_query = {
|
369 |
+
"query": q,
|
370 |
+
"variables": None,
|
371 |
+
"extensions": {"headers": None},
|
372 |
+
}
|
373 |
+
return finalized_query
|