Commit f7c2ff7 · 1 Parent(s): 7aa7dd1
rosacastillo committed

adding new unknown trader category

app.py CHANGED
@@ -70,43 +70,19 @@ def get_logger():
 logger = get_logger()
 
 
-def get_last_one_month_data():
-    """
-    Get the last one month data from the tools.parquet file
-    """
-    logger.info("Getting last one month data")
-    con = duckdb.connect(":memory:")
-    one_months_ago = (datetime.now() - timedelta(days=60)).strftime("%Y-%m-%d")
-
-    # Query to fetch data from all_trades_profitability.parquet
-    query2 = f"""
-        SELECT *
-        FROM read_parquet('./data/all_trades_profitability.parquet')
-        WHERE creation_timestamp >= '{one_months_ago}'
-    """
-    df2 = con.execute(query2).fetchdf()
-    logger.info("Got last one month data from all_trades_profitability.parquet")
-
-    query1 = f"""
-        SELECT *
-        FROM read_parquet('./data/tools.parquet')
-        WHERE request_time >= '{one_months_ago}'
-    """
-    df1 = con.execute(query1).fetchdf()
-    logger.info("Got last one month data from tools.parquet")
-
-    con.close()
-
-    return df1, df2
-
-
 def get_all_data():
     """
-    Get all data from the tools.parquet, tools_accuracy and trades parquet files
+    Get all data from the parquet files
     """
     logger.info("Getting all data")
 
     con = duckdb.connect(":memory:")
+    query5 = f"""
+        SELECT *
+        FROM read_parquet('./data/unknown_traders.parquet')
+    """
+    df5 = con.execute(query5).fetchdf()
+
     # Query to fetch invalid trades data
     query4 = f"""
         SELECT *
@@ -138,18 +114,21 @@ def get_all_data():
 
     con.close()
 
-    return df1, df2, df3, df4
+    return df1, df2, df3, df4, df5
 
 
 def prepare_data():
     """
     Prepare the data for the dashboard
     """
-    tools_df, trades_df, tools_accuracy_info, invalid_trades = get_all_data()
+    tools_df, trades_df, tools_accuracy_info, invalid_trades, unknown_trades = (
+        get_all_data()
+    )
     print(trades_df.info())
 
     tools_df = prepare_tools(tools_df)
     trades_df = prepare_trades(trades_df)
+    unknown_trades = prepare_trades(unknown_trades)
 
     tools_accuracy_info = compute_weighted_accuracy(tools_accuracy_info)
     print("weighted accuracy info")
@@ -166,11 +145,14 @@ def prepare_data():
     outliers.to_parquet("./data/outliers.parquet")
     trades_df = trades_df.loc[trades_df["roi"] < 1000]
 
-    return tools_df, trades_df, tools_accuracy_info, invalid_trades
+    return tools_df, trades_df, tools_accuracy_info, invalid_trades, unknown_trades
 
 
-tools_df, trades_df, tools_accuracy_info, invalid_trades = prepare_data()
+tools_df, trades_df, tools_accuracy_info, invalid_trades, unknown_trades = (
+    prepare_data()
+)
 trades_df = trades_df.sort_values(by="creation_timestamp", ascending=True)
+unknown_trades = unknown_trades.sort_values(by="creation_timestamp", ascending=True)
 
 demo = gr.Blocks()
 
@@ -255,7 +237,7 @@ with demo:
                     "# Weekly trading metrics for trades coming from 🌊 Olas traders"
                 )
             with gr.Row():
-                trade_a_details_selector = gr.Dropdown(
+                trade_o_details_selector = gr.Dropdown(
                     label="Select a trade metric",
                     choices=trade_metric_choices,
                     value=default_trade_metric,
@@ -263,15 +245,15 @@ with demo:
 
             with gr.Row():
                 with gr.Column(scale=3):
-                    a_trade_details_plot = plot_trade_metrics(
+                    trade_o_details_plot = plot_trade_metrics(
                         metric_name=default_trade_metric,
                         trades_df=trades_df,
-                        trader_filter="agent",
+                        trader_filter="Olas",
                     )
                 with gr.Column(scale=1):
                     trade_details_text = get_trade_metrics_text()
 
-            def update_a_trade_details(trade_detail, trade_details_plot):
+            def update_a_trade_details(trade_detail, trade_o_details_plot):
                 new_a_plot = plot_trade_metrics(
                     metric_name=trade_detail,
                     trades_df=trades_df,
@@ -279,10 +261,10 @@ with demo:
                 )
                 return new_a_plot
 
-            trade_a_details_selector.change(
+            trade_o_details_selector.change(
                 update_a_trade_details,
-                inputs=[trade_a_details_selector, a_trade_details_plot],
-                outputs=[a_trade_details_plot],
+                inputs=[trade_o_details_selector, trade_o_details_plot],
+                outputs=[trade_o_details_plot],
             )
 
             # Non-Olasic traders graph
@@ -291,7 +273,7 @@ with demo:
                     "# Weekly trading metrics for trades coming from Non-Olas traders"
                 )
             with gr.Row():
-                trade_na_details_selector = gr.Dropdown(
+                trade_no_details_selector = gr.Dropdown(
                    label="Select a trade metric",
                    choices=trade_metric_choices,
                    value=default_trade_metric,
@@ -299,7 +281,7 @@ with demo:
 
             with gr.Row():
                 with gr.Column(scale=3):
-                    na_trade_details_plot = plot_trade_metrics(
+                    trade_no_details_plot = plot_trade_metrics(
                         metric_name=default_trade_metric,
                         trades_df=trades_df,
                         trader_filter="non_Olas",
@@ -308,23 +290,59 @@ with demo:
                     trade_details_text = get_trade_metrics_text()
 
             def update_na_trade_details(trade_detail, trade_details_plot):
-                new_a_plot = plot_trade_metrics(
+                new_no_plot = plot_trade_metrics(
                     metric_name=trade_detail,
                     trades_df=trades_df,
                     trader_filter="non_Olas",
                 )
-                return new_a_plot
+                return new_no_plot
+
+            trade_no_details_selector.change(
+                update_na_trade_details,
+                inputs=[trade_no_details_selector, trade_no_details_plot],
+                outputs=[trade_no_details_plot],
+            )
+            # Unknown traders graph
+            with gr.Row():
+                gr.Markdown(
+                    "# Weekly trading metrics for trades coming from unknown traders"
+                )
+            with gr.Row():
+                trade_u_details_selector = gr.Dropdown(
+                    label="Select a trade metric",
+                    choices=trade_metric_choices,
+                    value=default_trade_metric,
+                )
+
+            with gr.Row():
+                with gr.Column(scale=3):
+                    trade_u_details_plot = plot_trade_metrics(
+                        metric_name=default_trade_metric,
+                        trades_df=unknown_trades,
+                        trader_filter="all",
+                    )
+                with gr.Column(scale=1):
+                    trade_details_text = get_trade_metrics_text()
+
+            def update_na_trade_details(trade_detail, trade_u_details_plot):
+                new_u_plot = plot_trade_metrics(
+                    metric_name=trade_detail,
+                    trades_df=unknown_trades,
+                    trader_filter="all",
+                )
+                return new_u_plot
 
-            trade_na_details_selector.change(
+            trade_u_details_selector.change(
                 update_na_trade_details,
-                inputs=[trade_na_details_selector, na_trade_details_plot],
-                outputs=[na_trade_details_plot],
+                inputs=[trade_u_details_selector, trade_u_details_plot],
+                outputs=[trade_u_details_plot],
             )
 
         with gr.TabItem("🔒 Staking traders"):
            with gr.Row():
                gr.Markdown("# Trades conducted at the Pearl markets")
            with gr.Row():
+               print("Calling plot staking with pearl")
                staking_pearl_trades_by_week = plot_staking_trades_per_market_by_week(
                    trades_df=trades_df, market_creator="pearl"
                )
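
Note: the new unknown-traders block reuses the same selector-to-plot wiring as the Olas and non-Olas tabs. A minimal, self-contained sketch of that Gradio pattern (toy component names and a Textbox stand-in for the plot; this is not code from the app):

import gradio as gr

def describe_metric(metric: str) -> str:
    # stand-in for plot_trade_metrics(metric_name=metric, trades_df=..., trader_filter=...)
    return f"Weekly chart for: {metric}"

with gr.Blocks() as toy_demo:
    selector = gr.Dropdown(
        label="Select a trade metric",
        choices=["mech calls", "ROI", "net earnings"],
        value="mech calls",
    )
    output = gr.Textbox(label="plot placeholder")
    # same wiring as trade_u_details_selector.change(...) above:
    # every dropdown change recomputes the value and pushes it into the output component
    selector.change(describe_metric, inputs=[selector], outputs=[output])

toy_demo.launch()
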
data/all_trades_profitability.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9ff7a9001dceeac25cf151e9b8cff55beafe610387c42e093d004e2712206e6b
-size 3884891
+oid sha256:6d097ed0d81bd0fa9f2e301a1db66da1a5cb122f1ad8626327715dcaff127b83
+size 3558217
data/unknown_traders.parquet ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ab41a7a35d8bf5c588b95849ec650e048578ddcbb18bc62df0e7a3c96902ea5
+size 368142
scripts/cleaning_old_info.py CHANGED
@@ -49,9 +49,6 @@ def clean_old_data_from_parquet_files(cutoff_date: str):
 
     # generate again summary_profitability.parquet
     try:
-        print("Summarising trades...")
-        summary_df = summary_analyse(all_trades)
-
         # add staking labels
         label_trades_by_staking(trades_df=all_trades, update=False)
 
@@ -59,6 +56,8 @@ def clean_old_data_from_parquet_files(cutoff_date: str):
         all_trades.to_parquet(
             DATA_DIR / "all_trades_profitability.parquet", index=False
         )
+        print("Summarising trades...")
+        summary_df = summary_analyse(all_trades)
         summary_df.to_parquet(DATA_DIR / "summary_profitability.parquet", index=False)
     except Exception as e:
         print(f"Error generating summary and saving all trades profitability file {e}")
scripts/daily_data.py CHANGED
@@ -5,6 +5,7 @@ from profitability import (
     label_trades_by_staking,
 )
 import pandas as pd
+from nr_mech_calls import create_unknown_traders_df
 
 logging.basicConfig(level=logging.INFO)
 
@@ -21,6 +22,14 @@ def prepare_live_metrics(
     # staking label
     label_trades_by_staking(all_trades_df)
 
+    # create the unknown traders dataset
+    unknown_traders_df, all_trades_df = create_unknown_traders_df(
+        trades_df=all_trades_df
+    )
+    unknown_traders_df.to_parquet(
+        TMP_DIR / "unknown_daily_traders.parquet", index=False
+    )
+
     # save into a separate file
     all_trades_df.to_parquet(DATA_DIR / "daily_info.parquet", index=False)
 
scripts/{update_nr_mech_calls.py → nr_mech_calls.py} RENAMED
@@ -1,5 +1,5 @@
 import pandas as pd
-from profitability import DATA_DIR, DEFAULT_MECH_FEE, summary_analyse
+from utils import DATA_DIR, DEFAULT_MECH_FEE
 from tqdm import tqdm
 
 
@@ -12,6 +12,16 @@ def update_roi(row: pd.DataFrame) -> float:
     return new_value
 
 
+def create_unknown_traders_df(trades_df: pd.DataFrame) -> pd.DataFrame:
+    """filter trades coming from non-Olas traders that are placing no mech calls"""
+    no_mech_calls_mask = (trades_df["staking"] == "non_Olas") & (
+        trades_df["num_mech_calls"] == 0
+    )
+    no_mech_calls_df = trades_df.loc[no_mech_calls_mask]
+    trades_df = trades_df.loc[~no_mech_calls_mask]
+    return no_mech_calls_df, trades_df
+
+
 def update_trade_nr_mech_calls(non_agents: bool = False):
     try:
         all_trades_df = pd.read_parquet(DATA_DIR / "all_trades_profitability.parquet")
@@ -51,10 +61,19 @@ def update_trade_nr_mech_calls(non_agents: bool = False):
 
     # saving
     all_trades_df.to_parquet(DATA_DIR / "all_trades_profitability.parquet", index=False)
-    print("Summarising trades...")
-    summary_df = summary_analyse(all_trades_df)
-    summary_df.to_parquet(DATA_DIR / "summary_profitability.parquet", index=False)
+    # print("Summarising trades...")
+    # summary_df = summary_analyse(all_trades_df)
+    # summary_df.to_parquet(DATA_DIR / "summary_profitability.parquet", index=False)
 
 
 if __name__ == "__main__":
-    update_trade_nr_mech_calls(non_agents=True)
+    # update_trade_nr_mech_calls(non_agents=True)
+    trades_df = pd.read_parquet(DATA_DIR / "all_trades_profitability.parquet")
+    print("before filtering")
+    print(trades_df.staking.value_counts())
+    unknown_df, trades_df = create_unknown_traders_df(trades_df=trades_df)
+    print("after filtering")
+    print(trades_df.staking.value_counts())
+    print("saving unknown traders")
+    unknown_df.to_parquet(DATA_DIR / "unknown_traders.parquet", index=False)
+    trades_df.to_parquet(DATA_DIR / "all_trades_profitability.parquet", index=False)
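
The renamed module now owns the helper that defines the new trader category. A minimal usage sketch of create_unknown_traders_df on a toy frame (the sample data is invented; only the two columns the mask relies on are included):

import pandas as pd
from nr_mech_calls import create_unknown_traders_df

# toy trades frame with the columns used by the mask
toy_trades = pd.DataFrame(
    {
        "staking": ["Olas", "non_Olas", "non_Olas"],
        "num_mech_calls": [2, 0, 3],
    }
)

# "unknown" traders = non-Olas traders that placed zero mech calls
unknown_df, known_df = create_unknown_traders_df(trades_df=toy_trades)
print(len(unknown_df))  # 1 -> only the non_Olas row with 0 mech calls
print(len(known_df))    # 2 -> everything else stays in the main dataset
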
scripts/profitability.py CHANGED
@@ -32,18 +32,20 @@ from get_mech_info import (
     update_tools_parquet,
     update_all_trades_parquet,
 )
-from utils import wei_to_unit, convert_hex_to_int, JSON_DATA_DIR, DATA_DIR, TMP_DIR
+from utils import (
+    wei_to_unit,
+    convert_hex_to_int,
+    JSON_DATA_DIR,
+    DATA_DIR,
+    DEFAULT_MECH_FEE,
+)
 from staking import label_trades_by_staking
+from nr_mech_calls import create_unknown_traders_df
 
 DUST_THRESHOLD = 10000000000000
 INVALID_ANSWER = -1
-DEFAULT_FROM_DATE = "1970-01-01T00:00:00"
-DEFAULT_TO_DATE = "2038-01-19T03:14:07"
-
 DEFAULT_60_DAYS_AGO_TIMESTAMP = (DATETIME_60_DAYS_AGO).timestamp()
-
 WXDAI_CONTRACT_ADDRESS = "0xe91D153E0b41518A2Ce8Dd3D7944Fa863463a97d"
-DEFAULT_MECH_FEE = 0.01
 DUST_THRESHOLD = 10000000000000
 
 
@@ -423,14 +425,6 @@ def run_profitability_analysis(
     # debugging purposes
     all_trades_df.to_parquet(JSON_DATA_DIR / "all_trades_df.parquet", index=False)
 
-    # filter trades coming from non-Olas traders that are placing no mech calls
-    no_mech_calls_mask = (all_trades_df["staking"] == "non_Olas") & (
-        all_trades_df.loc["num_mech_calls"] == 0
-    )
-    no_mech_calls_df = all_trades_df.loc[no_mech_calls_mask]
-    no_mech_calls_df.to_parquet(TMP_DIR / "no_mech_calls_trades.parquet", index=False)
-    all_trades_df = all_trades_df.loc[~no_mech_calls_mask]
-
     # filter invalid markets. Condition: "is_invalid" is True
     invalid_trades = all_trades_df.loc[all_trades_df["is_invalid"] == True]
     if len(invalid_trades) == 0:
@@ -452,15 +446,21 @@
 
     all_trades_df = all_trades_df.loc[all_trades_df["is_invalid"] == False]
 
-    # summarize profitability df
-    print("Summarising trades...")
-    summary_df = summary_analyse(all_trades_df)
-
     # add staking labels
     label_trades_by_staking(trades_df=all_trades_df)
 
+    # create the unknown traders dataset
+    unknown_traders_df, all_trades_df = create_unknown_traders_df(
+        trades_df=all_trades_df
+    )
+    unknown_traders_df.to_parquet(DATA_DIR / "unknown_traders.parquet", index=False)
+
     # save to parquet
     all_trades_df.to_parquet(DATA_DIR / "all_trades_profitability.parquet", index=False)
+
+    # summarize profitability df
+    print("Summarising trades...")
+    summary_df = summary_analyse(all_trades_df)
     summary_df.to_parquet(DATA_DIR / "summary_profitability.parquet", index=False)
 
     print("Done!")
scripts/pull_data.py CHANGED
@@ -128,7 +128,7 @@ def only_new_weekly_analysis():
 
     save_historical_data()
 
-    clean_old_data_from_parquet_files("2024-10-13")
+    clean_old_data_from_parquet_files("2024-10-14")
 
     compute_tools_accuracy()
 
scripts/staking.py CHANGED
@@ -173,7 +173,7 @@ def get_trader_address_staking(trader_address: str, service_map: dict) -> str:
     return check_owner_staking_contract(owner_address=owner)
 
 
-def label_trades_by_staking(trades_df: pd.DataFrame, start: int = None) -> pd.DataFrame:
+def label_trades_by_staking(trades_df: pd.DataFrame, start: int = None) -> None:
     with open(DATA_DIR / "service_map.pkl", "rb") as f:
         service_map = pickle.load(f)
     # get the last service id
scripts/utils.py CHANGED
@@ -11,6 +11,7 @@ from pathlib import Path
 from enum import Enum
 from json.decoder import JSONDecodeError
 
+DEFAULT_MECH_FEE = 0.01
 REDUCE_FACTOR = 0.25
 SLEEP = 0.5
 REQUEST_ID_FIELD = "request_id"
tabs/staking.py CHANGED
@@ -39,8 +39,6 @@ def plot_staking_trades_per_market_by_week(
     all_filtered_trades = all_filtered_trades.loc[
         all_filtered_trades["market_creator"] == market_creator
     ]
-    print(f"Checking values for market creator={market_creator}")
-    print(all_filtered_trades.staking.value_counts())
     if market_creator != "all":
         if market_creator == "pearl":
             # remove the staking data from quickstart
tabs/trades.py CHANGED
@@ -197,7 +197,13 @@ def integrated_plot_trades_per_market_by_week_v2(trades_df: pd.DataFrame) -> gr.
     # Process both Olas and non-Olas traces for each market together
     for market in ["pearl", "quickstart", "all"]:
         market_data = trades[trades["market_creator"] == market]
-
+        # Create a dictionary to store the Olas values for each week
+        olas_values = dict(
+            zip(
+                market_data[market_data["staking_type"] == "Olas"]["month_year_week"],
+                market_data[market_data["staking_type"] == "Olas"]["trades"],
+            )
+        )
         # First add 'Olas' trace
         olas_data = market_data[market_data["staking_type"] == "Olas"]
         olas_trace = go.Bar(
@@ -217,7 +223,7 @@
             name=f"{market}-non_Olas",
             marker_color=market_darker_colors[market],
             offsetgroup=market,  # Keep the market grouping
-            base=olas_data["trades"],  # Stack on top of olas trace
+            base=[olas_values.get(x, 0) for x in non_Olas_data["month_year_week"]],
             showlegend=True,
         )
 
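
The change to the base argument fixes how non-Olas bars are stacked: the old code passed olas_data["trades"] positionally, which can misalign bars whenever the Olas and non-Olas series do not cover the same weeks. A small sketch of the per-week lookup introduced above (toy numbers, not dashboard data):

import pandas as pd

olas = pd.DataFrame({"month_year_week": ["W1", "W2"], "trades": [5, 3]})
non_olas = pd.DataFrame({"month_year_week": ["W1", "W2", "W3"], "trades": [4, 2, 6]})

# map each week to its Olas trade count
olas_values = dict(zip(olas["month_year_week"], olas["trades"]))

# align the base by week, defaulting to 0 when Olas has no trades that week
base = [olas_values.get(week, 0) for week in non_olas["month_year_week"]]
print(base)  # [5, 3, 0] -> the W3 non-Olas bar starts at 0 instead of a mismatched value
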