anaucoin committed on
Commit
d4c513e
1 Parent(s): 141e16c
Files changed (3)
  1. app.py +626 -264
  2. historical_app.py +0 -726
  3. old_app.py +364 -0
app.py CHANGED
@@ -20,29 +20,133 @@ import numpy as np
20
  import datetime as dt
21
  import matplotlib.pyplot as plt
22
  from pathlib import Path
23
 
24
  import streamlit as st
25
  import plotly.express as px
26
  import altair as alt
27
  import dateutil.parser
28
- import copy
29
 
30
 
31
  # +
32
  @st.experimental_memo
33
  def get_hist_info(df_coin, principal_balance,plheader):
34
  numtrades = int(len(df_coin))
35
  numwin = int(sum(df_coin[plheader] > 0))
36
  numloss = int(sum(df_coin[plheader] < 0))
37
- winrate = int(np.round(100*numwin/numtrades,2))
38
 
39
  grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
40
  grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
41
- if grossloss !=0:
42
  pfactor = -1*np.round(grosswin/grossloss,2)
43
  else:
44
  pfactor = np.nan
45
  return numtrades, numwin, numloss, winrate, pfactor
 
46
  @st.experimental_memo
47
  def get_rolling_stats(df, lev, otimeheader, days):
48
  max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
@@ -58,301 +162,557 @@ def get_rolling_stats(df, lev, otimeheader, days):
58
  else:
59
  rolling_perc = np.nan
60
  return 100*rolling_perc
61
 
62
  @st.experimental_memo
63
  def filt_df(df, cheader, symbol_selections):
64
- """
65
- Inputs: df (pd.DataFrame), cheader (str) and symbol_selections (list[str]).
66
-
67
- Returns a filtered pd.DataFrame containing only data that matches symbol_selections (list[str])
68
- from df[cheader].
69
- """
70
 
71
  df = df.copy()
72
  df = df[df[cheader].isin(symbol_selections)]
73
 
74
  return df
75
 
76
- @st.experimental_memo
77
- def my_style(v, props=''):
78
- props = 'color:red' if v < 0 else 'color:green'
79
- return props
80
 
81
- @st.cache(ttl=24*3600, allow_output_mutation=True)
82
  def load_data(filename, otimeheader, fmat):
83
- df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
84
- df.columns = ['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
85
- df.insert(1, 'Signal', ['Long']*len(df))
86
 
87
- df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
88
- df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
89
- df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
90
- df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
91
- df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
92
- df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
93
-
94
- df['Buy Price'] = pd.to_numeric(df['Buy Price'])
95
- df['Sell Price'] = pd.to_numeric(df['Sell Price'])
96
- df['P/L per token'] = pd.to_numeric(df['P/L per token'])
97
- df['P/L %'] = pd.to_numeric(df['P/L %'])
98
-
99
  dateheader = 'Date'
100
  theader = 'Time'
101
-
102
  df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
103
  df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
104
 
105
  df[otimeheader]= [dateutil.parser.parse(date+' '+time)
106
- for date,time in zip(df[dateheader],df[theader])]
107
-
108
  df[otimeheader] = pd.to_datetime(df[otimeheader])
109
  df['Exit Date'] = pd.to_datetime(df['Exit Date'])
110
  df.sort_values(by=otimeheader, inplace=True)
111
-
112
  df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
113
  df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
114
  df['Trade'] = df.index + 1 #reindex
115
-
116
- df['DCA'] = np.nan
117
 
118
- for exit in pd.unique(df['Exit Date']):
119
- df_exit = df[df['Exit Date']==exit]
120
- if dateutil.parser.parse(str(exit)) < dateutil.parser.parse('2023-02-07 13:00:00'):
121
- for i in range(len(df_exit)):
122
- ind = df_exit.index[i]
123
- df.loc[ind,'DCA'] = i+1
124
-
125
- else:
126
- for i in range(len(df_exit)):
127
- ind = df_exit.index[i]
128
- df.loc[ind,'DCA'] = i+1.1
129
  return df
130
 
131
- def runapp():
132
- bot_selections = "Cinnamon Toast"
133
- otimeheader = 'Exit Date'
134
- fmat = '%Y-%m-%d %H:%M:%S'
135
- dollar_cap = 100000.00
136
- fees = .075/100
137
- st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
138
- st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
139
- "the performance of our trading bots.")
140
- # st.sidebar.header("FAQ")
141
 
142
- # with st.sidebar.subheader("FAQ"):
143
- # st.write(Path("FAQ_README.md").read_text())
144
- st.subheader("Choose your settings:")
145
- no_errors = True
146
-
147
- data = load_data("CT-Trade-Log.csv",otimeheader, fmat)
148
- df = data.copy(deep=True)
149
 
150
- dateheader = 'Date'
151
- theader = 'Time'
152
 
153
- with st.form("user input", ):
154
- if no_errors:
155
- with st.container():
156
- col1, col2 = st.columns(2)
157
- with col1:
158
- try:
159
- startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
160
- except:
161
- st.error("Please select your exchange or upload a supported trade log file.")
162
- no_errors = False
163
- with col2:
164
- try:
165
- enddate = st.date_input("End Date", value=datetime.today())
166
- except:
167
- st.error("Please select your exchange or upload a supported trade log file.")
168
- no_errors = False
169
- #st.sidebar.subheader("Customize your Dashboard")
170
-
171
- if no_errors and (enddate < startdate):
172
- st.error("End Date must be later than Start date. Please try again.")
173
- no_errors = False
174
- with st.container():
175
- col1,col2 = st.columns(2)
176
- with col2:
177
- lev = st.number_input('Leverage', min_value=1, value=1, max_value= 5, step=1)
178
- with col1:
179
- principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
180
- st.write("Choose your DCA setup (for trades before 02/07/2023)")
181
- with st.container():
182
- col1, col2, col3, col4 = st.columns(4)
183
- with col1:
184
- dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value= 100, step=1)
185
- with col2:
186
- dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value= 100, step=1)
187
- with col3:
188
- dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value= 100, step=1)
189
- with col4:
190
- dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value= 100, step=1)
191
- st.write("Choose your DCA setup (for trades on or after 02/07/2023)")
192
- with st.container():
193
- col1, col2 = st.columns(2)
194
- with col1:
195
- dca5 = st.number_input('DCA 1 Allocation', min_value=0, value=50, max_value= 100, step=1)
196
- with col2:
197
- dca6 = st.number_input('DCA 2 Allocation', min_value=0, value=50, max_value= 100, step=1)
198
-
199
-
200
- #hack way to get button centered
201
- c = st.columns(9)
202
- with c[4]:
203
- submitted = st.form_submit_button("Get Cookin'!")
204
 
205
- signal_map = {'Long': 1, 'Short':-1} # 1 for long #-1 for short
206
- dca_map = {1: 25/100, 2: 25/100, 3: 25/100, 4: 25/100, 1.1: 50/100, 2.1: 50/100}
207
- df['DCA %'] = df['DCA'].map(dca_map)
208
- df['Calculated Return %'] = (df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
209
-
210
- if submitted and principal_balance * lev > dollar_cap:
211
- lev = np.floor(dollar_cap/principal_balance)
212
- st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
213
-
214
- if submitted and no_errors:
215
- df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
216
-
217
- if len(df) == 0:
218
- st.error("There are no available trades matching your selections. Please try again!")
219
- no_errors = False
220
- if no_errors:
221
-
222
- dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
223
- df['DCA %'] = df['DCA'].map(dca_map)
224
- df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
225
- df['DCA'] = np.floor(df['DCA'].values)
226
 
227
- df['Return Per Trade'] = np.nan
228
- df['Balance used in Trade'] = np.nan
229
- df['New Balance'] = np.nan
230
-
231
- g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
232
 
233
- df.loc[df['DCA']==1.0,'Return Per Trade'] = 1+lev*g['Return Per Trade'].values
234
 
235
- df['Compounded Return'] = df['Return Per Trade'].cumprod()
236
- df.loc[df['DCA']==1.0,'New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df.loc[df['DCA']==1.0,'Compounded Return']]
237
- df.loc[df['DCA']==1.0,'Balance used in Trade'] = np.concatenate([[principal_balance], df.loc[df['DCA']==1.0,'New Balance'].values[:-1]])
238
- df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
239
- df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
240
- cum_pl = df.loc[df.dropna().index[-1],'Cumulative P/L'] + principal_balance
241
-
242
- effective_return = 100*((cum_pl - principal_balance)/principal_balance)
243
-
244
- st.header(f"{bot_selections} Results")
245
- if len(bot_selections) > 1:
246
- st.metric(
247
- "Total Account Balance",
248
- f"${cum_pl:.2f}",
249
- f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
250
- )
251
-
252
- st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
253
-
254
- df['Per Trade Return Rate'] = df['Return Per Trade']-1
255
-
256
- totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
257
- data = get_hist_info(df.dropna(), principal_balance,'Per Trade Return Rate')
258
- totals.loc[len(totals)] = list(i for i in data)
259
-
260
- totals['Cum. P/L'] = cum_pl-principal_balance
261
- totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
262
- #results_df['Avg. P/L'] = (cum_pl-principal_balance)/results_df['# of Trades'].values[0]
263
- #results_df['Avg. P/L (%)'] = 100*results_df['Avg. P/L'].values[0]/principal_balance
264
-
265
- if df.empty:
266
- st.error("Oops! None of the data provided matches your selection(s). Please try again.")
267
- else:
268
- #st.dataframe(totals.style.format({'# of Trades': '{:.0f}','Wins': '{:.0f}','Losses': '{:.0f}','Win Rate': '{:.2f}%','Profit Factor' : '{:.2f}', 'Avg. P/L (%)': '{:.2f}%', 'Cum. P/L (%)': '{:.2f}%', 'Cum. P/L': '{:.2f}', 'Avg. P/L': '{:.2f}'})
269
- #.text_gradient(subset=['Win Rate'],cmap="RdYlGn", vmin = 0, vmax = 100)\
270
- #.text_gradient(subset=['Profit Factor'],cmap="RdYlGn", vmin = 0, vmax = 2), use_container_width=True)
271
- for row in totals.itertuples():
272
- col1, col2, col3, col4 = st.columns(4)
273
- c1, c2, c3, c4 = st.columns(4)
274
- with col1:
275
- st.metric(
276
- "Total Trades",
277
- f"{row._1:.0f}",
278
- )
279
- with c1:
280
- st.metric(
281
- "Profit Factor",
282
- f"{row._5:.2f}",
283
- )
284
- with col2:
285
- st.metric(
286
- "Wins",
287
- f"{row.Wins:.0f}",
288
- )
289
- with c2:
290
- st.metric(
291
- "Cumulative P/L",
292
- f"${row._6:.2f}",
293
- f"{row._7:.2f} %",
294
- )
295
- with col3:
296
- st.metric(
297
- "Losses",
298
- f"{row.Losses:.0f}",
299
- )
300
- with c3:
301
- st.metric(
302
- "Rolling 7 Days",
303
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
304
- f"{get_rolling_stats(df,lev, otimeheader,7):.2f}%",
305
- )
306
- st.metric(
307
- "Rolling 30 Days",
308
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
309
- f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
310
- )
311
-
312
- with col4:
313
- st.metric(
314
- "Win Rate",
315
- f"{row._4:.1f}%",
316
- )
317
- with c4:
318
- st.metric(
319
- "Rolling 90 Days",
320
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
321
- f"{get_rolling_stats(df,lev, otimeheader,90):.2f}%",
322
- )
323
- st.metric(
324
- "Rolling 180 Days",
325
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
326
- f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
327
- )
328
- if submitted:
329
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
330
- 'Sell Price' : 'max',
331
- 'Net P/L Per Trade': 'mean',
332
- 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2),
333
- 'DCA': lambda x: int(np.floor(x.max()))})
334
- grouped_df.index = range(1, len(grouped_df)+1)
335
- grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
336
- 'Net P/L Per Trade':'Net P/L',
337
- 'Calculated Return %':'P/L %'}, inplace=True)
338
- else:
339
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
340
- 'Sell Price' : 'max',
341
- 'P/L per token': 'mean',
342
- 'Calculated Return %' : lambda x: np.round(100*x.sum(),2),
343
- 'DCA': lambda x: int(np.floor(x.max()))})
344
- grouped_df.index = range(1, len(grouped_df)+1)
345
- grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
346
- 'Calculated Return %':'P/L %',
347
- 'P/L per token':'Net P/L'}, inplace=True)
348
-
349
- st.subheader("Trade Logs")
350
- grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
351
- grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
352
- st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}','# of DCAs':'{:.0f}', 'Net P/L':'${:.2f}', 'P/L %' :'{:.2f}%'})\
353
- .applymap(my_style,subset=['Net P/L'])\
354
- .applymap(my_style,subset=['P/L %']), use_container_width=True)
355
-
356
  if __name__ == "__main__":
357
  st.set_page_config(
358
  "Trading Bot Dashboard",
@@ -362,3 +722,5 @@ if __name__ == "__main__":
362
  # -
363
 
364
 
20
  import datetime as dt
21
  import matplotlib.pyplot as plt
22
  from pathlib import Path
23
+ import time
24
+ import plotly.graph_objects as go
25
+ import plotly.io as pio
26
+ from PIL import Image
27
 
28
  import streamlit as st
29
  import plotly.express as px
30
  import altair as alt
31
  import dateutil.parser
32
+ from matplotlib.colors import LinearSegmentedColormap
33
 
34
 
35
  # +
36
+ class color:
37
+ PURPLE = '\033[95m'
38
+ CYAN = '\033[96m'
39
+ DARKCYAN = '\033[36m'
40
+ BLUE = '\033[94m'
41
+ GREEN = '\033[92m'
42
+ YELLOW = '\033[93m'
43
+ RED = '\033[91m'
44
+ BOLD = '\033[1m'
45
+ UNDERLINE = '\033[4m'
46
+ END = '\033[0m'
47
+
48
+ @st.experimental_memo
49
+ def print_PL(amnt, thresh, extras = "" ):
50
+ if amnt > 0:
51
+ return color.BOLD + color.GREEN + str(amnt) + extras + color.END
52
+ elif amnt < 0:
53
+ return color.BOLD + color.RED + str(amnt)+ extras + color.END
54
+ elif np.isnan(amnt):
55
+ return str(np.nan)
56
+ else:
57
+ return str(amnt + extras)
58
+
59
+ @st.experimental_memo
60
+ def get_headers(logtype):
61
+ otimeheader = ""
62
+ cheader = ""
63
+ plheader = ""
64
+ fmat = '%Y-%m-%d %H:%M:%S'
65
+
66
+ if logtype == "ByBit":
67
+ otimeheader = 'Create Time'
68
+ cheader = 'Contracts'
69
+ plheader = 'Closed P&L'
70
+ fmat = '%Y-%m-%d %H:%M:%S'
71
+
72
+ if logtype == "BitGet":
73
+ otimeheader = 'Date'
74
+ cheader = 'Futures'
75
+ plheader = 'Realized P/L'
76
+ fmat = '%Y-%m-%d %H:%M:%S'
77
+
78
+ if logtype == "MEXC":
79
+ otimeheader = 'Trade time'
80
+ cheader = 'Futures'
81
+ plheader = 'closing position'
82
+ fmat = '%Y/%m/%d %H:%M'
83
+
84
+ if logtype == "Binance":
85
+ otimeheader = 'Date'
86
+ cheader = 'Symbol'
87
+ plheader = 'Realized Profit'
88
+ fmat = '%Y-%m-%d %H:%M:%S'
89
+
90
+ #if logtype == "Kucoin":
91
+ # otimeheader = 'Time'
92
+ # cheader = 'Contract'
93
+ # plheader = ''
94
+ # fmat = '%Y/%m/%d %H:%M:%S'
95
+
96
+
97
+ if logtype == "Kraken":
98
+ otimeheader = 'time'
99
+ cheader = 'asset'
100
+ plheader = 'amount'
101
+ fmat = '%Y-%m-%d %H:%M:%S.%f'
102
+
103
+ if logtype == "OkX":
104
+ otimeheader = '\ufeffOrder Time'
105
+ cheader = '\ufeffInstrument'
106
+ plheader = '\ufeffPL'
107
+ fmat = '%Y-%m-%d %H:%M:%S'
108
+
109
+ return otimeheader.lower(), cheader.lower(), plheader.lower(), fmat
110
+
111
+ @st.experimental_memo
112
+ def get_coin_info(df_coin, principal_balance,plheader):
113
+ numtrades = int(len(df_coin))
114
+ numwin = int(sum(df_coin[plheader] > 0))
115
+ numloss = int(sum(df_coin[plheader] < 0))
116
+ winrate = np.round(100*numwin/numtrades,2)
117
+
118
+ grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
119
+ grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
120
+ if grossloss != 0:
121
+ pfactor = -1*np.round(grosswin/grossloss,2)
122
+ else:
123
+ pfactor = np.nan
124
+
125
+ cum_PL = np.round(sum(df_coin[plheader].values),2)
126
+ cum_PL_perc = np.round(100*cum_PL/principal_balance,2)
127
+ mean_PL = np.round(sum(df_coin[plheader].values/len(df_coin)),2)
128
+ mean_PL_perc = np.round(100*mean_PL/principal_balance,2)
129
+
130
+ return numtrades, numwin, numloss, winrate, pfactor, cum_PL, cum_PL_perc, mean_PL, mean_PL_perc
131
+
132
  @st.experimental_memo
133
  def get_hist_info(df_coin, principal_balance,plheader):
134
  numtrades = int(len(df_coin))
135
  numwin = int(sum(df_coin[plheader] > 0))
136
  numloss = int(sum(df_coin[plheader] < 0))
137
+ if numtrades != 0:
138
+ winrate = int(np.round(100*numwin/numtrades,2))
139
+ else:
140
+ winrate = np.nan
141
 
142
  grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
143
  grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
144
+ if grossloss != 0:
145
  pfactor = -1*np.round(grosswin/grossloss,2)
146
  else:
147
  pfactor = np.nan
148
  return numtrades, numwin, numloss, winrate, pfactor
149
+
150
  @st.experimental_memo
151
  def get_rolling_stats(df, lev, otimeheader, days):
152
  max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
 
162
  else:
163
  rolling_perc = np.nan
164
  return 100*rolling_perc
165
+ @st.experimental_memo
166
+ def cc_coding(row):
167
+ return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2022-12-16 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
168
+ def ctt_coding(row):
169
+ return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2023-01-02 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
170
 
171
  @st.experimental_memo
172
+ def my_style(v, props=''):
173
+ props = 'color:red' if v < 0 else 'color:green'
174
+ return props
175
+
176
  def filt_df(df, cheader, symbol_selections):
177
 
178
  df = df.copy()
179
  df = df[df[cheader].isin(symbol_selections)]
180
 
181
  return df
182
 
183
+ def tv_reformat(close50filename):
184
+ try:
185
+ data = pd.read_csv(open('CT-Trade-Log-50.csv','r'), sep='[,|\t]', engine='python')
186
+ except:
187
+ data = pd.DataFrame([])
188
+
189
+ if data.empty:
190
+ return data
191
+ else:
192
+ entry_df = data[data['Type'] == "Entry Long"]
193
+ exit_df = data[data['Type']=="Exit Long"]
194
+
195
+ entry_df.index = range(len(entry_df))
196
+ exit_df.index = range(len(exit_df))
197
+
198
+ df = pd.DataFrame([], columns=['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %'])
199
+
200
+ df['Trade'] = entry_df.index
201
+ df['Entry Date'] = entry_df['Date/Time']
202
+ df['Buy Price'] = entry_df['Price USDT']
203
+
204
+ df['Sell Price'] = exit_df['Price USDT']
205
+ df['Exit Date'] = exit_df['Date/Time']
206
+ df['P/L per token'] = df['Sell Price'] - df['Buy Price']
207
+ df['P/L %'] = exit_df['Profit %']
208
+ df['Drawdown %'] = exit_df['Drawdown %']
209
+ df['Close 50'] = [int(i == "Close 50% of Position") for i in exit_df['Signal']]
210
+ df.loc[df['Close 50'] == 1, 'Exit Date'] = np.copy(df.loc[df[df['Close 50'] == 1].index.values -1]['Exit Date'])
211
+
212
+ grouped_df = df.groupby('Entry Date').agg({'Entry Date': 'min', 'Buy Price':'mean',
213
+ 'Sell Price' : 'mean',
214
+ 'Exit Date': 'max',
215
+ 'P/L per token': 'mean',
216
+ 'P/L %' : 'mean'})
217
+
218
+ grouped_df.insert(0,'Trade', range(len(grouped_df)))
219
+ grouped_df.index = range(len(grouped_df))
220
+ return grouped_df
221
 
 
222
  def load_data(filename, otimeheader, fmat):
223
+ df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
224
+ close50filename = filename.split('.')[0] + '-50.' + filename.split('.')[1]
225
+ df2 = tv_reformat(close50filename)
226
+
227
+ if filename == "CT-Trade-Log.csv":
228
+ df.columns = ['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
229
+ df.insert(1, 'Signal', ['Long']*len(df))
230
+ elif filename == "CC-Trade-Log.csv":
231
+ df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
232
+ else:
233
+ df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %']
234
+
235
+ if filename != "CT-Toasted-Trade-Log.csv":
236
+ df['Signal'] = df['Signal'].str.replace(' ', '', regex=True)
237
+ df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
238
+ df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
239
+ df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
240
+ df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
241
+ df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
242
+ df['P/L per token'] = df['P/L per token'].str.replace(',', '', regex=True)
243
+ df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
244
+
245
+ df['Buy Price'] = pd.to_numeric(df['Buy Price'])
246
+ df['Sell Price'] = pd.to_numeric(df['Sell Price'])
247
+ df['P/L per token'] = pd.to_numeric(df['P/L per token'])
248
+ df['P/L %'] = pd.to_numeric(df['P/L %'])
249
+
250
+ if df2.empty:
251
+ df = df
252
+ else:
253
+ df = pd.concat([df,df2], axis=0, ignore_index=True)
254
+
255
+ if filename == "CT-Trade-Log.csv":
256
+ df['Signal'] = ['Long']*len(df)
257
 
258
  dateheader = 'Date'
259
  theader = 'Time'
260
+
261
  df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
262
  df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
263
 
264
  df[otimeheader]= [dateutil.parser.parse(date+' '+time)
265
+ for date,time in zip(df[dateheader],df[theader])]
 
266
  df[otimeheader] = pd.to_datetime(df[otimeheader])
267
  df['Exit Date'] = pd.to_datetime(df['Exit Date'])
268
  df.sort_values(by=otimeheader, inplace=True)
269
+
270
  df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
271
  df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
272
  df['Trade'] = df.index + 1 #reindex
273
 
274
+ if filename == "CT-Trade-Log.csv":
275
+ df['DCA'] = np.nan
276
+
277
+ for exit in pd.unique(df['Exit Date']):
278
+ df_exit = df[df['Exit Date']==exit]
279
+ if dateutil.parser.parse(str(exit)) < dateutil.parser.parse('2023-02-07 13:00:00'):
280
+ for i in range(len(df_exit)):
281
+ ind = df_exit.index[i]
282
+ df.loc[ind,'DCA'] = i+1
283
+
284
+ else:
285
+ for i in range(len(df_exit)):
286
+ ind = df_exit.index[i]
287
+ df.loc[ind,'DCA'] = i+1.1
288
  return df
289
 
290
 
291
+ def get_sd_df(sd_df, sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance):
292
+ sd = 2*.00026
293
+ # ------ Standard Dev. Calculations.
294
+ if bot_selections == "Cinnamon Toast":
295
+ dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
296
+ sd_df['DCA %'] = sd_df['DCA'].map(dca_map)
297
+ sd_df['Calculated Return % (+)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1+df['Signal'].map(signal_map)*sd) - df['Buy Price']*(1-df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1-df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
298
+ sd_df['Calculated Return % (-)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1-df['Signal'].map(signal_map)*sd)-df['Buy Price']*(1+df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1+df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
299
+ sd_df['DCA'] = np.floor(sd_df['DCA'].values)
300
+
301
+ sd_df['Return Per Trade (+)'] = np.nan
302
+ sd_df['Return Per Trade (-)'] = np.nan
303
+ sd_df['Balance used in Trade (+)'] = np.nan
304
+ sd_df['Balance used in Trade (-)'] = np.nan
305
+ sd_df['New Balance (+)'] = np.nan
306
+ sd_df['New Balance (-)'] = np.nan
307
+
308
+ g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
309
+ g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
310
+ sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
311
+ sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
312
+
313
+ sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
314
+ sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
315
+ sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (+)']]
316
+ sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'].values[:-1]])
317
+
318
+ sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (-)']]
319
+ sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'].values[:-1]])
320
+ else:
321
+ sd_df['Calculated Return % (+)'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']*(1+df['Signal'].map(signal_map)*sd) - df['Buy Price']*(1-df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1-df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
322
+ sd_df['Calculated Return % (-)'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']*(1-df['Signal'].map(signal_map)*sd)-df['Buy Price']*(1+df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1+df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
323
+ sd_df['Return Per Trade (+)'] = np.nan
324
+ sd_df['Return Per Trade (-)'] = np.nan
325
+
326
+ g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
327
+ g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
328
+ sd_df['Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
329
+ sd_df['Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
330
+
331
+ sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
332
+ sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
333
+ sd_df['New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (+)']]
334
+ sd_df['Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df['New Balance (+)'].values[:-1]])
335
+
336
+ sd_df['New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (-)']]
337
+ sd_df['Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df['New Balance (-)'].values[:-1]])
338
+
339
+ sd_df['Net P/L Per Trade (+)'] = (sd_df['Return Per Trade (+)']-1)*sd_df['Balance used in Trade (+)']
340
+ sd_df['Cumulative P/L (+)'] = sd_df['Net P/L Per Trade (+)'].cumsum()
341
+
342
+ sd_df['Net P/L Per Trade (-)'] = (sd_df['Return Per Trade (-)']-1)*sd_df['Balance used in Trade (-)']
343
+ sd_df['Cumulative P/L (-)'] = sd_df['Net P/L Per Trade (-)'].cumsum()
344
+ return sd_df
345
+
346
+ def runapp() -> None:
347
+ bot_selections = "Cinnamon Toast"
348
+ otimeheader = 'Exit Date'
349
+ fmat = '%Y-%m-%d %H:%M:%S'
350
+ fees = .075/100
351
 
352
+ st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
353
+ no_errors = True
354
+ st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
355
+ "the performance of our trading bots.")
356
 
357
+ if bot_selections == "Cinnamon Toast":
358
+ lev_cap = 5
359
+ dollar_cap = 1000000000.00
360
+ data = load_data("CT-Trade-Log.csv",otimeheader, fmat)
361
+ if bot_selections == "French Toast":
362
+ lev_cap = 3
363
+ dollar_cap = 10000000000.00
364
+ data = load_data("FT-Trade-Log.csv",otimeheader, fmat)
365
+ if bot_selections == "Short Bread":
366
+ lev_cap = 5
367
+ dollar_cap = 100000.00
368
+ data = load_data("SB-Trade-Log.csv",otimeheader, fmat)
369
+ if bot_selections == "Cosmic Cupcake":
370
+ lev_cap = 3
371
+ dollar_cap = 100000.00
372
+ data = load_data("CC-Trade-Log.csv",otimeheader, fmat)
373
+ if bot_selections == "CT Toasted":
374
+ lev_cap = 5
375
+ dollar_cap = 100000.00
376
+ data = load_data("CT-Toasted-Trade-Log.csv",otimeheader, fmat)
377
 
378
+ df = data.copy(deep=True)
379
 
380
+ dateheader = 'Date'
381
+ theader = 'Time'
382
+
383
+ st.subheader("Choose your settings:")
384
+ with st.form("user input", ):
385
+ if no_errors:
386
+ with st.container():
387
+ col1, col2 = st.columns(2)
388
+ with col1:
389
+ try:
390
+ startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
391
+ except:
392
+ st.error("Please select your exchange or upload a supported trade log file.")
393
+ no_errors = False
394
+ with col2:
395
+ try:
396
+ enddate = st.date_input("End Date", value=datetime.today())
397
+ except:
398
+ st.error("Please select your exchange or upload a supported trade log file.")
399
+ no_errors = False
400
+ #st.sidebar.subheader("Customize your Dashboard")
401
+
402
+ if no_errors and (enddate < startdate):
403
+ st.error("End Date must be later than Start date. Please try again.")
404
+ no_errors = False
405
+ with st.container():
406
+ col1,col2 = st.columns(2)
407
+ with col2:
408
+ lev = st.number_input('Leverage', min_value=1, value=1, max_value= lev_cap, step=1)
409
+ with col1:
410
+ principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
411
+
412
+ if bot_selections == "Cinnamon Toast":
413
+ st.write("Choose your DCA setup (for trades before 02/07/2023)")
414
+ with st.container():
415
+ col1, col2, col3, col4 = st.columns(4)
416
+ with col1:
417
+ dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value= 100, step=1)
418
+ with col2:
419
+ dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value= 100, step=1)
420
+ with col3:
421
+ dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value= 100, step=1)
422
+ with col4:
423
+ dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value= 100, step=1)
424
+ st.write("Choose your DCA setup (for trades on or after 02/07/2023)")
425
+ with st.container():
426
+ col1, col2 = st.columns(2)
427
+ with col1:
428
+ dca5 = st.number_input('DCA 1 Allocation', min_value=0, value=50, max_value= 100, step=1)
429
+ with col2:
430
+ dca6 = st.number_input('DCA 2 Allocation', min_value=0, value=50, max_value= 100, step=1)
431
+
432
+ #hack way to get button centered
433
+ c = st.columns(9)
434
+ with c[4]:
435
+ submitted = st.form_submit_button("Get Cookin'!")
436
+
437
+ if submitted and principal_balance * lev > dollar_cap:
438
+ lev = np.floor(dollar_cap/principal_balance)
439
+ st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
440
+
441
+ if submitted and no_errors:
442
+ df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
443
+ signal_map = {'Long': 1, 'Short':-1}
444
+
445
+
446
+ if len(df) == 0:
447
+ st.error("There are no available trades matching your selections. Please try again!")
448
+ no_errors = False
449
+
450
+ if no_errors:
451
+ if bot_selections == "Cinnamon Toast":
452
+ dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
453
+ df['DCA %'] = df['DCA'].map(dca_map)
454
+ df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
455
+ df['DCA'] = np.floor(df['DCA'].values)
456
+
457
+ df['Return Per Trade'] = np.nan
458
+ df['Balance used in Trade'] = np.nan
459
+ df['New Balance'] = np.nan
460
 
461
+ g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
462
+ df.loc[df['DCA']==1.0,'Return Per Trade'] = 1+lev*g['Return Per Trade'].values
463
+
464
+ df['Compounded Return'] = df['Return Per Trade'].cumprod()
465
+ df.loc[df['DCA']==1.0,'New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df.loc[df['DCA']==1.0,'Compounded Return']]
466
+ df.loc[df['DCA']==1.0,'Balance used in Trade'] = np.concatenate([[principal_balance], df.loc[df['DCA']==1.0,'New Balance'].values[:-1]])
467
+ else:
468
+ df['Calculated Return %'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
469
+ df['Return Per Trade'] = np.nan
470
+ g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
471
+ df['Return Per Trade'] = 1+lev*g['Return Per Trade'].values
472
+
473
+ df['Compounded Return'] = df['Return Per Trade'].cumprod()
474
+ df['New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df['Compounded Return']]
475
+ df['Balance used in Trade'] = np.concatenate([[principal_balance], df['New Balance'].values[:-1]])
476
+ df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
477
+ df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
478
+
479
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
480
+ cum_pl = df.loc[df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L'] + principal_balance
481
+ #cum_sdp = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
482
+ #cum_sdm = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
483
+ else:
484
+ cum_pl = df.loc[df.dropna().index[-1],'Cumulative P/L'] + principal_balance
485
+ #cum_sdp = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
486
+ #cum_sdm = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
487
+ #sd = 2*.00026
488
+ #sd_df = get_sd_df(get_sd_df(df.copy(), sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance)
489
+
490
+ effective_return = 100*((cum_pl - principal_balance)/principal_balance)
491
+
492
+ st.header(f"{bot_selections} Results")
493
+ with st.container():
494
+
495
+ if len(bot_selections) > 1:
496
+ col1, col2 = st.columns(2)
497
+ with col1:
498
+ st.metric(
499
+ "Total Account Balance",
500
+ f"${cum_pl:.2f}",
501
+ f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
502
+ )
503
+
504
+ # with col2:
505
+ # st.write("95% of trades should fall within this 2 std. dev. range.")
506
+ # st.metric(
507
+ # "High Range (+ 2 std. dev.)",
508
+ # f"", #${cum_sdp:.2f}
509
+ # f"{100*(cum_sdp-principal_balance)/(principal_balance):.2f} %",
510
+ # )
511
+ # st.metric(
512
+ # "Low Range (- 2 std. dev.)",
513
+ # f"" ,#${cum_sdm:.2f}"
514
+ # f"{100*(cum_sdm-principal_balance)/(principal_balance):.2f} %",
515
+ # )
516
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
517
+ #st.line_chart(data=df.drop('Drawdown %', axis=1).dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
518
+ dfdata = df.drop('Drawdown %', axis=1).dropna()
519
+ #sd_df = sd_df.drop('Drawdown %', axis=1).dropna()
520
+ else:
521
+ #st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
522
+ dfdata = df.dropna()
523
+ #sd_df = sd_df.dropna()
524
+
525
+ # Create figure
526
+ fig = go.Figure()
527
+
528
+ pyLogo = Image.open("logo.png")
529
+
530
+ # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (+)'],line_shape='spline',
531
+ # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), showlegend = False)
532
+ # )
533
+
534
+ # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (-)'],
535
+ # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), line_shape='spline',
536
+ # fill='tonexty',
537
+ # fillcolor = 'rgba(31, 119, 200,.2)', name = '+/- Standard Deviation')
538
+ # )
539
+
540
+ # Add trace
541
+ fig.add_trace(
542
+ go.Scatter(x=dfdata['Exit Date'], y=np.round(dfdata['Cumulative P/L'].values,2), line_shape='spline',
543
+ line = {'smoothing': 1.0, 'color' : 'rgba(31, 119, 200,.8)'},
544
+ name='Cumulative P/L')
545
+ )
546
+ buyhold = (principal_balance/dfdata['Buy Price'][dfdata.index[0]])*(dfdata['Buy Price']-dfdata['Buy Price'][dfdata.index[0]])
547
+ fig.add_trace(go.Scatter(x=dfdata['Exit Date'], y=np.round(buyhold.values,2), line_shape='spline',
548
+ line = {'smoothing': 1.0, 'color' :'red'}, name = 'Buy & Hold Return')
549
+ )
550
+
551
+ fig.add_layout_image(
552
+ dict(
553
+ source=pyLogo,
554
+ xref="paper",
555
+ yref="paper",
556
+ x = 0.05, #dfdata['Exit Date'].astype('int64').min() // 10**9,
557
+ y = .85, #dfdata['Cumulative P/L'].max(),
558
+ sizex= .9, #(dfdata['Exit Date'].astype('int64').max() - dfdata['Exit Date'].astype('int64').min()) // 10**9,
559
+ sizey= .9, #(dfdata['Cumulative P/L'].max() - dfdata['Cumulative P/L'].min()),
560
+ sizing="contain",
561
+ opacity=0.2,
562
+ layer = "below")
563
+ )
564
+
565
+ #style layout
566
+ fig.update_layout(
567
+ height = 600,
568
+ xaxis=dict(
569
+ title="Exit Date",
570
+ tickmode='array',
571
+ ),
572
+ yaxis=dict(
573
+ title="Cumulative P/L"
574
+ ) )
575
+
576
+ st.plotly_chart(fig, theme=None, use_container_width=True,height=600)
577
+ st.write()
578
+ df['Per Trade Return Rate'] = df['Return Per Trade']-1
579
+
580
+ totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
581
+ if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
582
+ data = get_hist_info(df.drop('Drawdown %', axis=1).dropna(), principal_balance,'Per Trade Return Rate')
583
+ else:
584
+ data = get_hist_info(df.dropna(), principal_balance,'Per Trade Return Rate')
585
+ totals.loc[len(totals)] = list(i for i in data)
586
+
587
+ totals['Cum. P/L'] = cum_pl-principal_balance
588
+ totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
589
+
590
+ if df.empty:
591
+ st.error("Oops! None of the data provided matches your selection(s). Please try again.")
592
+ else:
593
+ with st.container():
594
+ for row in totals.itertuples():
595
+ col1, col2, col3, col4= st.columns(4)
596
+ c1, c2, c3, c4 = st.columns(4)
597
+ with col1:
598
+ st.metric(
599
+ "Total Trades",
600
+ f"{row._1:.0f}",
601
+ )
602
+ with c1:
603
+ st.metric(
604
+ "Profit Factor",
605
+ f"{row._5:.2f}",
606
+ )
607
+ with col2:
608
+ st.metric(
609
+ "Wins",
610
+ f"{row.Wins:.0f}",
611
+ )
612
+ with c2:
613
+ st.metric(
614
+ "Cumulative P/L",
615
+ f"${row._6:.2f}",
616
+ f"{row._7:.2f} %",
617
+ )
618
+ with col3:
619
+ st.metric(
620
+ "Losses",
621
+ f"{row.Losses:.0f}",
622
+ )
623
+ with c3:
624
+ st.metric(
625
+ "Rolling 7 Days",
626
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
627
+ f"{get_rolling_stats(df,lev, otimeheader, 7):.2f}%",
628
+ )
629
+ st.metric(
630
+ "Rolling 30 Days",
631
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
632
+ f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
633
+ )
634
+
635
+ with col4:
636
+ st.metric(
637
+ "Win Rate",
638
+ f"{row._4:.1f}%",
639
+ )
640
+ with c4:
641
+ st.metric(
642
+ "Rolling 90 Days",
643
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
644
+ f"{get_rolling_stats(df,lev, otimeheader, 90):.2f}%",
645
+ )
646
+ st.metric(
647
+ "Rolling 180 Days",
648
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
649
+ f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
650
+ )
651
 
652
+ if bot_selections == "Cinnamon Toast":
653
+ if submitted:
654
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
655
+ 'Sell Price' : 'max',
656
+ 'Net P/L Per Trade': 'mean',
657
+ 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2),
658
+ 'DCA': lambda x: int(np.floor(x.max()))})
659
+ grouped_df.index = range(1, len(grouped_df)+1)
660
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
661
+ 'Net P/L Per Trade':'Net P/L',
662
+ 'Calculated Return %':'P/L %'}, inplace=True)
663
+ else:
664
+ dca_map = {1: 25/100, 2: 25/100, 3: 25/100, 4: 25/100, 1.1: 50/100, 2.1: 50/100}
665
+ df['DCA %'] = df['DCA'].map(dca_map)
666
+ df['Calculated Return %'] = (df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
667
+
668
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
669
+ 'Sell Price' : 'max',
670
+ 'P/L per token': 'mean',
671
+ 'Calculated Return %' : lambda x: np.round(100*x.sum(),2),
672
+ 'DCA': lambda x: int(np.floor(x.max()))})
673
+ grouped_df.index = range(1, len(grouped_df)+1)
674
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
675
+ 'Calculated Return %':'P/L %',
676
+ 'P/L per token':'Net P/L'}, inplace=True)
677
+
678
+ else:
679
+ if submitted:
680
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
681
+ 'Sell Price' : 'max',
682
+ 'Net P/L Per Trade': 'mean',
683
+ 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2)})
684
+ grouped_df.index = range(1, len(grouped_df)+1)
685
+ grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
686
+ 'Net P/L Per Trade':'Net P/L',
687
+ 'Calculated Return %':'P/L %'}, inplace=True)
688
+ else:
689
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
690
+ 'Sell Price' : 'max',
691
+ 'P/L per token': 'mean',
692
+ 'P/L %':'mean'})
693
+ grouped_df.index = range(1, len(grouped_df)+1)
694
+ grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
695
+ 'P/L per token':'Net P/L'}, inplace=True)
696
+ st.subheader("Trade Logs")
697
+ grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
698
+ grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
699
+ if bot_selections == "Cosmic Cupcake" or bot_selections == "CT Toasted":
700
+ coding = cc_coding if bot_selections == "Cosmic Cupcake" else ctt_coding
701
+ st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
702
+ .apply(coding, axis=1)\
703
+ .applymap(my_style,subset=['Net P/L'])\
704
+ .applymap(my_style,subset=['P/L %']), use_container_width=True)
705
+ new_title = '<div style="text-align: right;"><span style="background-color:lightgrey;">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span> Not Live Traded</div>'
706
+ st.markdown(new_title, unsafe_allow_html=True)
707
+ else:
708
+ st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
709
+ .applymap(my_style,subset=['Net P/L'])\
710
+ .applymap(my_style,subset=['P/L %']), use_container_width=True)
711
+
712
+ # st.subheader("Checking Status")
713
+ # if submitted:
714
+ # st.dataframe(sd_df)
715
+
716
  if __name__ == "__main__":
717
  st.set_page_config(
718
  "Trading Bot Dashboard",
 
722
  # -
723
 
724
 
725
+
726
+
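For reference, a minimal standalone sketch (not part of this commit) of the fee-adjusted, DCA-weighted per-trade return that both the old and new runapp() compute as df['Calculated Return %']. The prices here are hypothetical; the 0.075% fee and 25% allocation match the defaults shown in the diff above.

# Sketch of the Calculated Return % formula from app.py, with hypothetical inputs.
fees = 0.075 / 100     # fee charged once on open and once on close
dca_pct = 0.25         # 25% allocation for this DCA entry
signal = 1             # 1 for Long, -1 for Short
buy_price = 100.0      # hypothetical entry price
sell_price = 103.0     # hypothetical exit price

calc_return = signal * dca_pct * (1 - fees) * ((sell_price - buy_price) / buy_price - fees)
print(f"{100 * calc_return:.4f}%")   # roughly 0.73% for this hypothetical trade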
historical_app.py DELETED
@@ -1,726 +0,0 @@
1
- # ---
2
- # jupyter:
3
- # jupytext:
4
- # text_representation:
5
- # extension: .py
6
- # format_name: light
7
- # format_version: '1.5'
8
- # jupytext_version: 1.14.2
9
- # kernelspec:
10
- # display_name: Python [conda env:bbytes] *
11
- # language: python
12
- # name: conda-env-bbytes-py
13
- # ---
14
-
15
- # +
16
- import csv
17
- import pandas as pd
18
- from datetime import datetime, timedelta
19
- import numpy as np
20
- import datetime as dt
21
- import matplotlib.pyplot as plt
22
- from pathlib import Path
23
- import time
24
- import plotly.graph_objects as go
25
- import plotly.io as pio
26
- from PIL import Image
27
-
28
- import streamlit as st
29
- import plotly.express as px
30
- import altair as alt
31
- import dateutil.parser
32
- from matplotlib.colors import LinearSegmentedColormap
33
-
34
-
35
- # +
36
- class color:
37
- PURPLE = '\033[95m'
38
- CYAN = '\033[96m'
39
- DARKCYAN = '\033[36m'
40
- BLUE = '\033[94m'
41
- GREEN = '\033[92m'
42
- YELLOW = '\033[93m'
43
- RED = '\033[91m'
44
- BOLD = '\033[1m'
45
- UNDERLINE = '\033[4m'
46
- END = '\033[0m'
47
-
48
- @st.experimental_memo
49
- def print_PL(amnt, thresh, extras = "" ):
50
- if amnt > 0:
51
- return color.BOLD + color.GREEN + str(amnt) + extras + color.END
52
- elif amnt < 0:
53
- return color.BOLD + color.RED + str(amnt)+ extras + color.END
54
- elif np.isnan(amnt):
55
- return str(np.nan)
56
- else:
57
- return str(amnt + extras)
58
-
59
- @st.experimental_memo
60
- def get_headers(logtype):
61
- otimeheader = ""
62
- cheader = ""
63
- plheader = ""
64
- fmat = '%Y-%m-%d %H:%M:%S'
65
-
66
- if logtype == "ByBit":
67
- otimeheader = 'Create Time'
68
- cheader = 'Contracts'
69
- plheader = 'Closed P&L'
70
- fmat = '%Y-%m-%d %H:%M:%S'
71
-
72
- if logtype == "BitGet":
73
- otimeheader = 'Date'
74
- cheader = 'Futures'
75
- plheader = 'Realized P/L'
76
- fmat = '%Y-%m-%d %H:%M:%S'
77
-
78
- if logtype == "MEXC":
79
- otimeheader = 'Trade time'
80
- cheader = 'Futures'
81
- plheader = 'closing position'
82
- fmat = '%Y/%m/%d %H:%M'
83
-
84
- if logtype == "Binance":
85
- otimeheader = 'Date'
86
- cheader = 'Symbol'
87
- plheader = 'Realized Profit'
88
- fmat = '%Y-%m-%d %H:%M:%S'
89
-
90
- #if logtype == "Kucoin":
91
- # otimeheader = 'Time'
92
- # cheader = 'Contract'
93
- # plheader = ''
94
- # fmat = '%Y/%m/%d %H:%M:%S'
95
-
96
-
97
- if logtype == "Kraken":
98
- otimeheader = 'time'
99
- cheader = 'asset'
100
- plheader = 'amount'
101
- fmat = '%Y-%m-%d %H:%M:%S.%f'
102
-
103
- if logtype == "OkX":
104
- otimeheader = '\ufeffOrder Time'
105
- cheader = '\ufeffInstrument'
106
- plheader = '\ufeffPL'
107
- fmat = '%Y-%m-%d %H:%M:%S'
108
-
109
- return otimeheader.lower(), cheader.lower(), plheader.lower(), fmat
110
-
111
- @st.experimental_memo
112
- def get_coin_info(df_coin, principal_balance,plheader):
113
- numtrades = int(len(df_coin))
114
- numwin = int(sum(df_coin[plheader] > 0))
115
- numloss = int(sum(df_coin[plheader] < 0))
116
- winrate = np.round(100*numwin/numtrades,2)
117
-
118
- grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
119
- grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
120
- if grossloss != 0:
121
- pfactor = -1*np.round(grosswin/grossloss,2)
122
- else:
123
- pfactor = np.nan
124
-
125
- cum_PL = np.round(sum(df_coin[plheader].values),2)
126
- cum_PL_perc = np.round(100*cum_PL/principal_balance,2)
127
- mean_PL = np.round(sum(df_coin[plheader].values/len(df_coin)),2)
128
- mean_PL_perc = np.round(100*mean_PL/principal_balance,2)
129
-
130
- return numtrades, numwin, numloss, winrate, pfactor, cum_PL, cum_PL_perc, mean_PL, mean_PL_perc
131
-
132
- @st.experimental_memo
133
- def get_hist_info(df_coin, principal_balance,plheader):
134
- numtrades = int(len(df_coin))
135
- numwin = int(sum(df_coin[plheader] > 0))
136
- numloss = int(sum(df_coin[plheader] < 0))
137
- if numtrades != 0:
138
- winrate = int(np.round(100*numwin/numtrades,2))
139
- else:
140
- winrate = np.nan
141
-
142
- grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
143
- grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
144
- if grossloss != 0:
145
- pfactor = -1*np.round(grosswin/grossloss,2)
146
- else:
147
- pfactor = np.nan
148
- return numtrades, numwin, numloss, winrate, pfactor
149
-
150
- @st.experimental_memo
151
- def get_rolling_stats(df, lev, otimeheader, days):
152
- max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
153
-
154
- if max_roll >= days:
155
- rollend = df[otimeheader].max()-timedelta(days=days)
156
- rolling_df = df[df[otimeheader] >= rollend]
157
-
158
- if len(rolling_df) > 0:
159
- rolling_perc = rolling_df['Return Per Trade'].dropna().cumprod().values[-1]-1
160
- else:
161
- rolling_perc = np.nan
162
- else:
163
- rolling_perc = np.nan
164
- return 100*rolling_perc
165
- @st.experimental_memo
166
- def cc_coding(row):
167
- return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2022-12-16 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
168
- def ctt_coding(row):
169
- return ['background-color: lightgrey'] * len(row) if row['Exit Date'] <= datetime.strptime('2023-01-02 00:00:00','%Y-%m-%d %H:%M:%S').date() else [''] * len(row)
170
-
171
- @st.experimental_memo
172
- def my_style(v, props=''):
173
- props = 'color:red' if v < 0 else 'color:green'
174
- return props
175
-
176
- def filt_df(df, cheader, symbol_selections):
177
-
178
- df = df.copy()
179
- df = df[df[cheader].isin(symbol_selections)]
180
-
181
- return df
182
-
183
- def tv_reformat(close50filename):
184
- try:
185
- data = pd.read_csv(open('CT-Trade-Log-50.csv','r'), sep='[,|\t]', engine='python')
186
- except:
187
- data = pd.DataFrame([])
188
-
189
- if data.empty:
190
- return data
191
- else:
192
- entry_df = data[data['Type'] == "Entry Long"]
193
- exit_df = data[data['Type']=="Exit Long"]
194
-
195
- entry_df.index = range(len(entry_df))
196
- exit_df.index = range(len(exit_df))
197
-
198
- df = pd.DataFrame([], columns=['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %'])
199
-
200
- df['Trade'] = entry_df.index
201
- df['Entry Date'] = entry_df['Date/Time']
202
- df['Buy Price'] = entry_df['Price USDT']
203
-
204
- df['Sell Price'] = exit_df['Price USDT']
205
- df['Exit Date'] = exit_df['Date/Time']
206
- df['P/L per token'] = df['Sell Price'] - df['Buy Price']
207
- df['P/L %'] = exit_df['Profit %']
208
- df['Drawdown %'] = exit_df['Drawdown %']
209
- df['Close 50'] = [int(i == "Close 50% of Position") for i in exit_df['Signal']]
210
- df.loc[df['Close 50'] == 1, 'Exit Date'] = np.copy(df.loc[df[df['Close 50'] == 1].index.values -1]['Exit Date'])
211
-
212
- grouped_df = df.groupby('Entry Date').agg({'Entry Date': 'min', 'Buy Price':'mean',
213
- 'Sell Price' : 'mean',
214
- 'Exit Date': 'max',
215
- 'P/L per token': 'mean',
216
- 'P/L %' : 'mean'})
217
-
218
- grouped_df.insert(0,'Trade', range(len(grouped_df)))
219
- grouped_df.index = range(len(grouped_df))
220
- return grouped_df
221
-
222
- def load_data(filename, otimeheader, fmat):
223
- df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
224
- close50filename = filename.split('.')[0] + '-50.' + filename.split('.')[1]
225
- df2 = tv_reformat(close50filename)
226
-
227
- if filename == "CT-Trade-Log.csv":
228
- df.columns = ['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
229
- df.insert(1, 'Signal', ['Long']*len(df))
230
- elif filename == "CC-Trade-Log.csv":
231
- df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
232
- else:
233
- df.columns = ['Trade','Signal','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %']
234
-
235
- if filename != "CT-Toasted-Trade-Log.csv":
236
- df['Signal'] = df['Signal'].str.replace(' ', '', regex=True)
237
- df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
238
- df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
239
- df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
240
- df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
241
- df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
242
- df['P/L per token'] = df['P/L per token'].str.replace(',', '', regex=True)
243
- df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
244
-
245
- df['Buy Price'] = pd.to_numeric(df['Buy Price'])
246
- df['Sell Price'] = pd.to_numeric(df['Sell Price'])
247
- df['P/L per token'] = pd.to_numeric(df['P/L per token'])
248
- df['P/L %'] = pd.to_numeric(df['P/L %'])
249
-
250
- if df2.empty:
251
- df = df
252
- else:
253
- df = pd.concat([df,df2], axis=0, ignore_index=True)
254
-
255
- if filename == "CT-Trade-Log.csv":
256
- df['Signal'] = ['Long']*len(df)
257
-
258
- dateheader = 'Date'
259
- theader = 'Time'
260
-
261
- df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
262
- df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
263
-
264
- df[otimeheader]= [dateutil.parser.parse(date+' '+time)
265
- for date,time in zip(df[dateheader],df[theader])]
266
- df[otimeheader] = pd.to_datetime(df[otimeheader])
267
- df['Exit Date'] = pd.to_datetime(df['Exit Date'])
268
- df.sort_values(by=otimeheader, inplace=True)
269
-
270
- df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
271
- df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
272
- df['Trade'] = df.index + 1 #reindex
273
-
274
- if filename == "CT-Trade-Log.csv":
275
- df['DCA'] = np.nan
276
-
277
- for exit in pd.unique(df['Exit Date']):
278
- df_exit = df[df['Exit Date']==exit]
279
- if dateutil.parser.parse(str(exit)) < dateutil.parser.parse('2023-02-07 13:00:00'):
280
- for i in range(len(df_exit)):
281
- ind = df_exit.index[i]
282
- df.loc[ind,'DCA'] = i+1
283
-
284
- else:
285
- for i in range(len(df_exit)):
286
- ind = df_exit.index[i]
287
- df.loc[ind,'DCA'] = i+1.1
288
- return df
289
-
290
-
291
- def get_sd_df(sd_df, sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance):
292
- sd = 2*.00026
293
- # ------ Standard Dev. Calculations.
294
- if bot_selections == "Cinnamon Toast":
295
- dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
296
- sd_df['DCA %'] = sd_df['DCA'].map(dca_map)
297
- sd_df['Calculated Return % (+)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1+df['Signal'].map(signal_map)*sd) - df['Buy Price']*(1-df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1-df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
298
- sd_df['Calculated Return % (-)'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']*(1-df['Signal'].map(signal_map)*sd)-df['Buy Price']*(1+df['Signal'].map(signal_map)*sd))/df['Buy Price']*(1+df['Signal'].map(signal_map)*sd) - fees) #accounts for fees on open and close of trade
299
- sd_df['DCA'] = np.floor(sd_df['DCA'].values)
300
-
301
- sd_df['Return Per Trade (+)'] = np.nan
302
- sd_df['Return Per Trade (-)'] = np.nan
303
- sd_df['Balance used in Trade (+)'] = np.nan
304
- sd_df['Balance used in Trade (-)'] = np.nan
305
- sd_df['New Balance (+)'] = np.nan
306
- sd_df['New Balance (-)'] = np.nan
307
-
308
- g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
309
- g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
310
- sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
311
- sd_df.loc[sd_df['DCA']==1.0,'Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
312
-
313
- sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
314
- sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
315
- sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (+)']]
316
- sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (+)'].values[:-1]])
317
-
318
- sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df.loc[sd_df['DCA']==1.0,'Compounded Return (-)']]
319
- sd_df.loc[sd_df['DCA']==1.0,'Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df.loc[sd_df['DCA']==1.0,'New Balance (-)'].values[:-1]])
320
- else:
321
- sd_df['Calculated Return % (+)'] = sd_df['Signal'].map(signal_map)*(1-fees)*((sd_df['Sell Price']*(1+sd_df['Signal'].map(signal_map)*sd) - sd_df['Buy Price']*(1-sd_df['Signal'].map(signal_map)*sd))/(sd_df['Buy Price']*(1-sd_df['Signal'].map(signal_map)*sd)) - fees) #accounts for fees on open and close of trade
322
- sd_df['Calculated Return % (-)'] = sd_df['Signal'].map(signal_map)*(1-fees)*((sd_df['Sell Price']*(1-sd_df['Signal'].map(signal_map)*sd) - sd_df['Buy Price']*(1+sd_df['Signal'].map(signal_map)*sd))/(sd_df['Buy Price']*(1+sd_df['Signal'].map(signal_map)*sd)) - fees) #accounts for fees on open and close of trade
323
- sd_df['Return Per Trade (+)'] = np.nan
324
- sd_df['Return Per Trade (-)'] = np.nan
325
-
326
- g1 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (+)'].reset_index(name='Return Per Trade (+)')
327
- g2 = sd_df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return % (-)'].reset_index(name='Return Per Trade (-)')
328
- sd_df['Return Per Trade (+)'] = 1+lev*g1['Return Per Trade (+)'].values
329
- sd_df['Return Per Trade (-)'] = 1+lev*g2['Return Per Trade (-)'].values
330
-
331
- sd_df['Compounded Return (+)'] = sd_df['Return Per Trade (+)'].cumprod()
332
- sd_df['Compounded Return (-)'] = sd_df['Return Per Trade (-)'].cumprod()
333
- sd_df['New Balance (+)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (+)']]
334
- sd_df['Balance used in Trade (+)'] = np.concatenate([[principal_balance], sd_df['New Balance (+)'].values[:-1]])
335
-
336
- sd_df['New Balance (-)'] = [min(dollar_cap/lev, bal*principal_balance) for bal in sd_df['Compounded Return (-)']]
337
- sd_df['Balance used in Trade (-)'] = np.concatenate([[principal_balance], sd_df['New Balance (-)'].values[:-1]])
338
-
339
- sd_df['Net P/L Per Trade (+)'] = (sd_df['Return Per Trade (+)']-1)*sd_df['Balance used in Trade (+)']
340
- sd_df['Cumulative P/L (+)'] = sd_df['Net P/L Per Trade (+)'].cumsum()
341
-
342
- sd_df['Net P/L Per Trade (-)'] = (sd_df['Return Per Trade (-)']-1)*sd_df['Balance used in Trade (-)']
343
- sd_df['Cumulative P/L (-)'] = sd_df['Net P/L Per Trade (-)'].cumsum()
344
- return sd_df
345
-
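The get_sd_df helper above is currently dead code (its call sites are commented out in runapp below), but the +/- standard-deviation adjustment it applies per trade reduces to shifting the entry and exit prices by sd in or against the trade's favor. A small sketch with made-up prices, assuming a long trade and ignoring the DCA weighting:

fees = .075 / 100
sd = 2 * .00026           # two standard deviations, as hard-coded in get_sd_df
buy, sell = 100.0, 103.0  # made-up prices

def adjusted_return(buy, sell, sd, sign=+1):
    # sign=+1 shifts both prices in the trade's favor, sign=-1 against it
    buy_adj = buy * (1 - sign * sd)
    sell_adj = sell * (1 + sign * sd)
    return (1 - fees) * ((sell_adj - buy_adj) / buy_adj - fees)

print(adjusted_return(buy, sell, sd, +1))  # upper band: slightly better than the raw return
print(adjusted_return(buy, sell, sd, -1))  # lower band: slightly worse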
346
- def runapp() -> None:
347
- bot_selections = "Cinnamon Toast"
348
- otimeheader = 'Exit Date'
349
- fmat = '%Y-%m-%d %H:%M:%S'
350
- fees = .075/100
351
-
352
- st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
353
- no_errors = True
354
- st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
355
- "the performance of our trading bots.")
356
-
357
- if bot_selections == "Cinnamon Toast":
358
- lev_cap = 5
359
- dollar_cap = 1000000000.00
360
- data = load_data("CT-Trade-Log.csv",otimeheader, fmat)
361
- if bot_selections == "French Toast":
362
- lev_cap = 3
363
- dollar_cap = 10000000000.00
364
- data = load_data("FT-Trade-Log.csv",otimeheader, fmat)
365
- if bot_selections == "Short Bread":
366
- lev_cap = 5
367
- dollar_cap = 100000.00
368
- data = load_data("SB-Trade-Log.csv",otimeheader, fmat)
369
- if bot_selections == "Cosmic Cupcake":
370
- lev_cap = 3
371
- dollar_cap = 100000.00
372
- data = load_data("CC-Trade-Log.csv",otimeheader, fmat)
373
- if bot_selections == "CT Toasted":
374
- lev_cap = 5
375
- dollar_cap = 100000.00
376
- data = load_data("CT-Toasted-Trade-Log.csv",otimeheader, fmat)
377
-
378
- df = data.copy(deep=True)
379
-
380
- dateheader = 'Date'
381
- theader = 'Time'
382
-
383
- st.subheader("Choose your settings:")
384
- with st.form("user input", ):
385
- if no_errors:
386
- with st.container():
387
- col1, col2 = st.columns(2)
388
- with col1:
389
- try:
390
- startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
391
- except:
392
- st.error("Please select your exchange or upload a supported trade log file.")
393
- no_errors = False
394
- with col2:
395
- try:
396
- enddate = st.date_input("End Date", value=datetime.today())
397
- except:
398
- st.error("Please select your exchange or upload a supported trade log file.")
399
- no_errors = False
400
- #st.sidebar.subheader("Customize your Dashboard")
401
-
402
- if no_errors and (enddate < startdate):
403
- st.error("End Date must be later than Start date. Please try again.")
404
- no_errors = False
405
- with st.container():
406
- col1,col2 = st.columns(2)
407
- with col2:
408
- lev = st.number_input('Leverage', min_value=1, value=1, max_value= lev_cap, step=1)
409
- with col1:
410
- principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
411
-
412
- if bot_selections == "Cinnamon Toast":
413
- st.write("Choose your DCA setup (for trades before 02/07/2023)")
414
- with st.container():
415
- col1, col2, col3, col4 = st.columns(4)
416
- with col1:
417
- dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value= 100, step=1)
418
- with col2:
419
- dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value= 100, step=1)
420
- with col3:
421
- dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value= 100, step=1)
422
- with col4:
423
- dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value= 100, step=1)
424
- st.write("Choose your DCA setup (for trades on or after 02/07/2023)")
425
- with st.container():
426
- col1, col2 = st.columns(2)
427
- with col1:
428
- dca5 = st.number_input('DCA 1 Allocation', min_value=0, value=50, max_value= 100, step=1)
429
- with col2:
430
- dca6 = st.number_input('DCA 2 Allocation', min_value=0, value=50, max_value= 100, step=1)
431
-
432
- #hack way to get button centered
433
- c = st.columns(9)
434
- with c[4]:
435
- submitted = st.form_submit_button("Get Cookin'!")
436
-
437
- if submitted and principal_balance * lev > dollar_cap:
438
- lev = np.floor(dollar_cap/principal_balance)
439
- st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
440
-
441
- if submitted and no_errors:
442
- df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
443
- signal_map = {'Long': 1, 'Short':-1}
444
-
445
-
446
- if len(df) == 0:
447
- st.error("There are no available trades matching your selections. Please try again!")
448
- no_errors = False
449
-
450
- if no_errors:
451
- if bot_selections == "Cinnamon Toast":
452
- dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
453
- df['DCA %'] = df['DCA'].map(dca_map)
454
- df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
455
- df['DCA'] = np.floor(df['DCA'].values)
456
-
457
- df['Return Per Trade'] = np.nan
458
- df['Balance used in Trade'] = np.nan
459
- df['New Balance'] = np.nan
460
-
461
- g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
462
- df.loc[df['DCA']==1.0,'Return Per Trade'] = 1+lev*g['Return Per Trade'].values
463
-
464
- df['Compounded Return'] = df['Return Per Trade'].cumprod()
465
- df.loc[df['DCA']==1.0,'New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df.loc[df['DCA']==1.0,'Compounded Return']]
466
- df.loc[df['DCA']==1.0,'Balance used in Trade'] = np.concatenate([[principal_balance], df.loc[df['DCA']==1.0,'New Balance'].values[:-1]])
467
- else:
468
- df['Calculated Return %'] = df['Signal'].map(signal_map)*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
469
- df['Return Per Trade'] = np.nan
470
- g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
471
- df['Return Per Trade'] = 1+lev*g['Return Per Trade'].values
472
-
473
- df['Compounded Return'] = df['Return Per Trade'].cumprod()
474
- df['New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df['Compounded Return']]
475
- df['Balance used in Trade'] = np.concatenate([[principal_balance], df['New Balance'].values[:-1]])
476
- df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
477
- df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
478
-
479
- if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
480
- cum_pl = df.loc[df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L'] + principal_balance
481
- #cum_sdp = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
482
- #cum_sdm = sd_df.loc[sd_df.drop('Drawdown %', axis=1).dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
483
- else:
484
- cum_pl = df.loc[df.dropna().index[-1],'Cumulative P/L'] + principal_balance
485
- #cum_sdp = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (+)'] + principal_balance
486
- #cum_sdm = sd_df.loc[sd_df.dropna().index[-1],'Cumulative P/L (-)'] + principal_balance
487
- #sd = 2*.00026
488
- #sd_df = get_sd_df(df.copy(), sd, bot_selections, dca1, dca2, dca3, dca4, dca5, dca6, fees, lev, dollar_cap, principal_balance)
489
-
490
- effective_return = 100*((cum_pl - principal_balance)/principal_balance)
491
-
492
- st.header(f"{bot_selections} Results")
493
- with st.container():
494
-
495
- if len(bot_selections) > 1:
496
- col1, col2 = st.columns(2)
497
- with col1:
498
- st.metric(
499
- "Total Account Balance",
500
- f"${cum_pl:.2f}",
501
- f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
502
- )
503
-
504
- # with col2:
505
- # st.write("95% of trades should fall within this 2 std. dev. range.")
506
- # st.metric(
507
- # "High Range (+ 2 std. dev.)",
508
- # f"", #${cum_sdp:.2f}
509
- # f"{100*(cum_sdp-principal_balance)/(principal_balance):.2f} %",
510
- # )
511
- # st.metric(
512
- # "Low Range (- 2 std. dev.)",
513
- # f"" ,#${cum_sdm:.2f}"
514
- # f"{100*(cum_sdm-principal_balance)/(principal_balance):.2f} %",
515
- # )
516
- if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
517
- #st.line_chart(data=df.drop('Drawdown %', axis=1).dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
518
- dfdata = df.drop('Drawdown %', axis=1).dropna()
519
- #sd_df = sd_df.drop('Drawdown %', axis=1).dropna()
520
- else:
521
- #st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
522
- dfdata = df.dropna()
523
- #sd_df = sd_df.dropna()
524
-
525
- # Create figure
526
- fig = go.Figure()
527
-
528
- pyLogo = Image.open("logo.png")
529
-
530
- # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (+)'],line_shape='spline',
531
- # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), showlegend = False)
532
- # )
533
-
534
- # fig.add_traces(go.Scatter(x=sd_df['Exit Date'], y = sd_df['Cumulative P/L (-)'],
535
- # line = dict(smoothing = 1.3, color='rgba(31, 119, 200,0)'), line_shape='spline',
536
- # fill='tonexty',
537
- # fillcolor = 'rgba(31, 119, 200,.2)', name = '+/- Standard Deviation')
538
- # )
539
-
540
- # Add trace
541
- fig.add_trace(
542
- go.Scatter(x=dfdata['Exit Date'], y=np.round(dfdata['Cumulative P/L'].values,2), line_shape='spline',
543
- line = {'smoothing': 1.0, 'color' : 'rgba(31, 119, 200,.8)'},
544
- name='Cumulative P/L')
545
- )
546
- buyhold = (principal_balance/dfdata['Buy Price'][dfdata.index[0]])*(dfdata['Buy Price']-dfdata['Buy Price'][dfdata.index[0]])
547
- fig.add_trace(go.Scatter(x=dfdata['Exit Date'], y=np.round(buyhold.values,2), line_shape='spline',
548
- line = {'smoothing': 1.0, 'color' :'red'}, name = 'Buy & Hold Return')
549
- )
550
-
551
- fig.add_layout_image(
552
- dict(
553
- source=pyLogo,
554
- xref="paper",
555
- yref="paper",
556
- x = 0.05, #dfdata['Exit Date'].astype('int64').min() // 10**9,
557
- y = .85, #dfdata['Cumulative P/L'].max(),
558
- sizex= .9, #(dfdata['Exit Date'].astype('int64').max() - dfdata['Exit Date'].astype('int64').min()) // 10**9,
559
- sizey= .9, #(dfdata['Cumulative P/L'].max() - dfdata['Cumulative P/L'].min()),
560
- sizing="contain",
561
- opacity=0.2,
562
- layer = "below")
563
- )
564
-
565
- #style layout
566
- fig.update_layout(
567
- height = 600,
568
- xaxis=dict(
569
- title="Exit Date",
570
- tickmode='array',
571
- ),
572
- yaxis=dict(
573
- title="Cumulative P/L"
574
- ) )
575
-
576
- st.plotly_chart(fig, theme=None, use_container_width=True,height=600)
577
- st.write()
578
- df['Per Trade Return Rate'] = df['Return Per Trade']-1
579
-
580
- totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
581
- if bot_selections == "Cinnamon Toast" or bot_selections == "Cosmic Cupcake":
582
- data = get_hist_info(df.drop('Drawdown %', axis=1).dropna(), principal_balance,'Per Trade Return Rate')
583
- else:
584
- data = get_hist_info(df.dropna(), principal_balance,'Per Trade Return Rate')
585
- totals.loc[len(totals)] = list(i for i in data)
586
-
587
- totals['Cum. P/L'] = cum_pl-principal_balance
588
- totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
589
-
590
- if df.empty:
591
- st.error("Oops! None of the data provided matches your selection(s). Please try again.")
592
- else:
593
- with st.container():
594
- for row in totals.itertuples():
595
- col1, col2, col3, col4= st.columns(4)
596
- c1, c2, c3, c4 = st.columns(4)
597
- with col1:
598
- st.metric(
599
- "Total Trades",
600
- f"{row._1:.0f}",
601
- )
602
- with c1:
603
- st.metric(
604
- "Profit Factor",
605
- f"{row._5:.2f}",
606
- )
607
- with col2:
608
- st.metric(
609
- "Wins",
610
- f"{row.Wins:.0f}",
611
- )
612
- with c2:
613
- st.metric(
614
- "Cumulative P/L",
615
- f"${row._6:.2f}",
616
- f"{row._7:.2f} %",
617
- )
618
- with col3:
619
- st.metric(
620
- "Losses",
621
- f"{row.Losses:.0f}",
622
- )
623
- with c3:
624
- st.metric(
625
- "Rolling 7 Days",
626
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
627
- f"{get_rolling_stats(df,lev, otimeheader, 7):.2f}%",
628
- )
629
- st.metric(
630
- "Rolling 30 Days",
631
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
632
- f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
633
- )
634
-
635
- with col4:
636
- st.metric(
637
- "Win Rate",
638
- f"{row._4:.1f}%",
639
- )
640
- with c4:
641
- st.metric(
642
- "Rolling 90 Days",
643
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
644
- f"{get_rolling_stats(df,lev, otimeheader, 90):.2f}%",
645
- )
646
- st.metric(
647
- "Rolling 180 Days",
648
- "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
649
- f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
650
- )
651
-
652
- if bot_selections == "Cinnamon Toast":
653
- if submitted:
654
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
655
- 'Sell Price' : 'max',
656
- 'Net P/L Per Trade': 'mean',
657
- 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2),
658
- 'DCA': lambda x: int(np.floor(x.max()))})
659
- grouped_df.index = range(1, len(grouped_df)+1)
660
- grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
661
- 'Net P/L Per Trade':'Net P/L',
662
- 'Calculated Return %':'P/L %'}, inplace=True)
663
- else:
664
- dca_map = {1: 25/100, 2: 25/100, 3: 25/100, 4: 25/100, 1.1: 50/100, 2.1: 50/100}
665
- df['DCA %'] = df['DCA'].map(dca_map)
666
- df['Calculated Return %'] = (df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
667
-
668
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
669
- 'Sell Price' : 'max',
670
- 'P/L per token': 'mean',
671
- 'Calculated Return %' : lambda x: np.round(100*x.sum(),2),
672
- 'DCA': lambda x: int(np.floor(x.max()))})
673
- grouped_df.index = range(1, len(grouped_df)+1)
674
- grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
675
- 'Calculated Return %':'P/L %',
676
- 'P/L per token':'Net P/L'}, inplace=True)
677
-
678
- else:
679
- if submitted:
680
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
681
- 'Sell Price' : 'max',
682
- 'Net P/L Per Trade': 'mean',
683
- 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2)})
684
- grouped_df.index = range(1, len(grouped_df)+1)
685
- grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
686
- 'Net P/L Per Trade':'Net P/L',
687
- 'Calculated Return %':'P/L %'}, inplace=True)
688
- else:
689
- grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
690
- 'Sell Price' : 'max',
691
- 'P/L per token': 'mean',
692
- 'P/L %':'mean'})
693
- grouped_df.index = range(1, len(grouped_df)+1)
694
- grouped_df.rename(columns={'Buy Price':'Avg. Buy Price',
695
- 'P/L per token':'Net P/L'}, inplace=True)
696
- st.subheader("Trade Logs")
697
- grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
698
- grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
699
- if bot_selections == "Cosmic Cupcake" or bot_selections == "CT Toasted":
700
- coding = cc_coding if bot_selections == "Cosmic Cupcake" else ctt_coding
701
- st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
702
- .apply(coding, axis=1)\
703
- .applymap(my_style,subset=['Net P/L'])\
704
- .applymap(my_style,subset=['P/L %']), use_container_width=True)
705
- new_title = '<div style="text-align: right;"><span style="background-color:lightgrey;">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</span> Not Live Traded</div>'
706
- st.markdown(new_title, unsafe_allow_html=True)
707
- else:
708
- st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', 'Net P/L':'${:.2f}', 'P/L %':'{:.2f}%'})\
709
- .applymap(my_style,subset=['Net P/L'])\
710
- .applymap(my_style,subset=['P/L %']), use_container_width=True)
711
-
712
- # st.subheader("Checking Status")
713
- # if submitted:
714
- # st.dataframe(sd_df)
715
-
716
- if __name__ == "__main__":
717
- st.set_page_config(
718
- "Trading Bot Dashboard",
719
- layout="wide",
720
- )
721
- runapp()
722
- # -
723
-
724
-
725
-
726
-
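As an editorial aside on the balance math that runapp above applies after aggregating returns by exit date: each trade's summed return is levered as 1 + lev * r, compounded across trades, and the balance deployed in the next trade is capped so that balance * lev never exceeds dollar_cap. A minimal sketch with invented numbers; the variable names mirror the column names above and nothing here comes from a real trade log:

import numpy as np
import pandas as pd

returns = pd.Series([0.012, -0.008, 0.020])   # hypothetical fee-adjusted returns, one per exit date
lev, principal_balance, dollar_cap = 2, 1000.00, 100000.00

per_trade = 1 + lev * returns                                               # 'Return Per Trade'
compounded = per_trade.cumprod()                                            # 'Compounded Return'
new_balance = np.minimum(dollar_cap / lev, compounded * principal_balance)  # 'New Balance'
used = np.concatenate([[principal_balance], new_balance.values[:-1]])       # 'Balance used in Trade'
net_pl = (per_trade.values - 1) * used                                      # 'Net P/L Per Trade'
print(net_pl.cumsum()[-1])                                                  # 'Cumulative P/L' after the last trade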
old_app.py ADDED
@@ -0,0 +1,364 @@
1
+ # ---
2
+ # jupyter:
3
+ # jupytext:
4
+ # text_representation:
5
+ # extension: .py
6
+ # format_name: light
7
+ # format_version: '1.5'
8
+ # jupytext_version: 1.14.2
9
+ # kernelspec:
10
+ # display_name: Python [conda env:bbytes] *
11
+ # language: python
12
+ # name: conda-env-bbytes-py
13
+ # ---
14
+
15
+ # +
16
+ import csv
17
+ import pandas as pd
18
+ from datetime import datetime, timedelta
19
+ import numpy as np
20
+ import datetime as dt
21
+ import matplotlib.pyplot as plt
22
+ from pathlib import Path
23
+
24
+ import streamlit as st
25
+ import plotly.express as px
26
+ import altair as alt
27
+ import dateutil.parser
28
+ import copy
29
+
30
+
31
+ # +
32
+ @st.experimental_memo
33
+ def get_hist_info(df_coin, principal_balance,plheader):
34
+ numtrades = int(len(df_coin))
35
+ numwin = int(sum(df_coin[plheader] > 0))
36
+ numloss = int(sum(df_coin[plheader] < 0))
37
+ winrate = int(np.round(100*numwin/numtrades,2))
38
+
39
+ grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
40
+ grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
41
+ if grossloss !=0:
42
+ pfactor = -1*np.round(grosswin/grossloss,2)
43
+ else:
44
+ pfactor = np.nan
45
+ return numtrades, numwin, numloss, winrate, pfactor
46
+ @st.experimental_memo
47
+ def get_rolling_stats(df, lev, otimeheader, days):
48
+ max_roll = (df[otimeheader].max() - df[otimeheader].min()).days
49
+
50
+ if max_roll >= days:
51
+ rollend = df[otimeheader].max()-timedelta(days=days)
52
+ rolling_df = df[df[otimeheader] >= rollend]
53
+
54
+ if len(rolling_df) > 0:
55
+ rolling_perc = rolling_df['Return Per Trade'].dropna().cumprod().values[-1]-1
56
+ else:
57
+ rolling_perc = np.nan
58
+ else:
59
+ rolling_perc = np.nan
60
+ return 100*rolling_perc
61
+
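A quick illustration of how get_rolling_stats above produces an N-day figure: it keeps only trades whose exit falls inside the trailing window and compounds their 'Return Per Trade' factors. The mini trade log below is invented purely to show the shape of the calculation:

import pandas as pd
from datetime import timedelta

demo = pd.DataFrame({
    'Exit Date': pd.to_datetime(['2023-02-01', '2023-02-20', '2023-03-01']),
    'Return Per Trade': [1.01, 0.99, 1.02],
})

days = 30
rollend = demo['Exit Date'].max() - timedelta(days=days)
window = demo[demo['Exit Date'] >= rollend]
rolling_perc = window['Return Per Trade'].cumprod().values[-1] - 1
print(f"{100 * rolling_perc:.2f}%")  # compounded return over the trailing 30 days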
62
+ @st.experimental_memo
63
+ def filt_df(df, cheader, symbol_selections):
64
+ """
65
+ Inputs: df (pd.DataFrame), cheader (str) and symbol_selections (list[str]).
66
+
67
+ Returns a filtered pd.DataFrame containing only data that matches symbol_selections (list[str])
68
+ from df[cheader].
69
+ """
70
+
71
+ df = df.copy()
72
+ df = df[df[cheader].isin(symbol_selections)]
73
+
74
+ return df
75
+
76
+ @st.experimental_memo
77
+ def my_style(v, props=''):
78
+ props = 'color:red' if v < 0 else 'color:green'
79
+ return props
80
+
81
+ @st.cache(ttl=24*3600, allow_output_mutation=True)
82
+ def load_data(filename, otimeheader, fmat):
83
+ df = pd.read_csv(open(filename,'r'), sep='\t') # so as not to mutate cached value
84
+ df.columns = ['Trade','Entry Date','Buy Price', 'Sell Price','Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
85
+ df.insert(1, 'Signal', ['Long']*len(df))
86
+
87
+ df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
88
+ df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
89
+ df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
90
+ df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
91
+ df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
92
+ df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)
93
+
94
+ df['Buy Price'] = pd.to_numeric(df['Buy Price'])
95
+ df['Sell Price'] = pd.to_numeric(df['Sell Price'])
96
+ df['P/L per token'] = pd.to_numeric(df['P/L per token'])
97
+ df['P/L %'] = pd.to_numeric(df['P/L %'])
98
+
99
+ dateheader = 'Date'
100
+ theader = 'Time'
101
+
102
+ df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
103
+ df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]
104
+
105
+ df[otimeheader]= [dateutil.parser.parse(date+' '+time)
106
+ for date,time in zip(df[dateheader],df[theader])]
107
+
108
+ df[otimeheader] = pd.to_datetime(df[otimeheader])
109
+ df['Exit Date'] = pd.to_datetime(df['Exit Date'])
110
+ df.sort_values(by=otimeheader, inplace=True)
111
+
112
+ df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
113
+ df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
114
+ df['Trade'] = df.index + 1 #reindex
115
+
116
+ df['DCA'] = np.nan
117
+
118
+ for exit in pd.unique(df['Exit Date']):
119
+ df_exit = df[df['Exit Date']==exit]
120
+ if dateutil.parser.parse(str(exit)) < dateutil.parser.parse('2023-02-07 13:00:00'):
121
+ for i in range(len(df_exit)):
122
+ ind = df_exit.index[i]
123
+ df.loc[ind,'DCA'] = i+1
124
+
125
+ else:
126
+ for i in range(len(df_exit)):
127
+ ind = df_exit.index[i]
128
+ df.loc[ind,'DCA'] = i+1.1
129
+ return df
130
+
131
+ def runapp():
132
+ bot_selections = "Cinnamon Toast"
133
+ otimeheader = 'Exit Date'
134
+ fmat = '%Y-%m-%d %H:%M:%S'
135
+ dollar_cap = 100000.00
136
+ fees = .075/100
137
+ st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
138
+ st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
139
+ "the performance of our trading bots.")
140
+ # st.sidebar.header("FAQ")
141
+
142
+ # with st.sidebar.subheader("FAQ"):
143
+ # st.write(Path("FAQ_README.md").read_text())
144
+ st.subheader("Choose your settings:")
145
+ no_errors = True
146
+
147
+ data = load_data("CT-Trade-Log.csv",otimeheader, fmat)
148
+ df = data.copy(deep=True)
149
+
150
+ dateheader = 'Date'
151
+ theader = 'Time'
152
+
153
+ with st.form("user input", ):
154
+ if no_errors:
155
+ with st.container():
156
+ col1, col2 = st.columns(2)
157
+ with col1:
158
+ try:
159
+ startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
160
+ except:
161
+ st.error("Please select your exchange or upload a supported trade log file.")
162
+ no_errors = False
163
+ with col2:
164
+ try:
165
+ enddate = st.date_input("End Date", value=datetime.today())
166
+ except:
167
+ st.error("Please select your exchange or upload a supported trade log file.")
168
+ no_errors = False
169
+ #st.sidebar.subheader("Customize your Dashboard")
170
+
171
+ if no_errors and (enddate < startdate):
172
+ st.error("End Date must be later than Start date. Please try again.")
173
+ no_errors = False
174
+ with st.container():
175
+ col1,col2 = st.columns(2)
176
+ with col2:
177
+ lev = st.number_input('Leverage', min_value=1, value=1, max_value= 5, step=1)
178
+ with col1:
179
+ principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value= dollar_cap, step=.01)
180
+ st.write("Choose your DCA setup (for trades before 02/07/2023)")
181
+ with st.container():
182
+ col1, col2, col3, col4 = st.columns(4)
183
+ with col1:
184
+ dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value= 100, step=1)
185
+ with col2:
186
+ dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value= 100, step=1)
187
+ with col3:
188
+ dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value= 100, step=1)
189
+ with col4:
190
+ dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value= 100, step=1)
191
+ st.write("Choose your DCA setup (for trades on or after 02/07/2023)")
192
+ with st.container():
193
+ col1, col2 = st.columns(2)
194
+ with col1:
195
+ dca5 = st.number_input('DCA 1 Allocation', min_value=0, value=50, max_value= 100, step=1)
196
+ with col2:
197
+ dca6 = st.number_input('DCA 2 Allocation', min_value=0, value=50, max_value= 100, step=1)
198
+
199
+
200
+ #hack way to get button centered
201
+ c = st.columns(9)
202
+ with c[4]:
203
+ submitted = st.form_submit_button("Get Cookin'!")
204
+
205
+ signal_map = {'Long': 1, 'Short':-1} # 1 for long #-1 for short
206
+ dca_map = {1: 25/100, 2: 25/100, 3: 25/100, 4: 25/100, 1.1: 50/100, 2.1: 50/100}
207
+ df['DCA %'] = df['DCA'].map(dca_map)
208
+ df['Calculated Return %'] = (df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
209
+
210
+ if submitted and principal_balance * lev > dollar_cap:
211
+ lev = np.floor(dollar_cap/principal_balance)
212
+ st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")
213
+
214
+ if submitted and no_errors:
215
+ df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]
216
+
217
+ if len(df) == 0:
218
+ st.error("There are no available trades matching your selections. Please try again!")
219
+ no_errors = False
220
+ if no_errors:
221
+
222
+ dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100, 1.1: dca5/100, 2.1: dca6/100}
223
+ df['DCA %'] = df['DCA'].map(dca_map)
224
+ df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees) #accounts for fees on open and close of trade
225
+ df['DCA'] = np.floor(df['DCA'].values)
226
+
227
+ df['Return Per Trade'] = np.nan
228
+ df['Balance used in Trade'] = np.nan
229
+ df['New Balance'] = np.nan
230
+
231
+ g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')
232
+
233
+ df.loc[df['DCA']==1.0,'Return Per Trade'] = 1+lev*g['Return Per Trade'].values
234
+
235
+ df['Compounded Return'] = df['Return Per Trade'].cumprod()
236
+ df.loc[df['DCA']==1.0,'New Balance'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df.loc[df['DCA']==1.0,'Compounded Return']]
237
+ df.loc[df['DCA']==1.0,'Balance used in Trade'] = np.concatenate([[principal_balance], df.loc[df['DCA']==1.0,'New Balance'].values[:-1]])
238
+ df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*df['Balance used in Trade']
239
+ df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
240
+ cum_pl = df.loc[df.dropna().index[-1],'Cumulative P/L'] + principal_balance
241
+
242
+ effective_return = 100*((cum_pl - principal_balance)/principal_balance)
243
+
244
+ st.header(f"{bot_selections} Results")
245
+ if len(bot_selections) > 1:
246
+ st.metric(
247
+ "Total Account Balance",
248
+ f"${cum_pl:.2f}",
249
+ f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
250
+ )
251
+
252
+ st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)
253
+
254
+ df['Per Trade Return Rate'] = df['Return Per Trade']-1
255
+
256
+ totals = pd.DataFrame([], columns = ['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
257
+ data = get_hist_info(df.dropna(), principal_balance,'Per Trade Return Rate')
258
+ totals.loc[len(totals)] = list(i for i in data)
259
+
260
+ totals['Cum. P/L'] = cum_pl-principal_balance
261
+ totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
262
+ #results_df['Avg. P/L'] = (cum_pl-principal_balance)/results_df['# of Trades'].values[0]
263
+ #results_df['Avg. P/L (%)'] = 100*results_df['Avg. P/L'].values[0]/principal_balance
264
+
265
+ if df.empty:
266
+ st.error("Oops! None of the data provided matches your selection(s). Please try again.")
267
+ else:
268
+ #st.dataframe(totals.style.format({'# of Trades': '{:.0f}','Wins': '{:.0f}','Losses': '{:.0f}','Win Rate': '{:.2f}%','Profit Factor' : '{:.2f}', 'Avg. P/L (%)': '{:.2f}%', 'Cum. P/L (%)': '{:.2f}%', 'Cum. P/L': '{:.2f}', 'Avg. P/L': '{:.2f}'})
269
+ #.text_gradient(subset=['Win Rate'],cmap="RdYlGn", vmin = 0, vmax = 100)\
270
+ #.text_gradient(subset=['Profit Factor'],cmap="RdYlGn", vmin = 0, vmax = 2), use_container_width=True)
271
+ for row in totals.itertuples():
272
+ col1, col2, col3, col4 = st.columns(4)
273
+ c1, c2, c3, c4 = st.columns(4)
274
+ with col1:
275
+ st.metric(
276
+ "Total Trades",
277
+ f"{row._1:.0f}",
278
+ )
279
+ with c1:
280
+ st.metric(
281
+ "Profit Factor",
282
+ f"{row._5:.2f}",
283
+ )
284
+ with col2:
285
+ st.metric(
286
+ "Wins",
287
+ f"{row.Wins:.0f}",
288
+ )
289
+ with c2:
290
+ st.metric(
291
+ "Cumulative P/L",
292
+ f"${row._6:.2f}",
293
+ f"{row._7:.2f} %",
294
+ )
295
+ with col3:
296
+ st.metric(
297
+ "Losses",
298
+ f"{row.Losses:.0f}",
299
+ )
300
+ with c3:
301
+ st.metric(
302
+ "Rolling 7 Days",
303
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
304
+ f"{get_rolling_stats(df,lev, otimeheader,7):.2f}%",
305
+ )
306
+ st.metric(
307
+ "Rolling 30 Days",
308
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
309
+ f"{get_rolling_stats(df,lev, otimeheader, 30):.2f}%",
310
+ )
311
+
312
+ with col4:
313
+ st.metric(
314
+ "Win Rate",
315
+ f"{row._4:.1f}%",
316
+ )
317
+ with c4:
318
+ st.metric(
319
+ "Rolling 90 Days",
320
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
321
+ f"{get_rolling_stats(df,lev, otimeheader,90):.2f}%",
322
+ )
323
+ st.metric(
324
+ "Rolling 180 Days",
325
+ "",#f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
326
+ f"{get_rolling_stats(df,lev, otimeheader, 180):.2f}%",
327
+ )
328
+ if submitted:
329
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
330
+ 'Sell Price' : 'max',
331
+ 'Net P/L Per Trade': 'mean',
332
+ 'Calculated Return %' : lambda x: np.round(100*lev*x.sum(),2),
333
+ 'DCA': lambda x: int(np.floor(x.max()))})
334
+ grouped_df.index = range(1, len(grouped_df)+1)
335
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
336
+ 'Net P/L Per Trade':'Net P/L',
337
+ 'Calculated Return %':'P/L %'}, inplace=True)
338
+ else:
339
+ grouped_df = df.groupby('Exit Date').agg({'Signal':'min','Entry Date': 'min','Exit Date': 'max','Buy Price': 'mean',
340
+ 'Sell Price' : 'max',
341
+ 'P/L per token': 'mean',
342
+ 'Calculated Return %' : lambda x: np.round(100*x.sum(),2),
343
+ 'DCA': lambda x: int(np.floor(x.max()))})
344
+ grouped_df.index = range(1, len(grouped_df)+1)
345
+ grouped_df.rename(columns={'DCA' : '# of DCAs', 'Buy Price':'Avg. Buy Price',
346
+ 'Calculated Return %':'P/L %',
347
+ 'P/L per token':'Net P/L'}, inplace=True)
348
+
349
+ st.subheader("Trade Logs")
350
+ grouped_df['Entry Date'] = pd.to_datetime(grouped_df['Entry Date'])
351
+ grouped_df['Exit Date'] = pd.to_datetime(grouped_df['Exit Date'])
352
+ st.dataframe(grouped_df.style.format({'Entry Date':'{:%m-%d-%Y %H:%M:%S}','Exit Date':'{:%m-%d-%Y %H:%M:%S}','Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}','# of DCAs':'{:.0f}', 'Net P/L':'${:.2f}', 'P/L %' :'{:.2f}%'})\
353
+ .applymap(my_style,subset=['Net P/L'])\
354
+ .applymap(my_style,subset=['P/L %']), use_container_width=True)
355
+
356
+ if __name__ == "__main__":
357
+ st.set_page_config(
358
+ "Trading Bot Dashboard",
359
+ layout="wide",
360
+ )
361
+ runapp()
362
+ # -
363
+
364
+
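Finally, a small sketch of the per-exit-date aggregation both versions use to build the displayed trade log: DCA fills that close together collapse into one row, averaging the buy price, taking the highest sell price, and summing the per-fill returns. The rows below are fabricated, and 'Exit Date' is deliberately left out of the agg dict here because it is already the group key:

import numpy as np
import pandas as pd

lev = 1
trades = pd.DataFrame({
    'Exit Date': pd.to_datetime(['2023-01-05', '2023-01-05', '2023-03-01']),
    'Entry Date': pd.to_datetime(['2023-01-02', '2023-01-03', '2023-02-25']),
    'Signal': ['Long', 'Long', 'Long'],
    'Buy Price': [100.0, 98.0, 105.0],
    'Sell Price': [103.0, 103.0, 104.0],
    'Calculated Return %': [0.0074, 0.0126, -0.0095],  # per-fill, fee-adjusted fractions
    'DCA': [1.0, 2.0, 1.0],
})

grouped = trades.groupby('Exit Date').agg({
    'Signal': 'min', 'Entry Date': 'min', 'Buy Price': 'mean', 'Sell Price': 'max',
    'Calculated Return %': lambda x: np.round(100 * lev * x.sum(), 2),
    'DCA': lambda x: int(np.floor(x.max())),
})
grouped.index = range(1, len(grouped) + 1)
print(grouped.rename(columns={'DCA': '# of DCAs', 'Buy Price': 'Avg. Buy Price',
                              'Calculated Return %': 'P/L %'}))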