Commit 29deb23 by anaucoin
Parent(s): 33f02f1

initial commit of app files

Files changed:
- ct_app.py  +337 -0
- requirements.txt  +8 -0
ct_app.py
ADDED
@@ -0,0 +1,337 @@
# ---
# jupyter:
#   jupytext:
#     text_representation:
#       extension: .py
#       format_name: light
#       format_version: '1.5'
#       jupytext_version: 1.14.2
#   kernelspec:
#     display_name: Python [conda env:bbytes] *
#     language: python
#     name: conda-env-bbytes-py
# ---

# +
import csv
import pandas as pd
from datetime import datetime, timedelta
import numpy as np
import datetime as dt
import matplotlib.pyplot as plt
from pathlib import Path

import streamlit as st
import plotly.express as px
import altair as alt
import dateutil.parser
import copy


# +
@st.experimental_memo
def get_hist_info(df_coin, principal_balance, plheader):
    numtrades = int(len(df_coin))
    numwin = int(sum(df_coin[plheader] > 0))
    numloss = int(sum(df_coin[plheader] < 0))
    winrate = int(np.round(100*numwin/numtrades, 2))

    grosswin = sum(df_coin[df_coin[plheader] > 0][plheader])
    grossloss = sum(df_coin[df_coin[plheader] < 0][plheader])
    if grossloss != 0:
        pfactor = -1*np.round(grosswin/grossloss, 2)
    else:
        pfactor = np.nan
    return numtrades, numwin, numloss, winrate, pfactor

@st.experimental_memo
def get_rolling_stats(df, lev, otimeheader, days):
    rollend = datetime.today()-timedelta(days=days)
    rolling_df = df[df[otimeheader] >= rollend]

    if len(rolling_df) > 0:
        rolling_perc = rolling_df['Return Per Trade'].dropna().cumprod().values[-1]-1
    else:
        rolling_perc = 0
    return 100*lev*rolling_perc

@st.experimental_memo
def filt_df(
        df: pd.DataFrame, cheader: str, symbol_selections: list[str]) -> pd.DataFrame:
    """
    Inputs: df (pd.DataFrame), cheader (str) and symbol_selections (list[str]).

    Returns a filtered pd.DataFrame containing only data that matches symbol_selections (list[str])
    from df[cheader].
    """

    df = df.copy()
    df = df[df[cheader].isin(symbol_selections)]

    return df

@st.experimental_memo
def my_style(v, props=''):
    props = 'color:red' if v < 0 else 'color:green'
    return props

@st.cache(ttl=24*3600, allow_output_mutation=True)
def load_data(filename, otimeheader, fmat):
    df = pd.read_csv(open(filename, 'r'), sep='\t')  # so as not to mutate cached value
    df.columns = ['Trade', 'Entry Date', 'Buy Price', 'Sell Price', 'Exit Date', 'P/L per token', 'P/L %', 'Drawdown %']
    df.insert(1, 'Signal', ['Long']*len(df))

    df['Buy Price'] = df['Buy Price'].str.replace('$', '', regex=True)
    df['Sell Price'] = df['Sell Price'].str.replace('$', '', regex=True)
    df['Buy Price'] = df['Buy Price'].str.replace(',', '', regex=True)
    df['Sell Price'] = df['Sell Price'].str.replace(',', '', regex=True)
    df['P/L per token'] = df['P/L per token'].str.replace('$', '', regex=True)
    df['P/L %'] = df['P/L %'].str.replace('%', '', regex=True)

    df['Buy Price'] = pd.to_numeric(df['Buy Price'])
    df['Sell Price'] = pd.to_numeric(df['Sell Price'])
    df['P/L per token'] = pd.to_numeric(df['P/L per token'])
    df['P/L %'] = pd.to_numeric(df['P/L %'])

    dateheader = 'Date'
    theader = 'Time'

    df[dateheader] = [tradetimes.split(" ")[0] for tradetimes in df[otimeheader].values]
    df[theader] = [tradetimes.split(" ")[1] for tradetimes in df[otimeheader].values]

    df[otimeheader] = [dateutil.parser.parse(date+' '+time)
                       for date, time in zip(df[dateheader], df[theader])]

    df[otimeheader] = pd.to_datetime(df[otimeheader])
    df['Exit Date'] = pd.to_datetime(df['Exit Date'])
    df.sort_values(by=otimeheader, inplace=True)

    df[dateheader] = [dateutil.parser.parse(date).date() for date in df[dateheader]]
    df[theader] = [dateutil.parser.parse(time).time() for time in df[theader]]
    df['Trade'] = df.index + 1  # reindex

    df['DCA'] = np.nan

    for exit in pd.unique(df['Exit Date']):
        df_exit = df[df['Exit Date'] == exit]
        for i in range(len(df_exit)):
            ind = df_exit.index[i]
            df.loc[ind, 'DCA'] = i+1
    return df

def runapp() -> None:
    bot_selections = "Cinnamon Toast"
    otimeheader = 'Entry Date'
    fmat = '%Y-%m-%d %H:%M:%S'
    dollar_cap = 30000.00
    fees = .075/100
    st.header(f"{bot_selections} Performance Dashboard :bread: :moneybag:")
    st.write("Welcome to the Trading Bot Dashboard by BreadBytes! You can use this dashboard to track " +
             "the performance of our trading bots.")
    # st.sidebar.header("FAQ")

    # with st.sidebar.subheader("FAQ"):
    #     st.write(Path("FAQ_README.md").read_text())
    st.subheader("Choose your settings:")
    no_errors = True

    data = load_data("CT-Trade-Log.csv", otimeheader, fmat)
    df = data.copy(deep=True)

    dateheader = 'Date'
    theader = 'Time'

    with st.form("user input", ):
        if no_errors:
            with st.container():
                col1, col2 = st.columns(2)
                with col1:
                    try:
                        startdate = st.date_input("Start Date", value=pd.to_datetime(df[otimeheader]).min())
                    except:
                        st.error("Please select your exchange or upload a supported trade log file.")
                        no_errors = False
                with col2:
                    try:
                        enddate = st.date_input("End Date", value=datetime.today())
                    except:
                        st.error("Please select your exchange or upload a supported trade log file.")
                        no_errors = False
                # st.sidebar.subheader("Customize your Dashboard")

            if no_errors and (enddate < startdate):
                st.error("End Date must be later than Start date. Please try again.")
                no_errors = False
            with st.container():
                col1, col2 = st.columns(2)
                with col2:
                    lev = st.number_input('Leverage', min_value=1, value=1, max_value=5, step=1)
                with col1:
                    principal_balance = st.number_input('Starting Balance', min_value=0.00, value=1000.00, max_value=dollar_cap, step=.01)
            with st.container():
                col1, col2, col3, col4 = st.columns(4)
                with col1:
                    dca1 = st.number_input('DCA 1 Allocation', min_value=0, value=25, max_value=100, step=1)
                with col2:
                    dca2 = st.number_input('DCA 2 Allocation', min_value=0, value=25, max_value=100, step=1)
                with col3:
                    dca3 = st.number_input('DCA 3 Allocation', min_value=0, value=25, max_value=100, step=1)
                with col4:
                    dca4 = st.number_input('DCA 4 Allocation', min_value=0, value=25, max_value=100, step=1)

        # hack way to get button centered
        c = st.columns(9)
        with c[4]:
            submitted = st.form_submit_button("Get Cookin'!")

    if submitted and principal_balance * lev > dollar_cap:
        lev = np.floor(dollar_cap/principal_balance)
        st.error(f"WARNING: (Starting Balance)*(Leverage) exceeds the ${dollar_cap} limit. Using maximum available leverage of {lev}")

    if submitted and no_errors:
        df = df[(df[dateheader] >= startdate) & (df[dateheader] <= enddate)]

        if len(df) == 0:
            st.error("There are no available trades matching your selections. Please try again!")
            no_errors = False
        if no_errors:

            dca_map = {1: dca1/100, 2: dca2/100, 3: dca3/100, 4: dca4/100}

            df['DCA %'] = df['DCA'].map(dca_map)

            signal_map = {'Long': 1, 'Short': -1}  # 1 for long, -1 for short

            df['Calculated Return %'] = df['Signal'].map(signal_map)*(df['DCA %'])*(1-fees)*((df['Sell Price']-df['Buy Price'])/df['Buy Price'] - fees)  # accounts for fees on open and close of trade

            df['Return Per Trade'] = np.nan

            g = df.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %'].reset_index(name='Return Per Trade')

            df.loc[df['DCA'] == 1.0, 'Return Per Trade'] = 1+g['Return Per Trade'].values

            df['Compounded Return'] = df['Return Per Trade'].cumprod()
            df['Balance used in Trade'] = [min(dollar_cap/lev, bal*principal_balance) for bal in df['Compounded Return']]
            df['Net P/L Per Trade'] = (df['Return Per Trade']-1)*lev*df['Balance used in Trade']
            df['Cumulative P/L'] = df['Net P/L Per Trade'].cumsum()
            cum_pl = df.loc[df.dropna().index[-1], 'Cumulative P/L'] + principal_balance

            effective_return = 100*((cum_pl - principal_balance)/principal_balance)

            st.header(f"{bot_selections} Results")
            if len(bot_selections) > 1:
                st.metric(
                    "Total Account Balance",
                    f"${cum_pl:.2f}",
                    f"{100*(cum_pl-principal_balance)/(principal_balance):.2f} %",
                )

            st.line_chart(data=df.dropna(), x='Exit Date', y='Cumulative P/L', use_container_width=True)

            df['Per Trade Return Rate'] = df['Return Per Trade']-1

            totals = pd.DataFrame([], columns=['# of Trades', 'Wins', 'Losses', 'Win Rate', 'Profit Factor'])
            data = get_hist_info(df.dropna(), principal_balance, 'Per Trade Return Rate')
            totals.loc[len(totals)] = list(i for i in data)

            totals['Cum. P/L'] = cum_pl-principal_balance
            totals['Cum. P/L (%)'] = 100*(cum_pl-principal_balance)/principal_balance
            # results_df['Avg. P/L'] = (cum_pl-principal_balance)/results_df['# of Trades'].values[0]
            # results_df['Avg. P/L (%)'] = 100*results_df['Avg. P/L'].values[0]/principal_balance

            if df.empty:
                st.error("Oops! None of the data provided matches your selection(s). Please try again.")
            else:
                # st.dataframe(totals.style.format({'# of Trades': '{:.0f}','Wins': '{:.0f}','Losses': '{:.0f}','Win Rate': '{:.2f}%','Profit Factor' : '{:.2f}', 'Avg. P/L (%)': '{:.2f}%', 'Cum. P/L (%)': '{:.2f}%', 'Cum. P/L': '{:.2f}', 'Avg. P/L': '{:.2f}'})
                # .text_gradient(subset=['Win Rate'],cmap="RdYlGn", vmin = 0, vmax = 100)\
                # .text_gradient(subset=['Profit Factor'],cmap="RdYlGn", vmin = 0, vmax = 2), use_container_width=True)
                for row in totals.itertuples():
                    col1, col2, col3, col4 = st.columns(4)
                    c1, c2, c3, c4 = st.columns(4)
                    with col1:
                        st.metric(
                            "Total Trades",
                            f"{row._1:.0f}",
                        )
                    with c1:
                        st.metric(
                            "Profit Factor",
                            f"{row._5:.2f}",
                        )
                    with col2:
                        st.metric(
                            "Wins",
                            f"{row.Wins:.0f}",
                        )
                    with c2:
                        st.metric(
                            "Cumulative P/L",
                            f"${row._6:.2f}",
                            f"{row._7:.2f} %",
                        )
                    with col3:
                        st.metric(
                            "Losses",
                            f"{row.Losses:.0f}",
                        )
                    with c3:
                        st.metric(
                            "Rolling 7 Days",
                            "",  # f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
                            f"{get_rolling_stats(df, lev, otimeheader, 7):.2f}%",
                        )
                        st.metric(
                            "Rolling 30 Days",
                            "",  # f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
                            f"{get_rolling_stats(df, lev, otimeheader, 30):.2f}%",
                        )

                    with col4:
                        st.metric(
                            "Win Rate",
                            f"{row._4:.1f}%",
                        )
                    with c4:
                        st.metric(
                            "Rolling 90 Days",
                            "",  # f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
                            f"{get_rolling_stats(df, lev, otimeheader, 90):.2f}%",
                        )
                        st.metric(
                            "Rolling 180 Days",
                            "",  # f"{(1+get_rolling_stats(df,otimeheader, 30))*principal_balance:.2f}",
                            f"{get_rolling_stats(df, lev, otimeheader, 180):.2f}%",
                        )
    if submitted:
        grouped_df = df.groupby('Exit Date').agg({'Signal': 'min', 'Entry Date': 'min', 'Exit Date': 'max', 'Buy Price': 'mean',
                                                  'Sell Price': 'max',
                                                  'P/L per token': 'mean',
                                                  'Calculated Return %': lambda x: np.round(100*lev*x.sum(), 2),
                                                  'DCA': 'max'})
        grouped_df.index = range(1, len(grouped_df)+1)
        grouped_df.rename(columns={'DCA': '# of DCAs', 'Buy Price': 'Avg. Buy Price',
                                   'P/L per token': 'Avg. P/L per token',
                                   'Calculated Return %': 'P/L %'}, inplace=True)
    else:
        grouped_df = df.groupby('Exit Date').agg({'Signal': 'min', 'Entry Date': 'min', 'Exit Date': 'max', 'Buy Price': 'mean',
                                                  'Sell Price': 'max',
                                                  'P/L per token': 'mean',
                                                  'P/L %': lambda x: np.round(x.sum()/4, 2),
                                                  'DCA': 'max'})
        grouped_df.index = range(1, len(grouped_df)+1)
        grouped_df.rename(columns={'DCA': '# of DCAs', 'Buy Price': 'Avg. Buy Price',
                                   'P/L per token': 'Avg. P/L per token'}, inplace=True)

    st.subheader("Trade Logs")
    st.dataframe(grouped_df.style.format({'Avg. Buy Price': '${:.2f}', 'Sell Price': '${:.2f}', '# of DCAs': '{:.0f}', 'Avg. P/L per token': '${:.2f}', 'P/L %': '{:.2f}%'})\
                 .applymap(my_style, subset=['Avg. P/L per token'])\
                 .applymap(my_style, subset=['P/L %']), use_container_width=True)

if __name__ == "__main__":
    st.set_page_config(
        "Trading Bot Dashboard",
        layout="wide",
    )
    runapp()
# -
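To make the bookkeeping inside runapp() easier to follow, here is a minimal, self-contained sketch of its return-compounding chain: a 'Calculated Return %' per DCA fill, summed per 'Exit Date' into a per-trade growth factor ('Return Per Trade'), then compounded with cumprod(). The column names, signal map, and fee constant follow ct_app.py; the trades, prices, dates, and 25% allocations below are hypothetical.

# Sketch (not part of the commit): toy version of the return-compounding step in runapp().
import numpy as np
import pandas as pd

fees = .075/100  # same fee assumption as ct_app.py

# Two hypothetical trades: the first filled by two DCA buys at a 25% allocation each,
# the second by a single 25% fill.
toy = pd.DataFrame({
    'Exit Date':  ['2023-01-05', '2023-01-05', '2023-01-09'],
    'DCA':        [1.0, 2.0, 1.0],
    'DCA %':      [0.25, 0.25, 0.25],
    'Signal':     ['Long', 'Long', 'Long'],
    'Buy Price':  [100.0, 95.0, 110.0],
    'Sell Price': [105.0, 105.0, 108.0],
})

# Per-fill return, weighted by allocation and reduced by fees on open and close.
signal_map = {'Long': 1, 'Short': -1}
toy['Calculated Return %'] = toy['Signal'].map(signal_map)*toy['DCA %']*(1-fees)*(
    (toy['Sell Price']-toy['Buy Price'])/toy['Buy Price'] - fees)

# Sum the fills of each trade and park the growth factor on the DCA==1 row.
g = (toy.groupby('Exit Date').sum(numeric_only=True)['Calculated Return %']
        .reset_index(name='Return Per Trade'))
toy['Return Per Trade'] = np.nan
toy.loc[toy['DCA'] == 1.0, 'Return Per Trade'] = 1 + g['Return Per Trade'].values

# Compounded growth factor; ct_app.py scales this by the starting balance and leverage
# to get 'Balance used in Trade', 'Net P/L Per Trade', and 'Cumulative P/L'.
toy['Compounded Return'] = toy['Return Per Trade'].cumprod()
print(toy[['Exit Date', 'Return Per Trade', 'Compounded Return']])
# Trade 1 grows the position by roughly 3.8%; trade 2 loses roughly 0.5%.

The resulting 'Return Per Trade' column, shifted by -1 into 'Per Trade Return Rate', is what get_hist_info() summarizes into trade count, wins, losses, win rate, and profit factor for the dashboard metrics.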
requirements.txt
ADDED
@@ -0,0 +1,8 @@
pandas
datetime
numpy
matplotlib
pathlib
plotly
altair
streamlit
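Usage note (an assumption, not part of the commit): with these dependencies installed via pip install -r requirements.txt, a Streamlit entry point like ct_app.py is normally launched with streamlit run ct_app.py. Note that datetime and pathlib ship with the Python standard library, and dateutil (imported by ct_app.py) is installed as a pandas dependency, so the third-party packages that matter here are pandas, numpy, matplotlib, plotly, altair, and streamlit.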