Update app.py

app.py
CHANGED
@@ -1,926 +1,401 @@
import streamlit as st
st.set_page_config(layout="wide")
-
- for name in dir():
-     if not name.startswith('_'):
-         del globals()[name]
-
import numpy as np
import pandas as pd
- import streamlit as st
import gspread
- import
- import

@st.cache_resource
def init_conn():
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-

-

-

-
-
-
-
-     load_display = pd.DataFrame(worksheet.get_all_records())
-     load_display.replace('', np.nan, inplace=True)
-     raw_display = load_display.dropna(subset=['PPR'])
-     raw_display.rename(columns={"name": "Player", "PPR": "Median"}, inplace = True)
-     raw_display = raw_display[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own', 'rush_yards', 'rec']]
-     dk_roo_raw = raw_display.loc[raw_display['Median'] > 0]

-
-
-
-
-
-
-

-
-     load_display = pd.DataFrame(worksheet.get_all_records())
-     load_display.replace('', np.nan, inplace=True)
-     raw_display = load_display.dropna(subset=['PPR'])
-     raw_display.rename(columns={"name": "Player", "PPR": "Median"}, inplace = True)
-     raw_display = raw_display[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own', 'rush_yards', 'rec']]
-     dk_roo_raw_2 = raw_display.loc[raw_display['Median'] > 0]

-
-
-     load_display.replace('', np.nan, inplace=True)
-     raw_display = load_display.dropna(subset=['Half_PPR'])
-     raw_display.rename(columns={"name": "Player", "Half_PPR": "Median"}, inplace = True)
-     raw_display = raw_display[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own', 'rush_yards', 'rec']]
-     fd_roo_raw_2 = raw_display.loc[raw_display['Median'] > 0]

-
-
-
-
-
-
-     dk_roo_raw_3 = raw_display.loc[raw_display['Median'] > 0]

-
-     load_display = pd.DataFrame(worksheet.get_all_records())
-     load_display.replace('', np.nan, inplace=True)
-     raw_display = load_display.dropna(subset=['Half_PPR'])
-     raw_display.rename(columns={"name": "Player", "Half_PPR": "Median"}, inplace = True)
-     raw_display = raw_display[['Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', 'Own', 'rush_yards', 'rec']]
-     fd_roo_raw_3 = raw_display.loc[raw_display['Median'] > 0]

    worksheet = sh.worksheet('SD_Projections')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
-     load_display
-
-

    worksheet = sh.worksheet('FD_SD_Projections')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
-     load_display
-
-
-
-
-
-
-
-
-
-
-
    SimVar = 1
    Sim_Winners = []
-     fp_array =
-
-     if insert_port == 1:
-         up_array = CleanPortfolio.values

    # Pre-vectorize functions
    vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
    vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)

-     if insert_port == 1:
-         vec_up_projection_map = np.vectorize(up_dict['Projection_map'].__getitem__)
-         vec_up_stdev_map = np.vectorize(up_dict['STDev_map'].__getitem__)
-
    st.write('Simulating contest on frames')

    while SimVar <= Sim_size:
-
-             fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size-len(CleanPortfolio))]
-         elif insert_port == 0:
-             fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]

        sample_arrays1 = np.c_[
            fp_random,
            np.sum(np.random.normal(
-                 loc=vec_projection_map(fp_random[:, :-
-                 scale=vec_stdev_map(fp_random[:, :-
                axis=1)
        ]

-
-             sample_arrays2 = np.c_[
-                 up_array,
-                 np.sum(np.random.normal(
-                     loc=vec_up_projection_map(up_array[:, :-5]),
-                     scale=vec_up_stdev_map(up_array[:, :-5])),
-                     axis=1)
-             ]
-             sample_arrays = np.vstack((sample_arrays1, sample_arrays2))
-         else:
-             sample_arrays = sample_arrays1

-         final_array = sample_arrays[sample_arrays[:,
        best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
        Sim_Winners.append(best_lineup)
        SimVar += 1

    return Sim_Winners

-
-
-
-     Strength_var_def = Strength_var
-     strength_grow_def = strength_grow
-     Teams_used_def = Teams_used
-     Total_Runs_def = Total_Runs
-
-     st.write('Creating Seed Frames')
-
-     while RunsVar <= seed_depth_def:
-         if RunsVar <= 3:
-             FieldStrength = Strength_var_def
-             FinalPortfolio, maps_dict = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio2, maps_dict2 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio_init = pd.concat([FinalPortfolio, FinalPortfolio2], axis=0)
-             maps_dict.update(maps_dict2)
-         elif RunsVar > 3 and RunsVar <= 4:
-             FieldStrength += (strength_grow_def + ((30 - len(Teams_used_def)) * .001))
-             FinalPortfolio3, maps_dict3 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio4, maps_dict4 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio_merge_3 = pd.concat([FinalPortfolio_init, FinalPortfolio3], axis=0)
-             FinalPortfolio_merge_4 = pd.concat([FinalPortfolio_merge_3, FinalPortfolio4], axis=0)
-             FinalPortfolio_step_2 = FinalPortfolio_merge_4.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
-             maps_dict.update(maps_dict3)
-             maps_dict.update(maps_dict4)
-         elif RunsVar > 4:
-             FieldStrength = 1
-             FinalPortfolio5, maps_dict5 = get_correlated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio6, maps_dict6 = get_uncorrelated_portfolio_for_sim(Total_Runs_def * .25, sharp_split, field_growth)
-             FinalPortfolio_merge_5 = pd.concat([FinalPortfolio_step_2, FinalPortfolio5], axis=0)
-             FinalPortfolio_merge_6 = pd.concat([FinalPortfolio_merge_5, FinalPortfolio6], axis=0)
-             FinalPortfolio_export = FinalPortfolio_merge_6.drop_duplicates(subset = ['Projection', 'Own'],keep = 'last').reset_index(drop = True)
-             maps_dict.update(maps_dict5)
-             maps_dict.update(maps_dict6)
-         RunsVar += 1
-
-     return FinalPortfolio_export, maps_dict
-
- def create_overall_dfs(pos_players, table_name, dict_name, pos):
-     pos_players = pos_players.sort_values(by='Value', ascending=False)
-     table_name_raw = pos_players.reset_index(drop=True)
-     overall_table_name = table_name_raw.head(round(len(table_name_raw)))
-     overall_table_name = overall_table_name.assign(Var = range(0,len(overall_table_name)))
-     overall_dict_name = pd.Series(overall_table_name.Player.values, index=overall_table_name.Var).to_dict()
-
-     return overall_table_name, overall_dict_name

-
-
-
-
-     '
-
-
-
-
-
-
-
-
-
-
-
-
-
-     O_merge, full_pos_player_dict = get_overall_merged_df()
-     Overall_Merge = O_merge[['Var', 'Player', 'Team', 'Salary', 'Median', 'Own']].copy()

-
-
-
-

-
-
-
-

-
-
-

-
-
-
-
-
-
- def get_correlated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
-
-     sizesplit = round(Total_Sample_Size * sharp_split)
-
-     RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
-
-     RandomPortfolio['CPT'] = pd.Series(list(RandomPortfolio['CPT'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX1'] = pd.Series(list(RandomPortfolio['FLEX1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX2'] = pd.Series(list(RandomPortfolio['FLEX2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX3'] = pd.Series(list(RandomPortfolio['FLEX3'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX4'] = pd.Series(list(RandomPortfolio['FLEX4'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX5'] = pd.Series(list(RandomPortfolio['FLEX5'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
-     RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
-     RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 7].drop(columns=['plyr_list','plyr_count']).\
-         reset_index(drop=True)
-
-     RandomPortfolio['CPTs'] = RandomPortfolio['CPT'].map(maps_dict['Salary_map']).astype(np.int32) * 1.5
-     RandomPortfolio['FLEX1s'] = RandomPortfolio['FLEX1'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX2s'] = RandomPortfolio['FLEX2'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX3s'] = RandomPortfolio['FLEX3'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX4s'] = RandomPortfolio['FLEX4'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX5s'] = RandomPortfolio['FLEX5'].map(maps_dict['Salary_map']).astype(np.int32)
-
-     RandomPortfolio['CPTp'] = RandomPortfolio['CPT'].map(maps_dict['Projection_map']).astype(np.float16) * 1.5
-     RandomPortfolio['FLEX1p'] = RandomPortfolio['FLEX1'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX2p'] = RandomPortfolio['FLEX2'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX3p'] = RandomPortfolio['FLEX3'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX4p'] = RandomPortfolio['FLEX4'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX5p'] = RandomPortfolio['FLEX5'].map(maps_dict['Projection_map']).astype(np.float16)
-
-     RandomPortfolio['CPTo'] = RandomPortfolio['CPT'].map(maps_dict['Own_map']).astype(np.float16) / 4
-     RandomPortfolio['FLEX1o'] = RandomPortfolio['FLEX1'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX2o'] = RandomPortfolio['FLEX2'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX3o'] = RandomPortfolio['FLEX3'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX4o'] = RandomPortfolio['FLEX4'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX5o'] = RandomPortfolio['FLEX5'].map(maps_dict['Own_map']).astype(np.float16)
-
-     portHeaderList = RandomPortfolio.columns.values.tolist()
-     portHeaderList.append('Salary')
-     portHeaderList.append('Projection')
-     portHeaderList.append('Own')
-
-     RandomPortArray = RandomPortfolio.to_numpy()
-
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,7:13].astype(int))]
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,13:19].astype(np.double))]
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:25].astype(np.double))]
-
-     RandomPortArrayOut = np.delete(RandomPortArray, np.s_[7:25], axis=1)
-     RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'User/Field', 'Salary', 'Projection', 'Own'])
-     RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-
-     if insert_port == 1:
-         CleanPortfolio['Salary'] = sum([CleanPortfolio['CPT'].map(up_dict['Salary_map']) * 1.5,
-                                         CleanPortfolio['FLEX1'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX2'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX3'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX4'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX5'].map(up_dict['Salary_map'])
-                                         ]).astype(np.int16)
-     if insert_port == 1:
-         CleanPortfolio['Projection'] = sum([CleanPortfolio['CPT'].map(up_dict['Projection_map']) * 1.5,
-                                             CleanPortfolio['FLEX1'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX2'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX3'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX4'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX5'].map(up_dict['Projection_map'])
-                                             ]).astype(np.float16)
-     if insert_port == 1:
-         CleanPortfolio['Own'] = sum([CleanPortfolio['CPT'].map(up_dict['Own_map']) / 4,
-                                      CleanPortfolio['FLEX1'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX2'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX3'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX4'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX5'].map(up_dict['Own_map'])
-                                      ]).astype(np.float16)
-
-     if site_var1 == 'Draftkings':
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 49500 - (FieldStrength * 1000)].reset_index(drop=True)
-     elif site_var1 == 'Fanduel':
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 59500 - (FieldStrength * 1000)].reset_index(drop=True)
-
-     RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-
-     RandomPortfolio = RandomPortfolio[['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'User/Field', 'Salary', 'Projection', 'Own']]
-
-     return RandomPortfolio, maps_dict
-
- def get_uncorrelated_portfolio_for_sim(Total_Sample_Size, sharp_split, field_growth):
-
-     sizesplit = round(Total_Sample_Size * (1-sharp_split))
-
-     RandomPortfolio, maps_dict, ranges_dict, full_pos_player_dict = create_random_portfolio(sizesplit, raw_baselines, field_growth)
-
-     RandomPortfolio['CPT'] = pd.Series(list(RandomPortfolio['CPT'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX1'] = pd.Series(list(RandomPortfolio['FLEX1'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX2'] = pd.Series(list(RandomPortfolio['FLEX2'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX3'] = pd.Series(list(RandomPortfolio['FLEX3'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX4'] = pd.Series(list(RandomPortfolio['FLEX4'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['FLEX5'] = pd.Series(list(RandomPortfolio['FLEX5'].map(full_pos_player_dict['pos_dicts'][0])), dtype="string[pyarrow]")
-     RandomPortfolio['plyr_list'] = RandomPortfolio[RandomPortfolio.columns.values.tolist()].values.tolist()
-     RandomPortfolio['plyr_count'] = RandomPortfolio['plyr_list'].apply(lambda x: len(set(x)))
-     RandomPortfolio = RandomPortfolio[RandomPortfolio['plyr_count'] == 7].drop(columns=['plyr_list','plyr_count']).\
-         reset_index(drop=True)

-     RandomPortfolio['CPTs'] = RandomPortfolio['CPT'].map(maps_dict['Salary_map']).astype(np.int32) * 1.5
-     RandomPortfolio['FLEX1s'] = RandomPortfolio['FLEX1'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX2s'] = RandomPortfolio['FLEX2'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX3s'] = RandomPortfolio['FLEX3'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX4s'] = RandomPortfolio['FLEX4'].map(maps_dict['Salary_map']).astype(np.int32)
-     RandomPortfolio['FLEX5s'] = RandomPortfolio['FLEX5'].map(maps_dict['Salary_map']).astype(np.int32)
-
-     RandomPortfolio['CPTp'] = RandomPortfolio['CPT'].map(maps_dict['Projection_map']).astype(np.float16) * 1.5
-     RandomPortfolio['FLEX1p'] = RandomPortfolio['FLEX1'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX2p'] = RandomPortfolio['FLEX2'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX3p'] = RandomPortfolio['FLEX3'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX4p'] = RandomPortfolio['FLEX4'].map(maps_dict['Projection_map']).astype(np.float16)
-     RandomPortfolio['FLEX5p'] = RandomPortfolio['FLEX5'].map(maps_dict['Projection_map']).astype(np.float16)
-
-     RandomPortfolio['CPTo'] = RandomPortfolio['CPT'].map(maps_dict['Own_map']).astype(np.float16) / 4
-     RandomPortfolio['FLEX1o'] = RandomPortfolio['FLEX1'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX2o'] = RandomPortfolio['FLEX2'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX3o'] = RandomPortfolio['FLEX3'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX4o'] = RandomPortfolio['FLEX4'].map(maps_dict['Own_map']).astype(np.float16)
-     RandomPortfolio['FLEX5o'] = RandomPortfolio['FLEX5'].map(maps_dict['Own_map']).astype(np.float16)
-
-     portHeaderList = RandomPortfolio.columns.values.tolist()
-     portHeaderList.append('Salary')
-     portHeaderList.append('Projection')
-     portHeaderList.append('Own')
-
-     RandomPortArray = RandomPortfolio.to_numpy()
-
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,7:13].astype(int))]
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,13:19].astype(np.double))]
-     RandomPortArray = np.c_[RandomPortArray, np.einsum('ij->i',RandomPortArray[:,19:25].astype(np.double))]
-
-     RandomPortArrayOut = np.delete(RandomPortArray, np.s_[7:25], axis=1)
-     RandomPortfolioDF = pd.DataFrame(RandomPortArrayOut, columns = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'User/Field', 'Salary', 'Projection', 'Own'])
-     RandomPortfolioDF = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-
-     if insert_port == 1:
-         CleanPortfolio['Salary'] = sum([CleanPortfolio['CPT'].map(up_dict['Salary_map']) * 1.5,
-                                         CleanPortfolio['FLEX1'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX2'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX3'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX4'].map(up_dict['Salary_map']),
-                                         CleanPortfolio['FLEX5'].map(up_dict['Salary_map'])
-                                         ]).astype(np.int16)
-     if insert_port == 1:
-         CleanPortfolio['Projection'] = sum([CleanPortfolio['CPT'].map(up_dict['Projection_map']) * 1.5,
-                                             CleanPortfolio['FLEX1'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX2'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX3'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX4'].map(up_dict['Projection_map']),
-                                             CleanPortfolio['FLEX5'].map(up_dict['Projection_map'])
-                                             ]).astype(np.float16)
-     if insert_port == 1:
-         CleanPortfolio['Own'] = sum([CleanPortfolio['CPT'].map(up_dict['Own_map']) / 4,
-                                      CleanPortfolio['FLEX1'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX2'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX3'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX4'].map(up_dict['Own_map']),
-                                      CleanPortfolio['FLEX5'].map(up_dict['Own_map'])
-                                      ]).astype(np.float16)
-
-     if site_var1 == 'Draftkings':
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 50000].reset_index(drop=True)
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 49500 - (FieldStrength * 1000)].reset_index(drop=True)
-     elif site_var1 == 'Fanduel':
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] <= 60000].reset_index(drop=True)
-         RandomPortfolioDF = RandomPortfolioDF[RandomPortfolioDF['Salary'] >= 59500 - (FieldStrength * 1000)].reset_index(drop=True)
-
-     RandomPortfolio = RandomPortfolioDF.sort_values(by=Sim_function, ascending=False)
-
-     RandomPortfolio = RandomPortfolio[['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'User/Field', 'Salary', 'Projection', 'Own']]
-
-     return RandomPortfolio, maps_dict
-
- tab1, tab2 = st.tabs(['Uploads', 'Contest Sim'])

with tab1:
-     with st.container():
-         st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'. Upload your projections first to avoid an error message.")
-         col1, col2 = st.columns([3, 3])
-
-         with col1:
-             proj_file = st.file_uploader("Upload Projections File", key = 'proj_uploader')
-
-             if proj_file is not None:
-                 try:
-                     proj_dataframe = pd.read_csv(proj_file)
-                     proj_dataframe = proj_dataframe.dropna(subset='Median')
-                 except:
-                     proj_dataframe = pd.read_excel(proj_file)
-                     proj_dataframe = proj_dataframe.dropna(subset='Median')
-
-                 player_salary_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Salary))
-                 player_proj_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Median))
-                 player_own_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Own))
-                 player_team_dict = dict(zip(proj_dataframe.Player, proj_dataframe.Team))
-
-         with col2:
-             portfolio_file = st.file_uploader("Upload Portfolio File", key = 'portfolio_uploader')
-
-             if portfolio_file is not None:
-                 try:
-                     portfolio_dataframe = pd.read_csv(portfolio_file)
-                 except:
-                     portfolio_dataframe = pd.read_excel(portfolio_file)
-                 try:
-                     try:
-                         portfolio_dataframe.columns=["CPT", "FLEX1", "FLEX2", "FLEX3", "FLEX4", "FLEX5"]
-                         split_portfolio = portfolio_dataframe
-                         split_portfolio[['CPT', 'CPT_ID']] = split_portfolio.CPT.str.split("(", n=1, expand = True)
-                         split_portfolio[['FLEX1', 'FLEX1_ID']] = split_portfolio.FLEX1.str.split("(", n=1, expand = True)
-                         split_portfolio[['FLEX2', 'FLEX2_ID']] = split_portfolio.FLEX2.str.split("(", n=1, expand = True)
-                         split_portfolio[['FLEX3', 'FLEX3_ID']] = split_portfolio.FLEX3.str.split("(", n=1, expand = True)
-                         split_portfolio[['FLEX4', 'FLEX4_ID']] = split_portfolio.FLEX4.str.split("(", n=1, expand = True)
-                         split_portfolio[['FLEX5', 'FLEX5_ID']] = split_portfolio.FLEX5.str.split("(", n=1, expand = True)
-
-                         split_portfolio['CPT'] = split_portfolio['CPT'].str.strip()
-                         split_portfolio['FLEX1'] = split_portfolio['FLEX1'].str.strip()
-                         split_portfolio['FLEX2'] = split_portfolio['FLEX2'].str.strip()
-                         split_portfolio['FLEX3'] = split_portfolio['FLEX3'].str.strip()
-                         split_portfolio['FLEX4'] = split_portfolio['FLEX4'].str.strip()
-                         split_portfolio['FLEX5'] = split_portfolio['FLEX5'].str.strip()
-
-                         CPT_dict = dict(zip(split_portfolio.CPT, split_portfolio.CPT_ID))
-                         FLEX1_dict = dict(zip(split_portfolio.FLEX1, split_portfolio.FLEX1_ID))
-                         FLEX2_dict = dict(zip(split_portfolio.FLEX2, split_portfolio.FLEX2_ID))
-                         FLEX3_dict = dict(zip(split_portfolio.FLEX3, split_portfolio.FLEX3_ID))
-                         FLEX4_dict = dict(zip(split_portfolio.FLEX4, split_portfolio.FLEX4_ID))
-                         FLEX5_dict = dict(zip(split_portfolio.FLEX5, split_portfolio.FLEX5_ID))
-
-                         split_portfolio['Salary'] = sum([split_portfolio['CPT'].map(player_salary_dict) * 1.5,
-                                                          split_portfolio['FLEX1'].map(player_salary_dict),
-                                                          split_portfolio['FLEX2'].map(player_salary_dict),
-                                                          split_portfolio['FLEX3'].map(player_salary_dict),
-                                                          split_portfolio['FLEX4'].map(player_salary_dict),
-                                                          split_portfolio['FLEX5'].map(player_salary_dict)])
-
-                         split_portfolio['Projection'] = sum([split_portfolio['CPT'].map(player_proj_dict) * 1.5,
-                                                              split_portfolio['FLEX1'].map(player_proj_dict),
-                                                              split_portfolio['FLEX2'].map(player_proj_dict),
-                                                              split_portfolio['FLEX3'].map(player_proj_dict),
-                                                              split_portfolio['FLEX4'].map(player_proj_dict),
-                                                              split_portfolio['FLEX5'].map(player_proj_dict)])
-
-                         split_portfolio['Ownership'] = sum([split_portfolio['CPT'].map(player_own_dict) / 4,
-                                                             split_portfolio['FLEX1'].map(player_own_dict),
-                                                             split_portfolio['FLEX2'].map(player_own_dict),
-                                                             split_portfolio['FLEX3'].map(player_own_dict),
-                                                             split_portfolio['FLEX4'].map(player_own_dict),
-                                                             split_portfolio['FLEX5'].map(player_own_dict)])
-
-                     except:
-                         portfolio_dataframe.columns=["CPT", "FLEX1", "FLEX2", "FLEX3", "FLEX4", "FLEX5"]
-                         split_portfolio = portfolio_dataframe
-                         split_portfolio[['CPT_ID', 'CPT']] = split_portfolio.CPT.str.split(":", n=1, expand = True)
-                         split_portfolio[['FLEX1_ID', 'FLEX1']] = split_portfolio.FLEX1.str.split(":", n=1, expand = True)
-                         split_portfolio[['FLEX2_ID', 'FLEX2']] = split_portfolio.FLEX2.str.split(":", n=1, expand = True)
-                         split_portfolio[['FLEX3_ID', 'FLEX3']] = split_portfolio.FLEX3.str.split(":", n=1, expand = True)
-                         split_portfolio[['FLEX4_ID', 'FLEX4']] = split_portfolio.FLEX4.str.split(":", n=1, expand = True)
-                         split_portfolio[['FLEX5_ID', 'FLEX5']] = split_portfolio.FLEX5.str.split(":", n=1, expand = True)
-
-                         split_portfolio['CPT'] = split_portfolio['CPT'].str.strip()
-                         split_portfolio['FLEX1'] = split_portfolio['FLEX1'].str.strip()
-                         split_portfolio['FLEX2'] = split_portfolio['FLEX2'].str.strip()
-                         split_portfolio['FLEX3'] = split_portfolio['FLEX3'].str.strip()
-                         split_portfolio['FLEX4'] = split_portfolio['FLEX4'].str.strip()
-                         split_portfolio['FLEX5'] = split_portfolio['FLEX5'].str.strip()
-
-                         CPT_dict = dict(zip(split_portfolio.CPT, split_portfolio.CPT_ID))
-                         FLEX1_dict = dict(zip(split_portfolio.FLEX1, split_portfolio.FLEX1_ID))
-                         FLEX2_dict = dict(zip(split_portfolio.FLEX2, split_portfolio.FLEX2_ID))
-                         FLEX3_dict = dict(zip(split_portfolio.FLEX3, split_portfolio.FLEX3_ID))
-                         FLEX4_dict = dict(zip(split_portfolio.FLEX4, split_portfolio.FLEX4_ID))
-                         FLEX5_dict = dict(zip(split_portfolio.FLEX5, split_portfolio.FLEX5_ID))
-
-                         split_portfolio['Salary'] = sum([split_portfolio['CPT'].map(player_salary_dict),
-                                                          split_portfolio['FLEX1'].map(player_salary_dict),
-                                                          split_portfolio['FLEX2'].map(player_salary_dict),
-                                                          split_portfolio['FLEX3'].map(player_salary_dict),
-                                                          split_portfolio['FLEX4'].map(player_salary_dict),
-                                                          split_portfolio['FLEX5'].map(player_salary_dict)])
-
-                         split_portfolio['Projection'] = sum([split_portfolio['CPT'].map(player_proj_dict) * 1.5,
-                                                              split_portfolio['FLEX1'].map(player_proj_dict),
-                                                              split_portfolio['FLEX2'].map(player_proj_dict),
-                                                              split_portfolio['FLEX3'].map(player_proj_dict),
-                                                              split_portfolio['FLEX4'].map(player_proj_dict),
-                                                              split_portfolio['FLEX5'].map(player_proj_dict)])
-
-                         split_portfolio['Ownership'] = sum([split_portfolio['CPT'].map(player_own_dict) / 4,
-                                                             split_portfolio['FLEX1'].map(player_own_dict),
-                                                             split_portfolio['FLEX2'].map(player_own_dict),
-                                                             split_portfolio['FLEX3'].map(player_own_dict),
-                                                             split_portfolio['FLEX4'].map(player_own_dict),
-                                                             split_portfolio['FLEX5'].map(player_own_dict)])
-
-                 except:
-                     split_portfolio = portfolio_dataframe
-
-                     split_portfolio['CPT'] = split_portfolio['CPT'].str[:-6]
-                     split_portfolio['FLEX1'] = split_portfolio['FLEX1'].str[:-6]
-                     split_portfolio['FLEX2'] = split_portfolio['FLEX2'].str[:-6]
-                     split_portfolio['FLEX3'] = split_portfolio['FLEX3'].str[:-6]
-                     split_portfolio['FLEX4'] = split_portfolio['FLEX4'].str[:-6]
-                     split_portfolio['FLEX5'] = split_portfolio['FLEX5'].str[:-6]
-
-                     split_portfolio['CPT'] = split_portfolio['CPT'].str.strip()
-                     split_portfolio['FLEX1'] = split_portfolio['FLEX1'].str.strip()
-                     split_portfolio['FLEX2'] = split_portfolio['FLEX2'].str.strip()
-                     split_portfolio['FLEX3'] = split_portfolio['FLEX3'].str.strip()
-                     split_portfolio['FLEX4'] = split_portfolio['FLEX4'].str.strip()
-                     split_portfolio['FLEX5'] = split_portfolio['FLEX5'].str.strip()
-
-                     split_portfolio['Salary'] = sum([split_portfolio['CPT'].map(player_salary_dict) * 1.5,
-                                                      split_portfolio['FLEX1'].map(player_salary_dict),
-                                                      split_portfolio['FLEX2'].map(player_salary_dict),
-                                                      split_portfolio['FLEX3'].map(player_salary_dict),
-                                                      split_portfolio['FLEX4'].map(player_salary_dict),
-                                                      split_portfolio['FLEX5'].map(player_salary_dict)])
-
-                     split_portfolio['Projection'] = sum([split_portfolio['CPT'].map(player_proj_dict) * 1.5,
-                                                          split_portfolio['FLEX1'].map(player_proj_dict),
-                                                          split_portfolio['FLEX2'].map(player_proj_dict),
-                                                          split_portfolio['FLEX3'].map(player_proj_dict),
-                                                          split_portfolio['FLEX4'].map(player_proj_dict),
-                                                          split_portfolio['FLEX5'].map(player_proj_dict)])
-
-                     split_portfolio['Ownership'] = sum([split_portfolio['CPT'].map(player_own_dict) / 4,
-                                                         split_portfolio['FLEX1'].map(player_own_dict),
-                                                         split_portfolio['FLEX2'].map(player_own_dict),
-                                                         split_portfolio['FLEX3'].map(player_own_dict),
-                                                         split_portfolio['FLEX4'].map(player_own_dict),
-                                                         split_portfolio['FLEX5'].map(player_own_dict)])
-
-
- gc.collect()
-
- with tab2:
    col1, col2 = st.columns([1, 7])
    with col1:
-         if st.button("Load/Reset Data", key='
            st.cache_data.clear()
            for key in st.session_state.keys():
                del st.session_state[key]
-
-
-
-
-
-
-
-
-
-
-
-
-
-         elif site_var1 == 'Fanduel':
-             if slate_var1 == 'User':
-                 raw_baselines = proj_dataframe
-             elif slate_var1 == 'Paydirt (Main)':
-                 raw_baselines = dk_roo_raw
-             elif slate_var1 == 'Paydirt (Secondary)':
-                 raw_baselines = dk_roo_raw_2
-             elif slate_var1 == 'Paydirt (Third)':
-                 raw_baselines = dk_roo_raw_3
-
-         st.info("If you are uploading a portfolio, note that there is an adjustments to projections and deviation mapping to prevent 'Projection Bias' and create a fair simulation")
-         insert_port1 = st.selectbox("Are you uploading a portfolio?", ('No', 'Yes'))
-         if insert_port1 == 'Yes':
-             insert_port = 1
-         elif insert_port1 == 'No':
-             insert_port = 0
-         contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large'))
        if contest_var1 == 'Small':
-             Contest_Size =
        elif contest_var1 == 'Medium':
-             Contest_Size = 2500
-         elif contest_var1 == 'Large':
            Contest_Size = 5000
-
        if strength_var1 == 'Not Very':
-             sharp_split =
-
-
        elif strength_var1 == 'Average':
-             sharp_split =
-
-
        elif strength_var1 == 'Very':
-             sharp_split =
-
-         scaling_var = 15
-
-         Sort_function = 'Median'
-         Sim_function = 'Projection'
-
-         if Contest_Size <= 1000:
-             strength_grow = .01
-         elif Contest_Size > 1000 and Contest_Size <= 2500:
-             strength_grow = .025
-         elif Contest_Size > 2500 and Contest_Size <= 5000:
-             strength_grow = .05
-         elif Contest_Size > 5000 and Contest_Size <= 20000:
-             strength_grow = .075
-         elif Contest_Size > 20000:
-             strength_grow = .1
-
-         field_growth = 100 * strength_grow
-
-     with col2:
-         with st.container():
-             if st.button("Simulate Contest", key='sim1'):
-                 with st.container():
-                     for key in st.session_state.keys():
-                         del st.session_state[key]
-
-                     if slate_var1 == 'User':
-                         initial_proj = proj_dataframe[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-
-                         # Define the calculation to be applied
-                         def calculate_own(position, own, mean_own, factor, max_own=75):
-                             return np.where((position == 'QB') & (own - mean_own >= 0),
-                                             own * (factor * (own - mean_own) / 100) + mean_own,
-                                             own)
-
-                         # Set the factors based on the contest_var1
-                         factor_qb, factor_other = {
-                             'Small': (8, 10),
-                             'Medium': (5, 5),
-                             'Large': (1.5, 1.5),
-                         }[contest_var1]
-
-                         # Apply the calculation to the DataFrame
-                         initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_qb if row['Position'] == 'QB' else factor_other), axis=1)
-                         initial_proj['Own%'] = initial_proj['Own%'].clip(upper=75)
-                         initial_proj['Own'] = initial_proj['Own%'] * (600 / initial_proj['Own%'].sum())
-
-                         # Drop unnecessary columns and create the final DataFrame
-                         Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-
-                     elif slate_var1 != 'User':
-                         # Copy only the necessary columns
-                         initial_proj = raw_baselines[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-
-                         # Define the calculation to be applied
-                         def calculate_own(position, own, mean_own, factor, max_own=75):
-                             return np.where((position == 'QB') & (own - mean_own >= 0),
-                                             own * (factor * (own - mean_own) / 100) + mean_own,
-                                             own)
-
-                         # Set the factors based on the contest_var1
-                         factor_qb, factor_other = {
-                             'Small': (8, 10),
-                             'Medium': (5, 5),
-                             'Large': (1.5, 1.5),
-                         }[contest_var1]
-
-                         # Apply the calculation to the DataFrame
-                         initial_proj['Own%'] = initial_proj.apply(lambda row: calculate_own(row['Position'], row['Own'], initial_proj.loc[initial_proj['Position'] == row['Position'], 'Own'].mean(), factor_qb if row['Position'] == 'QB' else factor_other), axis=1)
-                         initial_proj['Own%'] = initial_proj['Own%'].clip(upper=75)
-                         initial_proj['Own'] = initial_proj['Own%'] * (600 / initial_proj['Own%'].sum())
-
-                         # Drop unnecessary columns and create the final DataFrame
-                         Overall_Proj = initial_proj[['Player', 'Team', 'Position', 'Median', 'Own', 'Salary']]
-
-                     if insert_port == 1:
-                         UserPortfolio = portfolio_dataframe[['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5']]
-                     elif insert_port == 0:
-                         UserPortfolio = pd.DataFrame(columns = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5'])
-
-                     Overall_Proj.replace('', np.nan, inplace=True)
-                     Overall_Proj = Overall_Proj.dropna(subset=['Median'])
-                     Overall_Proj = Overall_Proj.assign(Value=lambda x: (x.Median / (x.Salary / 1000)))
-                     Overall_Proj['Sort_var'] = (Overall_Proj['Median'].rank(ascending=False) + Overall_Proj['Value'].rank(ascending=False)) / 2
-                     Overall_Proj = Overall_Proj.sort_values(by='Sort_var', ascending=False)
-                     Overall_Proj['Own'] = np.where((Overall_Proj['Median'] > 0) & (Overall_Proj['Own'] == 0), 1, Overall_Proj['Own'])
-                     Overall_Proj = Overall_Proj.loc[Overall_Proj['Own'] > 0]
-
-                     Overall_Proj['Floor'] = np.where(Overall_Proj['Position'] == 'QB', Overall_Proj['Median'] * .5, Overall_Proj['Median'] * .25)
-                     Overall_Proj['Ceiling'] = np.where(Overall_Proj['Position'] == 'WR', Overall_Proj['Median'] + Overall_Proj['Median'], Overall_Proj['Median'] + Overall_Proj['Floor'])
-                     Overall_Proj['STDev'] = Overall_Proj['Median'] / 4
-
-                     Teams_used = Overall_Proj['Team'].drop_duplicates().reset_index(drop=True)
-                     Teams_used = Teams_used.reset_index()
-                     Teams_used['team_item'] = Teams_used['index'] + 1
-                     Teams_used = Teams_used.drop(columns=['index'])
-                     Teams_used_dictraw = Teams_used.drop(columns=['team_item'])
-
-                     team_list = Teams_used['Team'].to_list()
-                     item_list = Teams_used['team_item'].to_list()
-
-                     FieldStrength_raw = Strength_var + ((30 - len(Teams_used)) * .01)
-                     FieldStrength = FieldStrength_raw - (FieldStrength_raw * (20000 / Contest_Size))
-
-                     if FieldStrength < 0:
-                         FieldStrength = Strength_var
-                         field_split = Strength_var
-
-                     for checkVar in range(len(team_list)):
-                         Overall_Proj['Team'] = Overall_Proj['Team'].replace(team_list, item_list)
-
-                     flex_raw = Overall_Proj
-                     flex_raw.dropna(subset=['Median']).reset_index(drop=True)
-                     flex_raw = flex_raw.reset_index(drop=True)
-                     flex_raw = flex_raw.sort_values(by='Own', ascending=False)
-
-                     pos_players = flex_raw
-                     pos_players.dropna(subset=['Median']).reset_index(drop=True)
-                     pos_players = pos_players.reset_index(drop=True)
-
-                     if insert_port == 1:
-                         try:
-                             # Initialize an empty DataFrame to store raw portfolio data
-                             Raw_Portfolio = pd.DataFrame()
-
-                             # Split each portfolio column and concatenate to Raw_Portfolio
-                             columns_to_process = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5']
-                             for col in columns_to_process:
-                                 temp_df = UserPortfolio[col].str.split("(", n=1, expand=True)
-                                 temp_df.columns = [col, 'Drop']
-                                 Raw_Portfolio = pd.concat([Raw_Portfolio, temp_df], axis=1)
-
-                             # Keep only required variables and remove whitespace
-                             keep_vars = columns_to_process
-                             CleanPortfolio = Raw_Portfolio[keep_vars]
-                             CleanPortfolio = CleanPortfolio.apply(lambda x: x.str.strip())
-
-                             # Reset index and clean up the DataFrame
-                             CleanPortfolio.reset_index(inplace=True)
-                             CleanPortfolio['User/Field'] = CleanPortfolio['index'] + 1
-                             CleanPortfolio.drop(columns=['index'], inplace=True)
-                             CleanPortfolio.replace('', np.nan, inplace=True)
-                             CleanPortfolio.dropna(subset=['CPT'], inplace=True)
-
-                             # Create cleaport_players DataFrame
-                             unique_vals, counts = np.unique(CleanPortfolio.iloc[:, 0:6].values, return_counts=True)
-                             cleaport_players = pd.DataFrame(np.column_stack([unique_vals, counts]), columns=['Player', 'Freq']).astype({'Freq': int}).sort_values('Freq', ascending=False).reset_index(drop=True)
-
-                             # Merge and update nerf_frame DataFrame
-                             nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
-                             nerf_frame[['Median', 'Floor', 'Ceiling', 'STDev']] *= .9
-
-                             del Raw_Portfolio
-                         except:
-                             # Reset index and perform column-wise operations
-                             CleanPortfolio = UserPortfolio.reset_index(drop=True)
-                             CleanPortfolio['User/Field'] = CleanPortfolio.index + 1
-                             CleanPortfolio.replace('', np.nan, inplace=True)
-                             CleanPortfolio.dropna(subset=['CPT'], inplace=True)
-
-                             # Create cleaport_players DataFrame
-                             unique_vals, counts = np.unique(CleanPortfolio.iloc[:, 0:6].values, return_counts=True)
-                             cleaport_players = pd.DataFrame({'Player': unique_vals, 'Freq': counts}).sort_values('Freq', ascending=False).reset_index(drop=True).astype({'Freq': int})
-
-                             # Merge and update nerf_frame DataFrame
-                             nerf_frame = pd.merge(cleaport_players, Overall_Proj, on='Player', how='left')
-                             nerf_frame[['Median', 'Floor', 'Ceiling', 'STDev']] *= .9
-
-                         st.table(nerf_frame)

-
-
-
-
-
-
-
-
-         '
-         '
-         'pos_dicts':['flex_dict']
-         }
-
-         maps_dict = {
-             'Floor_map':dict(zip(Overall_Proj.Player,Overall_Proj.Floor)),
-             'Projection_map':dict(zip(Overall_Proj.Player,Overall_Proj.Median)),
-             'Ceiling_map':dict(zip(Overall_Proj.Player,Overall_Proj.Ceiling)),
-             'Salary_map':dict(zip(Overall_Proj.Player,Overall_Proj.Salary)),
-             'Pos_map':dict(zip(Overall_Proj.Player,Overall_Proj.Position)),
-             'Own_map':dict(zip(Overall_Proj.Player,Overall_Proj.Own)),
-             'Team_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team)),
-             'STDev_map':dict(zip(Overall_Proj.Player,Overall_Proj.STDev)),
-             'team_check_map':dict(zip(Overall_Proj.Player,Overall_Proj.Team))
        }
-
-
-
-
-
-
-
-
-
-
-
        }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                     st.session_state.player_freq = pd.DataFrame(np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:,0:6].values, return_counts=True)),
                          columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
-
-
-                     st.session_state.player_freq['Salary'] = st.session_state.player_freq['Player'].map(maps_dict['Salary_map'])
-                     st.session_state.player_freq['Proj Own'] = st.session_state.player_freq['Player'].map(maps_dict['Own_map']) / 100
-                     st.session_state.player_freq['Exposure'] = st.session_state.player_freq['Freq']/(5000)
-                     st.session_state.player_freq['Edge'] = st.session_state.player_freq['Exposure'] - st.session_state.player_freq['Proj Own']
-                     st.session_state.player_freq['Team'] = st.session_state.player_freq['Player'].map(maps_dict['Team_map'])
-                     for checkVar in range(len(team_list)):
-                         st.session_state.player_freq['Team'] = st.session_state.player_freq['Team'].replace(item_list, team_list)
-
-                     st.session_state.cpt_freq = pd.DataFrame(np.column_stack(np.unique(st.session_state.Sim_Winner_Display.iloc[:,0:1].values, return_counts=True)),
                          columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
-
-
-
-
-
-
-
-
-
-
-
                          columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
-
-
-
-
-
-
-
-
-
-
    with st.container():
        if 'player_freq' in st.session_state:
            player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
            if player_split_var2 == 'Specific Players':
@@ -929,53 +404,62 @@ with tab2:
            find_var2 = st.session_state.player_freq.Player.values.tolist()

            if player_split_var2 == 'Specific Players':
-                 st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(
            if player_split_var2 == 'Full Players':
                st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
            if 'Sim_Winner_Display' in st.session_state:
-                 st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').
            if 'Sim_Winner_Export' in st.session_state:
-
-
-
-
-
-
-
            with st.container():
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                 )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
import streamlit as st
st.set_page_config(layout="wide")
import numpy as np
import pandas as pd
import gspread
+ import pymongo
+ import time

@st.cache_resource
def init_conn():
+     scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
+
+     credentials = {
+         "type": "service_account",
+         "project_id": "model-sheets-connect",
+         "private_key_id": "0e0bc2fdef04e771172fe5807392b9d6639d945e",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDiu1v/e6KBKOcK\ncx0KQ23nZK3ZVvADYy8u/RUn/EDI82QKxTd/DizRLIV81JiNQxDJXSzgkbwKYEDm\n48E8zGvupU8+Nk76xNPakrQKy2Y8+VJlq5psBtGchJTuUSHcXU5Mg2JhQsB376PJ\nsCw552K6Pw8fpeMDJDZuxpKSkaJR6k9G5Dhf5q8HDXnC5Rh/PRFuKJ2GGRpX7n+2\nhT/sCax0J8jfdTy/MDGiDfJqfQrOPrMKELtsGHR9Iv6F4vKiDqXpKfqH+02E9ptz\nBk+MNcbZ3m90M8ShfRu28ebebsASfarNMzc3dk7tb3utHOGXKCf4tF8yYKo7x8BZ\noO9X4gSfAgMBAAECggEAU8ByyMpSKlTCF32TJhXnVJi/kS+IhC/Qn5JUDMuk4LXr\naAEWsWO6kV/ZRVXArjmuSzuUVrXumISapM9Ps5Ytbl95CJmGDiLDwRL815nvv6k3\nUyAS8EGKjz74RpoIoH6E7EWCAzxlnUgTn+5oP9Flije97epYk3H+e2f1f5e1Nn1d\nYNe8U+1HqJgILcxA1TAUsARBfoD7+K3z/8DVPHI8IpzAh6kTHqhqC23Rram4XoQ6\nzj/ZdVBjvnKuazETfsD+Vl3jGLQA8cKQVV70xdz3xwLcNeHsbPbpGBpZUoF73c65\nkAXOrjYl0JD5yAk+hmYhXr6H9c6z5AieuZGDrhmlFQKBgQDzV6LRXmjn4854DP/J\nI82oX2GcI4eioDZPRukhiQLzYerMQBmyqZIRC+/LTCAhYQSjNgMa+ZKyvLqv48M0\n/x398op/+n3xTs+8L49SPI48/iV+mnH7k0WI/ycd4OOKh8rrmhl/0EWb9iitwJYe\nMjTV/QxNEpPBEXfR1/mvrN/lVQKBgQDuhomOxUhWVRVH6x03slmyRBn0Oiw4MW+r\nrt1hlNgtVmTc5Mu+4G0USMZwYuOB7F8xG4Foc7rIlwS7Ic83jMJxemtqAelwOLdV\nXRLrLWJfX8+O1z/UE15l2q3SUEnQ4esPHbQnZowHLm0mdL14qSVMl1mu1XfsoZ3z\nJZTQb48CIwKBgEWbzQRtKD8lKDupJEYqSrseRbK/ax43DDITS77/DWwHl33D3FYC\nMblUm8ygwxQpR4VUfwDpYXBlklWcJovzamXpSnsfcYVkkQH47NuOXPXPkXQsw+w+\nDYcJzeu7F/vZqk9I7oBkWHUrrik9zPNoUzrfPvSRGtkAoTDSwibhoc5dAoGBAMHE\nK0T/ANeZQLNuzQps6S7G4eqjwz5W8qeeYxsdZkvWThOgDd/ewt3ijMnJm5X05hOn\ni4XF1euTuvUl7wbqYx76Wv3/1ZojiNNgy7ie4rYlyB/6vlBS97F4ZxJdxMlabbCW\n6b3EMWa4EVVXKoA1sCY7IVDE+yoQ1JYsZmq45YzPAoGBANWWHuVueFGZRDZlkNlK\nh5OmySmA0NdNug3G1upaTthyaTZ+CxGliwBqMHAwpkIRPwxUJpUwBTSEGztGTAxs\nWsUOVWlD2/1JaKSmHE8JbNg6sxLilcG6WEDzxjC5dLL1OrGOXj9WhC9KX3sq6qb6\nF/j9eUXfXjAlb042MphoF3ZC\n-----END PRIVATE KEY-----\n",
|
18 |
+
"client_email": "gspread-connection@model-sheets-connect.iam.gserviceaccount.com",
|
19 |
+
"client_id": "100369174533302798535",
|
20 |
+
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
21 |
+
"token_uri": "https://oauth2.googleapis.com/token",
|
22 |
+
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
23 |
+
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40model-sheets-connect.iam.gserviceaccount.com"
|
24 |
+
}
|
25 |
+
uri = "mongodb+srv://multichem:Xr1q5wZdXPbxdUmJ@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"
|
26 |
+
client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
|
27 |
+
db = client["testing_db"]
|
28 |
+
|
29 |
+
NFL_Data = 'https://docs.google.com/spreadsheets/d/1I_1Ve3F4tftgfLQQoRKOJ351XfEG48s36OxXUKxmgS8/edit?gid=186883101#gid=186883101'
|
30 |
|
31 |
+
gc_con = gspread.service_account_from_dict(credentials, scope)
|
32 |
|
33 |
+
return gc_con, db, NFL_Data
|
34 |
+
|
35 |
+
gcservice_account, db, NFL_Data = init_conn()
|
36 |
|
37 |
+
percentages_format = {'Exposure': '{:.2%}'}
|
38 |
+
freq_format = {'Exposure': '{:.2%}', 'Proj Own': '{:.2%}', 'Edge': '{:.2%}'}
|
39 |
+
dk_columns = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']
|
40 |
+
fd_columns = ['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']
|
|
|
|
|
|
|
|
|
|
|
|
|
41 |
|
42 |
+
@st.cache_data(ttl = 599)
|
43 |
+
def init_DK_seed_frames():
|
44 |
+
|
45 |
+
collection = db["DK_NFL_SD_seed_frame"]
|
46 |
+
cursor = collection.find()
|
47 |
+
|
48 |
+
raw_display = pd.DataFrame(list(cursor))
|
49 |
+
raw_display = raw_display[['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'FLEX5', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']]
|
50 |
+
DK_seed = raw_display.to_numpy()
|
51 |
|
52 |
+
return DK_seed
|
|
|
|
|
|
|
|
|
|
|
|
|
53 |
|
54 |
+
@st.cache_data(ttl = 599)
|
55 |
+
def init_FD_seed_frames():
|
|
|
|
|
|
|
|
|
|
|
56 |
|
57 |
+
collection = db["FD_NFL_SD_seed_frame"]
|
58 |
+
cursor = collection.find()
|
59 |
+
|
60 |
+
raw_display = pd.DataFrame(list(cursor))
|
61 |
+
raw_display = raw_display[['CPT', 'FLEX1', 'FLEX2', 'FLEX3', 'FLEX4', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']]
|
62 |
+
FD_seed = raw_display.to_numpy()
|
|
|
63 |
|
64 |
+
return FD_seed
|
|
|
|
|
|
|
|
|
|
|
|
|
65 |
|
66 |
+
@st.cache_data(ttl = 599)
|
67 |
+
def init_baselines():
|
68 |
+
sh = gcservice_account.open_by_url(NFL_Data)
|
69 |
+
|
70 |
worksheet = sh.worksheet('SD_Projections')
|
71 |
load_display = pd.DataFrame(worksheet.get_all_records())
|
72 |
load_display.replace('', np.nan, inplace=True)
|
73 |
+
load_display['STDev'] = load_display['PPR'] / 3
|
74 |
+
load_display = load_display.drop_duplicates(subset=['name'], keep='first')
|
75 |
+
load_display.rename(columns={"name": "Player"}, inplace = True)
|
76 |
+
|
77 |
+
dk_raw = load_display.dropna(subset=['Median'])
|
78 |
|
79 |
worksheet = sh.worksheet('FD_SD_Projections')
|
80 |
load_display = pd.DataFrame(worksheet.get_all_records())
|
81 |
load_display.replace('', np.nan, inplace=True)
|
82 |
+
load_display['STDev'] = load_display['Half_PPR'] / 3
|
83 |
+
load_display = load_display.drop_duplicates(subset=['name'], keep='first')
|
84 |
+
load_display.rename(columns={"name": "Player"}, inplace = True)
|
85 |
+
|
86 |
+
fd_raw = load_display.dropna(subset=['Median'])
|
87 |
+
|
88 |
+
return dk_raw, fd_raw
|
89 |
+
|
90 |
+
@st.cache_data
|
91 |
+
def convert_df(array):
|
92 |
+
array = pd.DataFrame(array, columns=column_names)
|
93 |
+
return array.to_csv().encode('utf-8')
|
94 |
+
|
95 |
+
@st.cache_data
|
96 |
+
def calculate_DK_value_frequencies(np_array):
|
97 |
+
unique, counts = np.unique(np_array[:, :6], return_counts=True)
|
98 |
+
frequencies = counts / len(np_array) # Normalize by the number of rows
|
99 |
+
combined_array = np.column_stack((unique, frequencies))
|
100 |
+
return combined_array
|
101 |
+
|
102 |
+
@st.cache_data
|
103 |
+
def calculate_FD_value_frequencies(np_array):
|
104 |
+
unique, counts = np.unique(np_array[:, :5], return_counts=True)
|
105 |
+
frequencies = counts / len(np_array) # Normalize by the number of rows
|
106 |
+
combined_array = np.column_stack((unique, frequencies))
|
107 |
+
return combined_array
|
108 |
+
|
109 |
+
@st.cache_data
|
110 |
+
def sim_contest(Sim_size, seed_frame, maps_dict, sharp_split, Contest_Size):
|
111 |
SimVar = 1
|
112 |
Sim_Winners = []
|
113 |
+
fp_array = seed_frame[:sharp_split, :]
|
|
|
|
|
|
|
114 |
|
115 |
# Pre-vectorize functions
|
116 |
vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
|
117 |
vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)
|
118 |
|
|
|
|
|
|
|
|
|
119 |
st.write('Simulating contest on frames')
|
120 |
|
121 |
while SimVar <= Sim_size:
|
122 |
+
fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
|
|
|
|
|
|
|
123 |
|
124 |
sample_arrays1 = np.c_[
|
125 |
fp_random,
|
126 |
np.sum(np.random.normal(
|
127 |
+
loc=vec_projection_map(fp_random[:, :-6]),
|
128 |
+
scale=vec_stdev_map(fp_random[:, :-6])),
|
129 |
axis=1)
|
130 |
]
|
131 |
|
132 |
+
sample_arrays = sample_arrays1
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
133 |
|
134 |
+
final_array = sample_arrays[sample_arrays[:, 7].argsort()[::-1]]
|
135 |
best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
|
136 |
Sim_Winners.append(best_lineup)
|
137 |
SimVar += 1
|
138 |
|
139 |
return Sim_Winners
|
140 |
|
141 |
+
DK_seed = init_DK_seed_frames()
|
142 |
+
FD_seed = init_FD_seed_frames()
|
143 |
+
dk_raw, fd_raw = init_baselines()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
144 |
|
145 |
+
tab1, tab2 = st.tabs(['Contest Sims', 'Data Export'])
with tab2:
    col1, col2 = st.columns([1, 7])
    with col1:
        if st.button("Load/Reset Data", key='reset1'):
            st.cache_data.clear()
            for key in list(st.session_state.keys()):  # copy keys so deleting is safe while iterating
                del st.session_state[key]
            DK_seed = init_DK_seed_frames()
            FD_seed = init_FD_seed_frames()
            dk_raw, fd_raw = init_baselines()

        slate_var1 = st.radio("Which data are you loading?", ('Showdown', 'Other Showdown'))
        site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
        if site_var1 == 'Draftkings':
            raw_baselines = dk_raw
            column_names = dk_columns

            team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
            if team_var1 == 'Specific Teams':
                team_var2 = st.multiselect('Which teams do you want?', options = dk_raw['Team'].unique())
            elif team_var1 == 'Full Slate':
                team_var2 = dk_raw.Team.values.tolist()

            stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
            if stack_var1 == 'Specific Stack Sizes':
                stack_var2 = st.multiselect('Which stack sizes do you want?', options = [5, 4, 3, 2, 1, 0])
            elif stack_var1 == 'Full Slate':
                stack_var2 = [5, 4, 3, 2, 1, 0]

        elif site_var1 == 'Fanduel':
            raw_baselines = fd_raw
            column_names = fd_columns

            team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
            if team_var1 == 'Specific Teams':
                team_var2 = st.multiselect('Which teams do you want?', options = fd_raw['Team'].unique())
            elif team_var1 == 'Full Slate':
                team_var2 = fd_raw.Team.values.tolist()

            stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
            if stack_var1 == 'Specific Stack Sizes':
                stack_var2 = st.multiselect('Which stack sizes do you want?', options = [4, 3, 2, 1, 0])
            elif stack_var1 == 'Full Slate':
                stack_var2 = [4, 3, 2, 1, 0]

if st.button("Prepare data export", key='data_export'):
|
193 |
+
data_export = st.session_state.working_seed.copy()
|
194 |
+
st.download_button(
|
195 |
+
label="Export optimals set",
|
196 |
+
data=convert_df(data_export),
|
197 |
+
file_name='NFL_SD_optimals_export.csv',
|
198 |
+
mime='text/csv',
|
199 |
+
)
|
200 |
+
|
201 |
+
with col2:
|
202 |
+
if st.button("Load Data", key='load_data'):
|
203 |
+
if site_var1 == 'Draftkings':
|
204 |
+
if 'working_seed' in st.session_state:
|
205 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 9], team_var2)]
|
206 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 10], stack_var2)]
|
207 |
+
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
|
208 |
+
elif 'working_seed' not in st.session_state:
|
209 |
+
st.session_state.working_seed = DK_seed.copy()
|
210 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 9], team_var2)]
|
211 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 10], stack_var2)]
|
212 |
+
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
|
213 |
+
|
214 |
+
elif site_var1 == 'Fanduel':
|
215 |
+
if 'working_seed' in st.session_state:
|
216 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 8], team_var2)]
|
217 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 9], stack_var2)]
|
218 |
+
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
|
219 |
+
elif 'working_seed' not in st.session_state:
|
220 |
+
st.session_state.working_seed = FD_seed.copy()
|
221 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 8], team_var2)]
|
222 |
+
st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 9], stack_var2)]
|
223 |
+
st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
|
224 |
+
|
225 |
+
with st.container():
|
226 |
+
if 'data_export_display' in st.session_state:
|
227 |
+
st.dataframe(st.session_state.data_export_display.style.format(freq_format, precision=2), use_container_width = True)
|
228 |
+
|
229 |
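# Illustrative sketch of the row filter used by the Load Data button above: np.isin builds a
# boolean mask from one column of the seed frame, keeping only lineups whose team (and stack
# size) value matches the user's picks. Toy array; the _demo_* names and column positions are
# placeholders for illustration only.
_demo_seed = np.array([['A', 'B', 'NYG', 4], ['C', 'D', 'DAL', 3], ['E', 'F', 'NYG', 2]], dtype=object)
_demo_filtered = _demo_seed[np.isin(_demo_seed[:, 2], ['NYG'])]
_demo_filtered = _demo_filtered[np.isin(_demo_filtered[:, 3], [4, 2])]
# _demo_filtered keeps the first and third rows: the NYG lineups with an allowed stack size.
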
with tab1:
    col1, col2 = st.columns([1, 7])
    with col1:
        if st.button("Load/Reset Data", key='reset2'):
            st.cache_data.clear()
            for key in list(st.session_state.keys()):  # copy keys so deleting is safe while iterating
                del st.session_state[key]
            DK_seed = init_DK_seed_frames()
            FD_seed = init_FD_seed_frames()
            dk_raw, fd_raw = init_baselines()

        sim_slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Other Main Slate'), key='sim_slate_var1')
        sim_site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'), key='sim_site_var1')
        if sim_site_var1 == 'Draftkings':
            raw_baselines = dk_raw
            column_names = dk_columns
        elif sim_site_var1 == 'Fanduel':
            raw_baselines = fd_raw
            column_names = fd_columns

        contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large', 'Custom'))
        if contest_var1 == 'Small':
            Contest_Size = 1000
        elif contest_var1 == 'Medium':
            Contest_Size = 5000
        elif contest_var1 == 'Large':
            Contest_Size = 10000
        elif contest_var1 == 'Custom':
            Contest_Size = st.number_input("Insert contest size", value=100, placeholder="Type a number under 10,000...")

        strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Very', 'Above Average', 'Average', 'Below Average', 'Not Very'))
        if strength_var1 == 'Not Very':
            sharp_split = 500000
        elif strength_var1 == 'Below Average':
            sharp_split = 400000
        elif strength_var1 == 'Average':
            sharp_split = 300000
        elif strength_var1 == 'Above Average':
            sharp_split = 200000
        elif strength_var1 == 'Very':
            sharp_split = 100000

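        # Equivalent sketch (not wired in): the same field-strength mapping expressed as a dict
        # lookup, which keeps the size/strength table in one place. Values mirror the if/elif
        # chain above; _sharp_split_lookup is a placeholder name added for illustration.
        _sharp_split_lookup = {'Not Very': 500000, 'Below Average': 400000, 'Average': 300000,
                               'Above Average': 200000, 'Very': 100000}
        # sharp_split = _sharp_split_lookup[strength_var1] would reproduce the assignment above.
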
    with col2:
        if st.button("Run Contest Sim"):
            if 'working_seed' in st.session_state:
                maps_dict = {
                    'Projection_map':dict(zip(raw_baselines.Player,raw_baselines.Median)),
                    'Salary_map':dict(zip(raw_baselines.Player,raw_baselines.Salary)),
                    'Pos_map':dict(zip(raw_baselines.Player,raw_baselines.Position)),
                    'Own_map':dict(zip(raw_baselines.Player,raw_baselines['Own'])),
                    'Team_map':dict(zip(raw_baselines.Player,raw_baselines.Team)),
                    'STDev_map':dict(zip(raw_baselines.Player,raw_baselines.STDev))
                }
                Sim_Winners = sim_contest(1000, st.session_state.working_seed, maps_dict, sharp_split, Contest_Size)

                # Initial setup
                Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
                Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
                Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
                Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))

                # Type Casting
                type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
                Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)

                # Sorting
                st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
                st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)

                # Copy for export
                st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()

                # Copy for display
                st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
            else:
                if sim_site_var1 == 'Draftkings':
                    st.session_state.working_seed = DK_seed.copy()
                elif sim_site_var1 == 'Fanduel':
                    st.session_state.working_seed = FD_seed.copy()
                maps_dict = {
                    'Projection_map':dict(zip(raw_baselines.Player,raw_baselines.Median)),
                    'Salary_map':dict(zip(raw_baselines.Player,raw_baselines.Salary)),
                    'Pos_map':dict(zip(raw_baselines.Player,raw_baselines.Position)),
                    'Own_map':dict(zip(raw_baselines.Player,raw_baselines['Own'])),
                    'Team_map':dict(zip(raw_baselines.Player,raw_baselines.Team)),
                    'STDev_map':dict(zip(raw_baselines.Player,raw_baselines.STDev))
                }
                Sim_Winners = sim_contest(1000, st.session_state.working_seed, maps_dict, sharp_split, Contest_Size)

                # Initial setup
                Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
                Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
                Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
                Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))

                # Type Casting
                type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
                Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)

                # Sorting
                st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
                st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)

                # Copy for export
                st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()

                # Copy for display
                st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()

            freq_copy = st.session_state.Sim_Winner_Display

            if sim_site_var1 == 'Draftkings':
                freq_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,0:7].values, return_counts=True)),
                                            columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                freq_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,0:6].values, return_counts=True)),
                                            columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            freq_working['Freq'] = freq_working['Freq'].astype(int)
            freq_working['Position'] = freq_working['Player'].map(maps_dict['Pos_map'])
            freq_working['Salary'] = freq_working['Player'].map(maps_dict['Salary_map'])
            freq_working['Proj Own'] = freq_working['Player'].map(maps_dict['Own_map']) / 100
            freq_working['Exposure'] = freq_working['Freq']/(1000)
            freq_working['Edge'] = freq_working['Exposure'] - freq_working['Proj Own']
            freq_working['Team'] = freq_working['Player'].map(maps_dict['Team_map'])
            st.session_state.player_freq = freq_working.copy()

            if sim_site_var1 == 'Draftkings':
                cpt_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,0:1].values, return_counts=True)),
                                           columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                cpt_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,0:1].values, return_counts=True)),
                                           columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            cpt_working['Freq'] = cpt_working['Freq'].astype(int)
            cpt_working['Position'] = cpt_working['Player'].map(maps_dict['Pos_map'])
            cpt_working['Salary'] = cpt_working['Player'].map(maps_dict['Salary_map'])
            cpt_working['Proj Own'] = cpt_working['Player'].map(maps_dict['Own_map']) / 100
            cpt_working['Exposure'] = cpt_working['Freq']/(1000)
            cpt_working['Edge'] = cpt_working['Exposure'] - cpt_working['Proj Own']
            cpt_working['Team'] = cpt_working['Player'].map(maps_dict['Team_map'])
            st.session_state.sp_freq = cpt_working.copy()

            if sim_site_var1 == 'Draftkings':
                team_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,9:10].values, return_counts=True)),
                                            columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                team_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,8:9].values, return_counts=True)),
                                            columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            team_working['Freq'] = team_working['Freq'].astype(int)
            team_working['Exposure'] = team_working['Freq']/(1000)
            st.session_state.team_freq = team_working.copy()

            if sim_site_var1 == 'Draftkings':
                stack_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,10:11].values, return_counts=True)),
                                             columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                stack_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:,9:10].values, return_counts=True)),
                                             columns=['Player','Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            stack_working['Freq'] = stack_working['Freq'].astype(int)
            stack_working['Exposure'] = stack_working['Freq']/(1000)
            st.session_state.stack_freq = stack_working.copy()

        with st.container():
            if st.button("Reset Sim", key='reset_sim'):
                for key in list(st.session_state.keys()):  # copy keys so deleting is safe while iterating
                    del st.session_state[key]
            if 'player_freq' in st.session_state:
                player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
                if player_split_var2 == 'Specific Players':
                    find_var2 = st.multiselect('Which players must be included in the lineups?', options = st.session_state.player_freq['Player'].unique())
                elif player_split_var2 == 'Full Players':
                    find_var2 = st.session_state.player_freq.Player.values.tolist()

                if player_split_var2 == 'Specific Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
                if player_split_var2 == 'Full Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
            if 'Sim_Winner_Display' in st.session_state:
                st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)
            if 'Sim_Winner_Export' in st.session_state:
                st.download_button(
                    label="Export Full Frame",
                    data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
                    file_name='MLB_consim_export.csv',
                    mime='text/csv',
                )

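        # Illustrative sketch of the player-isolation filter used above: keep only rows (lineups)
        # that contain every selected player. Toy data with placeholder _demo_* names; it is not
        # tied to the sim results.
        _demo_rows = np.array([['A', 'B', 'C'], ['A', 'D', 'E'], ['B', 'C', 'F']], dtype=object)
        _demo_must_have = ['A', 'B']
        _demo_mask = np.equal.outer(_demo_rows, _demo_must_have).any(axis=1).all(axis=1)
        # _demo_mask is [True, False, False]: only the first lineup holds both 'A' and 'B'.
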
        with st.container():
            tab1, tab2, tab3, tab4 = st.tabs(['Overall Exposures', 'SP Exposures', 'Team Exposures', 'Stack Size Exposures'])
            with tab1:
                if 'player_freq' in st.session_state:
                    st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.player_freq.to_csv().encode('utf-8'),
                        file_name='player_freq_export.csv',
                        mime='text/csv',
                        key='overall'
                    )
            with tab2:
                if 'sp_freq' in st.session_state:
                    st.dataframe(st.session_state.sp_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width = True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.sp_freq.to_csv().encode('utf-8'),
                        file_name='cpt_freq.csv',
                        mime='text/csv',
                        key='sp'
                    )
            with tab3:
                if 'team_freq' in st.session_state:
                    st.dataframe(st.session_state.team_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(percentages_format, precision=2), use_container_width = True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.team_freq.to_csv().encode('utf-8'),
                        file_name='team_freq.csv',
                        mime='text/csv',
                        key='team'
                    )
            with tab4:
                if 'stack_freq' in st.session_state:
                    st.dataframe(st.session_state.stack_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(percentages_format, precision=2), use_container_width = True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.stack_freq.to_csv().encode('utf-8'),
                        file_name='stack_freq.csv',
                        mime='text/csv',
                        key='stack'
                    )
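
# Illustrative sketch (toy data, independent of the sim results above): the two summary
# techniques used in the Run Contest Sim handler. 'win_count' maps each lineup's duplicate
# count back onto the frame via value_counts, and 'Exposure' divides a player's appearance
# count by the 1,000 simulated contests. The _demo_* names are placeholders for illustration.
_demo_wins = pd.DataFrame({'unique_id': ['x', 'x', 'y'], 'proj': [120.0, 120.0, 118.5]})
_demo_wins = _demo_wins.assign(win_count=_demo_wins['unique_id'].map(_demo_wins['unique_id'].value_counts()))
# win_count is [2, 2, 1]; sorting on it surfaces the lineups that won the most simulations.
_demo_players, _demo_counts = np.unique(np.array([['A', 'B'], ['A', 'C']], dtype=object), return_counts=True)
_demo_exposure = pd.DataFrame({'Player': _demo_players, 'Freq': _demo_counts})
_demo_exposure['Exposure'] = _demo_exposure['Freq'] / 1000  # share of the 1,000 simulated wins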