Multichem committed on
Commit
9a13b83
1 Parent(s): cff447e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +242 -189
app.py CHANGED
@@ -47,204 +47,257 @@ def init_baselines():
47
 
48
  adp_table, stacks_table, proj_table = init_baselines()
49
 
50
- # tab1, tab2, tab3 = st.tabs(["ADPs and Ranks", "Team Projections", "Stack Tool", "Player Prop Simulations", "Stat Specific Simulations", "Bet Sheet"])
51
 
52
def convert_df_to_csv(df):
    """Serialize *df* to CSV text and return it as UTF-8 bytes (for download)."""
    csv_text = df.to_csv()
    return csv_text.encode('utf-8')
54
 
55
# --- Legacy (pre-tab) stack tool: controls in col1, analysis output in col2 ---
col1, col2 = st.columns([1, 5])

with col1:
    # Clear the data cache and rebuild the baselines on demand.
    if st.button("Load/Reset Data", key='reset4'):
        st.cache_data.clear()
        adp_table, stacks_table, proj_table = init_baselines()
    site_var2 = st.radio("What site are you playing?", ('Underdog', 'MFL10'), key='site_var2')
    split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('All Teams', 'Specific Teams'), key='split_var2')
    if split_var2 == 'Specific Teams':
        team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options=adp_table['Team'].unique(), key='team_var2')
    else:
        team_var2 = adp_table.Team.unique().tolist()
    pos_split2 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split2')
    if pos_split2 == 'Specific Positions':
        pos_var2 = st.multiselect('What Positions would you like to view?', options=['QB', 'RB', 'WR', 'TE'])
    else:
        pos_var2 = 'All'
    # The ADP column used for the ADP_* outputs depends on the selected site.
    if site_var2 == 'Underdog':
        adp_dict = dict(zip(adp_table.Player, adp_table.Underdog))
    else:
        adp_dict = dict(zip(adp_table.Player, adp_table.MFL10))
    size_var2 = st.number_input('What size of stacks are you analyzing?', min_value=3, max_value=6, step=1)
    stack_size = size_var2

    team_dict = dict(zip(adp_table.Player, adp_table.Team))
    proj_dict = dict(zip(adp_table.Player, adp_table.Projection))
    diff_dict = dict(zip(adp_table.Player, adp_table.Diff))

with col2:
    stack_hold_container = st.empty()
    if st.button('Run stack analysis'):
        # Optional position filter before enumerating stacks.
        if pos_split2 == 'All Positions':
            raw_baselines = adp_table.copy()
        else:
            raw_baselines = adp_table[adp_table['Position'].str.contains('|'.join(pos_var2))]

        # Enumerate every same-team player combination of the requested size.
        comb_list = []
        for cur_team in team_var2:
            team_players = raw_baselines[raw_baselines['Team'] == cur_team]['Player']
            comb_list.extend(combinations(team_players, stack_size))

        comb_DF = pd.DataFrame(comb_list)

        # One parameterized pass replaces the four copy-pasted
        # stack_size == 3/4/5/6 branches of the original.
        comb_DF['Team'] = comb_DF[0].map(team_dict)
        comb_DF['Proj'] = comb_DF.apply(
            lambda row: pd.Series([proj_dict.get(row[i], None) for i in range(stack_size)]).sum(), axis=1)
        for i in range(stack_size):
            comb_DF[f'ADP_{i + 1}'] = comb_DF[i].map(adp_dict)
        comb_DF['Value'] = comb_DF.apply(
            lambda row: pd.Series([diff_dict.get(row[i], None) for i in range(stack_size)]).mean(), axis=1)

        comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
        # BUGFIX: labels must match positions before the cut loop below —
        # the original dropped by stale pre-sort labels, removing the wrong
        # rows (or raising KeyError, silently eaten by a bare except).
        comb_DF = comb_DF.reset_index(drop=True)

        # Walk down the Proj-sorted table and keep only stacks whose Value
        # is at least the best Value seen so far (a Pareto-style cut).
        value_col = 2 * stack_size + 2  # positional index of the 'Value' column
        cur_own = 0
        cut_var = 1
        while cut_var < len(comb_DF):
            check_own = float(comb_DF.iat[cut_var, value_col])
            if check_own < cur_own:
                # Drop this row; positions shift up, so do not advance.
                comb_DF = comb_DF.drop([cut_var]).reset_index(drop=True)
            elif check_own >= cur_own:
                cur_own = check_own
                cut_var += 1
            else:
                # NaN Value: neither comparison holds — keep the row, move on
                # (matches the original's fall-through behavior).
                cut_var += 1

        with stack_hold_container:
            stack_hold_container = st.empty()
            st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width=True)
            st.download_button(
                label="Export Tables",
                data=convert_df_to_csv(comb_DF),
                file_name='NFL_Stack_Options_export.csv',
                mime='text/csv',
            )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
adp_table, stacks_table, proj_table = init_baselines()

tab1, tab2, tab3, tab4 = st.tabs(["ADPs and Ranks", "Team Projections", 'Player Projections', "Stack Finder"])

def convert_df_to_csv(df):
    """Serialize *df* to CSV and return UTF-8 bytes for st.download_button."""
    return df.to_csv().encode('utf-8')

with tab1:
    col1, col2 = st.columns([1, 5])

    with col1:
        # Clear the data cache and rebuild the baselines on demand.
        if st.button("Load/Reset Data", key='reset1'):
            st.cache_data.clear()
            adp_table, stacks_table, proj_table = init_baselines()
        site_var1 = st.radio("What site are you playing?", ('Underdog', 'MFL10'), key='site_var1')
        split_var1 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('All Teams', 'Specific Teams'), key='split_var1')
        if split_var1 == 'Specific Teams':
            team_var1 = st.multiselect('Which teams would you like to include in the analysis?', options=adp_table['Team'].unique(), key='team_var1')
        else:
            team_var1 = adp_table.Team.unique().tolist()
        pos_split1 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split1')
        if pos_split1 == 'Specific Positions':
            # key added so this widget cannot collide with other multiselects.
            pos_var1 = st.multiselect('What Positions would you like to view?', options=['QB', 'RB', 'WR', 'TE'], key='pos_var1')
        else:
            pos_var1 = adp_table.Position.unique().tolist()

    with col2:
        stack_hold_container = st.empty()

        # Apply the position filter, then the team filter (no-op branches of
        # the original removed).
        raw_baselines = adp_table.copy()
        if pos_split1 == 'Specific Positions':
            raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var1))]
        if split_var1 == 'Specific Teams':
            # BUGFIX: isin() avoids the substring false-positives of
            # str.contains('|'.join(...)) (e.g. 'LA' matching 'LAC'/'LAR').
            raw_baselines = raw_baselines[raw_baselines['Team'].isin(team_var1)]

        display_frame = raw_baselines.copy()

        with stack_hold_container:
            stack_hold_container = st.empty()
            st.dataframe(display_frame.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width=True)
            st.download_button(
                label="Export Tables",
                data=convert_df_to_csv(display_frame),
                file_name='NFL_Stack_Options_export.csv',
                mime='text/csv',
            )

with tab2:
    st.write('working on it')
with tab3:
    st.write('working on it')
103
with tab4:
    col1, col2 = st.columns([1, 5])

    with col1:
        # Clear the data cache and rebuild the baselines on demand.
        if st.button("Load/Reset Data", key='reset4'):
            st.cache_data.clear()
            adp_table, stacks_table, proj_table = init_baselines()
        # key normalized to 'site_var4' (original carried the stale
        # 'site_var2' key over from the pre-tab version of this page).
        site_var4 = st.radio("What site are you playing?", ('Underdog', 'MFL10'), key='site_var4')
        split_var4 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('All Teams', 'Specific Teams'), key='split_var4')
        if split_var4 == 'Specific Teams':
            team_var4 = st.multiselect('Which teams would you like to include in the analysis?', options=adp_table['Team'].unique(), key='team_var4')
        else:
            team_var4 = adp_table.Team.unique().tolist()
        pos_split4 = st.radio("Are you viewing all positions, specific groups, or specific positions?", ('All Positions', 'Specific Positions'), key='pos_split4')
        if pos_split4 == 'Specific Positions':
            pos_var4 = st.multiselect('What Positions would you like to view?', options=['QB', 'RB', 'WR', 'TE'], key='pos_var4')
        else:
            pos_var4 = adp_table.Position.unique().tolist()
        # The ADP column used for the ADP_* outputs depends on the site.
        if site_var4 == 'Underdog':
            adp_dict = dict(zip(adp_table.Player, adp_table.Underdog))
        else:
            adp_dict = dict(zip(adp_table.Player, adp_table.MFL10))
        size_var4 = st.number_input('What size of stacks are you analyzing?', min_value=3, max_value=6, step=1)
        stack_size = size_var4
        cut_var4 = st.radio("Do you want to remove stacks with a negative average value?", ('Yes', 'No'), key='cut_var4')
        cut_sequence = 1 if cut_var4 == 'Yes' else 0

        team_dict = dict(zip(adp_table.Player, adp_table.Team))
        proj_dict = dict(zip(adp_table.Player, adp_table.Projection))
        diff_dict = dict(zip(adp_table.Player, adp_table.Diff))

    with col2:
        stack_hold_container = st.empty()
        if st.button('Run stack analysis'):
            # Optional position filter before enumerating stacks.
            if pos_split4 == 'All Positions':
                raw_baselines = adp_table.copy()
            else:
                raw_baselines = adp_table[adp_table['Position'].str.contains('|'.join(pos_var4))]

            # Enumerate every same-team combination of the requested size.
            comb_list = []
            for cur_team in team_var4:
                team_players = raw_baselines[raw_baselines['Team'] == cur_team]['Player']
                comb_list.extend(combinations(team_players, stack_size))

            comb_DF = pd.DataFrame(comb_list)

            # One parameterized pass replaces the four copy-pasted
            # stack_size == 3/4/5/6 branches of the original.
            comb_DF['Team'] = comb_DF[0].map(team_dict)
            comb_DF['Proj'] = comb_DF.apply(
                lambda row: pd.Series([proj_dict.get(row[i], None) for i in range(stack_size)]).sum(), axis=1)
            for i in range(stack_size):
                comb_DF[f'ADP_{i + 1}'] = comb_DF[i].map(adp_dict)
            comb_DF['Value'] = comb_DF.apply(
                lambda row: pd.Series([diff_dict.get(row[i], None) for i in range(stack_size)]).mean(), axis=1)

            comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
            # BUGFIX: labels must match positions before the cut loop below —
            # the original dropped by stale pre-sort labels, removing the
            # wrong rows (or raising KeyError, eaten by a bare except).
            comb_DF = comb_DF.reset_index(drop=True)

            if cut_sequence == 1:
                # Walk down the Proj-sorted table and keep only stacks whose
                # Value is at least the best Value seen so far.
                value_col = 2 * stack_size + 2  # positional index of 'Value'
                cur_own = 0
                cut_var = 1
                while cut_var < len(comb_DF):
                    check_own = float(comb_DF.iat[cut_var, value_col])
                    if check_own < cur_own:
                        # Drop this row; positions shift up, do not advance.
                        comb_DF = comb_DF.drop([cut_var]).reset_index(drop=True)
                    elif check_own >= cur_own:
                        cur_own = check_own
                        cut_var += 1
                    else:
                        # NaN Value: neither comparison holds — keep the row
                        # and move on (matches the original fall-through).
                        cut_var += 1

            with stack_hold_container:
                stack_hold_container = st.empty()
                st.dataframe(comb_DF.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width=True)
                st.download_button(
                    label="Export Tables",
                    data=convert_df_to_csv(comb_DF),
                    file_name='NFL_Stack_Options_export.csv',
                    mime='text/csv',
                )