Multichem committed
Commit 8122a29
Parent(s): 2e39e3e

Update app.py

Files changed (1)
  1. app.py +44 -36
app.py CHANGED
@@ -176,56 +176,62 @@ with tab7:
 
     st.write("Initiated")
 
-    sh = gc.open_by_url(NCAAF_model_url)
+    sh = gc.open_by_url(sheet_url)
     worksheet = sh.worksheet('ATLranks')
     ranks_df = DataFrame(worksheet.get_all_records())
     ranks_dict = dict(zip(ranks_df.Team, ranks_df.ATL))
     conf_dict = dict(zip(ranks_df.Team, ranks_df.Conference))
-
+
     time.sleep(.5)
-
+
+    worksheet = sh.worksheet('Injuries')
+    injuries_df = DataFrame(worksheet.get_all_records())
+    injuries_dict = dict(zip(injuries_df.Team, injuries_df.Team_Modifier))
+
+    time.sleep(.5)
+
     worksheet = sh.worksheet('HFA')
     hfa_df = DataFrame(worksheet.get_all_records())
     hfa_dict = dict(zip(hfa_df.Team, hfa_df.HFA))
-
+
     time.sleep(.5)
-
+
     worksheet = sh.worksheet('Odds')
     odds_df = DataFrame(worksheet.get_all_records())
     odds_dict = dict(zip(odds_df.Point_Spread, odds_df.Favorite_Win_Chance))
-
+
     time.sleep(.5)
-
+
     worksheet = sh.worksheet('Acronyms')
     acros_df = DataFrame(worksheet.get_all_records())
     right_acro = acros_df['Team'].tolist()
     wrong_acro = acros_df['Acro'].tolist()
-
+
     time.sleep(.5)
-
+
     worksheet = sh.worksheet('Add games')
     add_games_df = DataFrame(worksheet.get_all_records())
     add_games_df.replace('', np.nan, inplace=True)
     neutral_dict = dict(zip(add_games_df.game_id, add_games_df.Neutral))
-
+
     time.sleep(.5)
-
+
     worksheet = sh.worksheet('Completed games')
     comp_games_df = DataFrame(worksheet.get_all_records())
     comp_games_df.replace('', np.nan, inplace=True)
-
+
     time.sleep(.5)
-
+
     worksheet = sh.worksheet('LY_scoring')
     lyscore_df = DataFrame(worksheet.get_all_records())
     for checkVar in range(len(wrong_acro)):
         lyscore_df['Team'] = lyscore_df['Team'].replace(wrong_acro, right_acro)
-
+
     PFA_dict = dict(zip(lyscore_df.Team, lyscore_df.PF_G_adj))
     PAA_dict = dict(zip(lyscore_df.Team, lyscore_df.PA_G_adj))
 
     # Send a GET request to the API
-    response = requests.get(pff_url)
+    response = requests.get(url)
 
     st.write("retreiving PFF data")
 
@@ -240,7 +246,7 @@ with tab7:
         # Initialize an empty list to store game data
        games_list = []
        team_list = []
-
+
        # Iterate over each week and its games
        for week in weeks:
            week_number = week.get('week')
@@ -280,12 +286,12 @@ with tab7:
            games_list.append(merged_data)
            team_list.append(home_data)
            team_list.append(away_data)
-
+
        # Create a DataFrame from the games list
        df = pd.DataFrame(games_list)
        team_df = pd.DataFrame(team_list)
        team_df = team_df.drop_duplicates(subset=['team', 'conf'])
-
+
        # Display the DataFrame
        print(df)
    else:
@@ -311,19 +317,19 @@ with tab7:
     df_cleaned = pd.concat([comp_games_merge, df_merge_1])
     df_cleaned = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']]
     df_cleaned = df_cleaned.drop_duplicates(subset=['game_id'])
-
+
     def cond_away_PFA(row, df):
         mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
         return df.loc[mask, 'Away_PFA'].mean()
-
+
     def cond_home_PFA(row, df):
         mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
         return df.loc[mask, 'Home_PFA'].mean()
-
+
     def cond_away_PAA(row, df):
         mask = (df['Away_ATL'] >= row['Away_ATL'] - 5) & (df['Away_ATL'] <= row['Away_ATL'] + 5)
         return df.loc[mask, 'Away_PAA'].mean()
-
+
     def cond_home_PAA(row, df):
         mask = (df['Home_ATL'] >= row['Home_ATL'] - 5) & (df['Home_ATL'] <= row['Home_ATL'] + 5)
         return df.loc[mask, 'Home_PAA'].mean()
@@ -337,27 +343,32 @@ with tab7:
     df_cleaned['conf_game'] = df_cleaned.apply(lambda row: conf_adj.get(row['game_id'], row['conf_game_var']), axis=1)
     df_cleaned['Away_ATL'] = df_cleaned['Away'].map(ranks_dict)
     df_cleaned['Home_ATL'] = df_cleaned['Home'].map(ranks_dict)
+    df_cleaned['Away_inj'] = df_cleaned['Away'].map(injuries_dict)
+    df_cleaned['Home_inj'] = df_cleaned['Home'].map(injuries_dict)
+    df_cleaned['Away_inj'] = df_cleaned['Away_inj'].replace(['', np.nan], 0)
+    df_cleaned['Home_inj'] = df_cleaned['Home_inj'].replace(['', np.nan], 0)
+    df_cleaned['inj_mod'] = df_cleaned['Away_inj'] - df_cleaned['Home_inj']
     df_cleaned['Away_PFA'] = df_cleaned['Away'].map(PFA_dict)
     df_cleaned['Home_PFA'] = df_cleaned['Home'].map(PFA_dict)
     df_cleaned['Away_PAA'] = df_cleaned['Away'].map(PAA_dict)
     df_cleaned['Home_PAA'] = df_cleaned['Home'].map(PAA_dict)
-
+
     # Apply the function to each row in the DataFrame
     df_cleaned['cond_away_PFA'] = df_cleaned.apply(lambda row: cond_away_PFA(row, df_cleaned), axis=1)
     df_cleaned['cond_home_PFA'] = df_cleaned.apply(lambda row: cond_home_PFA(row, df_cleaned), axis=1)
     df_cleaned['cond_away_PAA'] = df_cleaned.apply(lambda row: cond_away_PAA(row, df_cleaned), axis=1)
     df_cleaned['cond_home_PAA'] = df_cleaned.apply(lambda row: cond_home_PAA(row, df_cleaned), axis=1)
-
+
     df_cleaned['cond_away_PFA'] = np.where((df_cleaned['Away_ATL'] <= 0), 18, df_cleaned['cond_away_PFA'])
     df_cleaned['cond_away_PAA'] = np.where((df_cleaned['Away_ATL'] <= 0), 36, df_cleaned['cond_away_PAA'])
     df_cleaned['cond_home_PFA'] = np.where((df_cleaned['Home_ATL'] <= 0), 18, df_cleaned['cond_home_PFA'])
     df_cleaned['cond_home_PAA'] = np.where((df_cleaned['Home_ATL'] <= 0), 36, df_cleaned['cond_home_PAA'])
-
+
     df_cleaned['Away_PFA'] = df_cleaned['Away_PFA'].fillna(df_cleaned['cond_away_PFA'])
     df_cleaned['Away_PAA'] = df_cleaned['Away_PAA'].fillna(df_cleaned['cond_away_PAA'])
     df_cleaned['Home_PFA'] = df_cleaned['Home_PFA'].fillna(df_cleaned['cond_home_PFA'])
     df_cleaned['Home_PAA'] = df_cleaned['Home_PAA'].fillna(df_cleaned['cond_home_PAA'])
-
+
     df_cleaned['Away_PFA_adj'] = (df_cleaned['Away_PFA'] * .75 + df_cleaned['Home_PAA'] * .25)
     df_cleaned['Home_PFA_adj'] = (df_cleaned['Home_PFA'] * .75 + df_cleaned['Away_PAA'] * .25)
     df_cleaned['Away_PFA_cond'] = (df_cleaned['cond_away_PFA'] * .75 + df_cleaned['cond_home_PAA'] * .25)
@@ -366,7 +377,7 @@ with tab7:
     df_cleaned['Neutral'] = df_cleaned['game_id'].map(neutral_dict)
     df_cleaned['HFA'] = np.where(df_cleaned['Neutral'] == 1, 0, df_cleaned['Home'].map(hfa_dict))
     df_cleaned['Neutral'] = np.nan
-    df_cleaned['Home Spread'] = ((df_cleaned['Home_ATL'] - df_cleaned['Away_ATL']) + df_cleaned['HFA']) * -1
+    df_cleaned['Home Spread'] = (((df_cleaned['Home_ATL'] - df_cleaned['Away_ATL']) + df_cleaned['HFA']) * -1) + df_cleaned['inj_mod']
     df_cleaned['Win Prob'] = df_cleaned['Home Spread'].map(odds_dict)
     df_cleaned['Spread Adj'] = np.nan
     df_cleaned['Final Spread'] = np.nan
@@ -376,32 +387,29 @@ with tab7:
     df_cleaned['Total Adj'] = np.nan
     df_cleaned['Final Total'] = np.nan
     df_cleaned['Notes'] = np.nan
-
+
     export_df_1 = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread',
                               'over_under', 'Proj Total (adj)', 'Day', 'CST', 'Neutral', 'Notes']]
-
-
+
+
     export_df_1.rename(columns={"pff_week": "week", "point_spread": "Vegas Spread", "over_under": "Vegas Total", "Proj Total (adj)": "Proj Total"}, inplace = True)
     export_df_2 = add_games_df
     export_df = export_df_1
     export_df['week'] = pd.to_numeric(export_df['week'], errors='coerce')
     export_df = export_df.drop_duplicates(subset=['week', 'Away', 'Home'])
     export_df = export_df.sort_values(by='week', ascending=True)
-
+
     export_df['Vegas Spread'] = pd.to_numeric(export_df['Vegas Spread'], errors='coerce')
     export_df['Vegas Total'] = pd.to_numeric(export_df['Vegas Total'], errors='coerce')
     export_df['Proj Total'] = pd.to_numeric(export_df['Proj Total'], errors='coerce')
     export_df['Home Spread'] = pd.to_numeric(export_df['Home Spread'], errors='coerce')
     export_df.replace([np.nan, np.inf, -np.inf], '', inplace=True)
     export_df = export_df.drop_duplicates(subset=['week', 'away_id', 'home_id'])
-
-    sh = gc.open_by_url(NCAAF_model_url)
+
+    sh = gc.open_by_url(sheet_url)
     worksheet = sh.worksheet('Master_sched')
     worksheet.batch_clear(['A:P'])
     worksheet.update([export_df.columns.values.tolist()] + export_df.values.tolist())
-
-
     st.write("Uploaded Master Schedule")
 
-
     st.write("Finished NCAAF Script!")