# NOTE(review): stray "Spaces: Running" deployment-status text at the top of this
# file was UI residue from a copy/paste (not valid Python); converted to a comment.
| import streamlit as st | |
| import requests | |
| import pandas as pd | |
| from pandas import DataFrame | |
| import numpy as np | |
| import gspread | |
| import pytz | |
| from datetime import datetime | |
| from datetime import date, timedelta | |
| import time | |
st.set_page_config(layout="wide")

# Google Sheets / Drive OAuth scopes for the gspread service account.
scope = ['https://www.googleapis.com/auth/spreadsheets',
         "https://www.googleapis.com/auth/drive"]

# Service-account credentials for gspread.
# SECURITY: the private key was previously hard-coded in this file and is
# therefore compromised -- rotate it in the Google Cloud console and store the
# replacement in Streamlit secrets (the key id below already lives there).
credentials = {
    "type": "service_account",
    "project_id": "sheets-api-connect-378620",
    "private_key_id": st.secrets['sheets_api_connect_pk'],
    # TODO(review): confirm this secret name matches the Streamlit secrets config.
    "private_key": st.secrets['sheets_api_connect_private_key'],
    "client_email": "gspread-connection@sheets-api-connect-378620.iam.gserviceaccount.com",
    "client_id": "106625872877651920064",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40sheets-api-connect-378620.iam.gserviceaccount.com"
}

# Sportradar API key: the secret was loaded but unused while a different literal
# key was hard-coded in sr_headers -- use the secret as the single source of truth.
sr_api_key = st.secrets['sr_api_key']
sr_headers = {"accept": "application/json", "x-api-key": sr_api_key}

gc = gspread.service_account_from_dict(credentials)

# Workbook URLs, kept in secrets so they can change without a code deploy.
NCAAF_model_url = st.secrets['NCAAF_model_url']
pff_url = st.secrets['pff_url']
FL_api_hub_url = st.secrets['FL_master_api_hub']
projections_model_url = st.secrets['FL_projection_model']
rankings_admin_url = st.secrets['rankings_admin']
dwain_seasonal_url = st.secrets['dwain_season_long']
freedman_seasonal_url = st.secrets['freedman_season_long']

# FantasyLife API bearer token.
# TODO(security): this token is committed to source control -- move it to
# st.secrets and rotate it.
headers = {
    'Authorization': 'Bearer 6984da1f-2c81-4140-8206-d018af38533f',
}

# Display-name -> API-slug lookup tables for the update widgets below
# (parallel lists: options[i] maps to replace_options[i]).
name_options = ['Dwain', 'Freedman', 'Ian', 'Waz', 'Guillotine', 'Kendall', 'Sam', 'Laghezza', 'Ronis', 'Matthew Berry']
api_replace_options = ['dwain', 'freedman', 'ian', 'waz', 'guillotine', 'kendall', 'sam', 'laghezza', 'ronis', 'berry']
proj_source_options = ['Dwain', 'Freedman', 'Aggregate', 'Kickers', 'Defenses']
proj_replace_options = ['dwain', 'freedman', 'aggregate', 'kickers', 'defenses']
ros_source_options = ['Laghezza', 'Ronis', 'Dwain', 'Freedman', 'Aggregate']
ros_replace_options = ['laghezza', 'ronis', 'dwain', 'freedman', 'aggregate']
data_update_options = ['Betting Model', 'DFS', 'DVP', 'Strength of Sched', 'Air Yards', 'FAAB']
def grab_sr_fa(headers: dict) -> pd.DataFrame:
    """Fetch the NFL free-agent list from Sportradar.

    Args:
        headers: request headers carrying the Sportradar ``x-api-key``.

    Returns:
        DataFrame with columns FA_ID, SR_ID, First_Name, Prefer_First_Name,
        Last_Name, Name_Suffix, Position, Team -- one row per free agent.
        999999 is the sentinel FA_ID used throughout the workbooks for players
        without a FantasyLife id; '-' marks fields Sportradar does not provide
        on this endpoint, and Team is always 'FA'.
    """
    url = "https://api.sportradar.com/nfl/official/trial/v7/en/league/free_agents.json"
    response = requests.get(url, headers=headers)
    data = response.json()
    # The original built eight parallel lists that were never used; only the
    # row-wise data feeds the DataFrame.
    rows = [
        [999999, player['id'], player['first_name'], '-', player['last_name'], '-', '-', 'FA']
        for player in data['league']['free_agents']
    ]
    return pd.DataFrame(
        rows,
        columns=['FA_ID', 'SR_ID', 'First_Name', 'Prefer_First_Name',
                 'Last_Name', 'Name_Suffix', 'Position', 'Team'])
def grab_sr_ids(headers: dict) -> pd.DataFrame:
    """Fetch every NFL team (as a 'DST' row) plus its full roster from Sportradar.

    First walks the league hierarchy to collect team ids (emitting one DST row
    per team keyed by market/name), then requests each team's full roster and
    appends one row per player. Progress is reported via ``st.write``; a team
    whose roster payload cannot be parsed is reported via ``st.info`` and
    skipped.

    Args:
        headers: request headers carrying the Sportradar ``x-api-key``.

    Returns:
        DataFrame with columns FA_ID, SR_ID, First_Name, Prefer_First_Name,
        Last_Name, Name_Suffix, Position, Team. FA_ID is always the 999999
        sentinel; '-' marks fields Sportradar does not provide here.
    """
    columns = ['FA_ID', 'SR_ID', 'First_Name', 'Prefer_First_Name',
               'Last_Name', 'Name_Suffix', 'Position', 'Team']
    hierarchy_url = "https://api.sportradar.com/nfl/official/trial/v7/en/league/hierarchy.json"
    response = requests.get(hierarchy_url, headers=headers)
    data = response.json()
    team_ids = []
    row_data = []
    for conference in data['conferences']:
        for division in conference['divisions']:
            for team in division['teams']:
                team_ids.append(team['id'])
                # Teams double as 'DST' fantasy entries (market = city, name = nickname).
                row_data.append([
                    999999, team['id'], team['market'], '-', team['name'], '-', 'DST', team['alias']
                ])
    for team_id in team_ids:
        roster_url = f"https://api.sportradar.com/nfl/official/trial/v7/en/teams/{team_id}/full_roster.json"
        response = requests.get(roster_url, headers=headers)
        data = response.json()
        team_name = data['alias']
        st.write(team_name + ' - ' + team_id)
        try:
            for player in data['players']:
                row_data.append([
                    999999, player['id'], player['first_name'], '-',
                    player['last_name'], '-', player['position'], team_name
                ])
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C / SystemExit still
            # propagate; a malformed roster is logged and skipped.
            st.info(f"Error processing team: {team_name} - {team_id}")
            continue
    return pd.DataFrame(row_data, columns=columns)
# Top-level navigation: one tab per admin workflow.
tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(['Rankings Update', 'Projections Update', 'SR ID Update', 'Game Model Update', 'NCAAF Script', 'UR Push', 'Data updates'])
with tab1:
    def _rankings_update_widget(label: str, api_base: str, period: str,
                                select_key: str, button_key: str,
                                error_msg: str) -> None:
        """Render one rankings-update control (ranker picker + trigger button).

        The original code repeated this widget six times verbatim; behavior is
        unchanged.

        Args:
            label: st.info banner text, also used as the button caption.
            api_base: 'expert-rankings' or 'guillotine-rankings'.
            period: 'seasonal', 'restofseason' or 'weekly'.
            select_key / button_key: unique Streamlit widget keys.
            error_msg: message shown when the POST does not return 200.
        """
        st.info(label)
        ranker = st.selectbox("Choose Ranker", name_options, key=select_key)
        slug = api_replace_options[name_options.index(ranker)]
        if st.button(label, key=button_key):
            response = requests.post(
                f"https://api.fantasylife.com/api/{api_base}/v1/{slug}/{period}/update",
                headers=headers, timeout=60)
            if response.status_code == 200:
                st.write("Uploading!")
                st.write(response.json())
            else:
                st.write(error_msg)
                try:
                    if response.text.strip():  # Check if response has content
                        st.write(response.json())
                    else:
                        st.write(f"Empty response. Status code: {response.status_code}")
                except ValueError:
                    st.write(f"Invalid JSON response. Status code: {response.status_code}")
                    st.write(f"Response text: {response.text}")
            # Both success and failure paths reset cached data and session
            # state so the next rerun refetches everything.
            st.cache_data.clear()
            # Iterate a copy: deleting from the live keys view while iterating
            # can raise "dictionary changed size during iteration".
            for key in list(st.session_state.keys()):
                del st.session_state[key]

    with st.container():
        st.header("Update Regular Rankings")
        reg_seasonal_col, reg_ros_col, reg_weekly_col = st.columns([3, 3, 3])
        with reg_seasonal_col:
            _rankings_update_widget(
                "Update Seasonal Rankings", "expert-rankings", "seasonal",
                'seasonal_ranker', 'seasonal_rankings',
                "Error uploading seasonal rankings")
        with reg_ros_col:
            _rankings_update_widget(
                "Update Rest of Season Rankings", "expert-rankings", "restofseason",
                'rest_of_season_ranker', 'rest_of_season_rankings',
                "Error uploading rest of season rankings")
        with reg_weekly_col:
            _rankings_update_widget(
                "Update Weekly Rankings", "expert-rankings", "weekly",
                'weekly_ranker', 'weekly_rankings',
                "Error uploading weekly rankings")
    with st.container():
        st.header("Update Guillotine Rankings")
        guillotine_seasonal_col, guillotine_ros_col, guillotine_weekly_col = st.columns([3, 3, 3])
        with guillotine_seasonal_col:
            _rankings_update_widget(
                "Update Seasonal Guillotine Rankings", "guillotine-rankings", "seasonal",
                'seasonal_guillotine_ranker', 'seasonal_guillotine_rankings',
                "Error uploading seasonal guillotine rankings")
        with guillotine_ros_col:
            _rankings_update_widget(
                "Update Rest of Season Guillotine Rankings", "guillotine-rankings", "restofseason",
                'rest_of_season_guillotine_ranker', 'rest_of_season_guillotine_rankings',
                "Error uploading rest of season guillotine rankings")
        with guillotine_weekly_col:
            _rankings_update_widget(
                "Update Weekly Guillotine Rankings", "guillotine-rankings", "weekly",
                'weekly_guillotine_ranker', 'weekly_guillotine_rankings',
                "Error uploading weekly guillotine rankings")
with tab2:
    def _projections_update_widget(label: str, source_options: list, slug_options: list,
                                   period: str, select_key: str, button_key: str,
                                   error_msg: str) -> None:
        """Render one projections-update control (source picker + trigger button).

        The original code repeated this widget six times verbatim; behavior is
        unchanged.

        Args:
            label: st.info banner text, also used as the button caption.
            source_options: display names for the selectbox (parallel to slug_options).
            slug_options: API slugs, same order as source_options.
            period: 'seasonal', 'restofseason' or 'weekly'.
            select_key / button_key: unique Streamlit widget keys.
            error_msg: message shown when the POST does not return 200.
        """
        st.info(label)
        source = st.selectbox("Choose Type", source_options, key=select_key)
        slug = slug_options[source_options.index(source)]
        if st.button(label, key=button_key):
            # NOTE(review): guillotine projections POST to the same
            # /projections endpoint as regular projections (there is no
            # guillotine-specific path) -- matches the original; confirm intended.
            response = requests.post(
                f"https://api.fantasylife.com/api/projections/v1/{slug}/{period}/update",
                headers=headers, timeout=60)
            if response.status_code == 200:
                st.write("Uploading!")
                st.write(response.json())
            else:
                st.write(error_msg)
                try:
                    if response.text.strip():  # Check if response has content
                        st.write(response.json())
                    else:
                        st.write(f"Empty response. Status code: {response.status_code}")
                except ValueError:
                    st.write(f"Invalid JSON response. Status code: {response.status_code}")
                    st.write(f"Response text: {response.text}")
            # Both success and failure paths reset cached data and session
            # state so the next rerun refetches everything.
            st.cache_data.clear()
            # Iterate a copy: deleting from the live keys view while iterating
            # can raise "dictionary changed size during iteration".
            for key in list(st.session_state.keys()):
                del st.session_state[key]

    with st.container():
        st.header("Update Regular Projections")
        reg_seasonal_proj_col, reg_ros_proj_col, reg_weekly_proj_col = st.columns([3, 3, 3])
        with reg_seasonal_proj_col:
            _projections_update_widget(
                "Update Seasonal Projections", proj_source_options, proj_replace_options,
                "seasonal", 'seasonal_type', 'seasonal_projections',
                "Error uploading seasonal aggregate projections")
        with reg_weekly_proj_col:
            _projections_update_widget(
                "Update Weekly Projections", proj_source_options, proj_replace_options,
                "weekly", 'weekly_type', 'weekly_projections',
                "Error uploading weekly projections")
        with reg_ros_proj_col:
            # Rest-of-season uses its own (smaller) source list.
            _projections_update_widget(
                "Update Rest of Season Projections", ros_source_options, ros_replace_options,
                "restofseason", 'rest_of_season_type', 'rest_of_season_projections',
                "Error uploading rest of season projections")
    with st.container():
        st.header("Update Guillotine Projections")
        guillotine_seasonal_proj_col, guillotine_ros_proj_col, guillotine_weekly_proj_col = st.columns([3, 3, 3])
        with guillotine_seasonal_proj_col:
            _projections_update_widget(
                "Update Seasonal Guillotine Projections", proj_source_options, proj_replace_options,
                "seasonal", 'seasonal_guillotine_type', 'seasonal_guillotine_projections',
                "Error uploading seasonal guillotine aggregate projections")
        with guillotine_weekly_proj_col:
            _projections_update_widget(
                "Update Weekly Guillotine Projections", proj_source_options, proj_replace_options,
                "weekly", 'weekly_guillotine_type', 'weekly_guillotine_projections',
                "Error uploading weekly guillotine projections")
        with guillotine_ros_proj_col:
            _projections_update_widget(
                "Update Rest of Season Guillotine Projections", ros_source_options, ros_replace_options,
                "restofseason", 'rest_of_season_guillotine_type', 'rest_of_season_guillotine_projections',
                "Error uploading rest of season guillotine projections")
with tab3:
    with st.container():
        col1, col2 = st.columns([3, 3])
        with col1:
            st.info("Update SR IDs")
            if st.button("Update SR IDs", key='sr_ids'):
                fa_df = grab_sr_fa(sr_headers)
                st.write("FA SR IDs collected")
                player_df = grab_sr_ids(sr_headers)
                st.write("Player SR IDs collected")
                concat_df = pd.concat([fa_df, player_df])
                # The workbooks use 'JAC' for Jacksonville; Sportradar returns 'JAX'.
                concat_df['Team'] = concat_df['Team'].replace('JAX', 'JAC')
                st.write('ID sets merged, sending to workbooks')
                # Build the header + rows payload once instead of per workbook.
                sheet_values = [concat_df.columns.values.tolist()] + concat_df.values.tolist()
                # (workbook URL, worksheet name, confirmation label) -- the
                # original repeated this clear/update stanza five times.
                targets = [
                    ('https://docs.google.com/spreadsheets/d/1GorpLF-VuArINnxHtMjgD5shTX4CoaSwvwyX953TsmY/edit?gid=0#gid=0',
                     'NFL_SR_IDs', 'Updated FL Master Hub NFL SR IDs'),
                    ('https://docs.google.com/spreadsheets/d/1Xt3jJryQKnOiqB12PPZ2Psz5iANIcRzELpbzVA_0m0E/edit?pli=1&gid=725565114#gid=725565114',
                     'SR_IDs', 'Updated Projection Model NFL SR IDs'),
                    ('https://docs.google.com/spreadsheets/d/1ZP07J6jkbRHdafrfVJSJW7pR07gqh_Oe7cBDCwXWb1A/edit?gid=2082797161#gid=2082797161',
                     'SR_IDs', 'Updated Ranking Admin NFL SR IDs'),
                    ('https://docs.google.com/spreadsheets/d/1ywPj1cW3qd3tD3zl4lrnLOyXmNaQW_JXVB-dakd3wzw/edit?pli=1&gid=648912512#gid=648912512',
                     'SR_IDs', 'Updated Dwain Season Long NFL SR IDs'),
                    ('https://docs.google.com/spreadsheets/d/1MmoL2cvDIzb85qcyFxrP1l6n1xK6zufbMdEzyUdHcnM/edit?gid=336076277#gid=336076277',
                     'SR_IDs', 'Updated Freedman Season Long NFL SR IDs'),
                ]
                for i, (book_url, ws_name, done_msg) in enumerate(targets):
                    sh = gc.open_by_url(book_url)
                    worksheet = sh.worksheet(ws_name)
                    worksheet.batch_clear(['A:H'])
                    worksheet.update(sheet_values)
                    st.write(done_msg)
                    # Pause between workbooks to stay under the Sheets API
                    # quota; like the original, no sleep after the final write.
                    if i < len(targets) - 1:
                        time.sleep(3)
                st.success("SR IDs updated successfully")
with tab4:
    # Game Model Update -- placeholder until the backend endpoints exist.
    st.info("We will add this functionality as soon as we have the endpoints to make it work!")
| with tab5: | |
| with st.container(): | |
| col1, col2, col3 = st.columns([3, 3, 3]) | |
| with col1: | |
| st.info("Update NCAAF schedule and ranks") | |
| if st.button("Update NCAAF", key='reset14'): | |
| st.write("Initiated") | |
| sh = gc.open_by_url(NCAAF_model_url) | |
| worksheet = sh.worksheet('ATLranks') | |
| ranks_df = DataFrame(worksheet.get_all_records()) | |
| ranks_dict = dict(zip(ranks_df.Team, ranks_df.ATL)) | |
| conf_dict = dict(zip(ranks_df.Team, ranks_df.Conference)) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('Injuries') | |
| injuries_df = DataFrame(worksheet.get_all_records()) | |
| wrong_names = injuries_df['Injury Name'].tolist() | |
| right_names = injuries_df['ATL Name'].tolist() | |
| for checkVar in range(len(wrong_names)): | |
| injuries_df['Team'] = injuries_df['Team'].replace(wrong_names, right_names) | |
| injuries_dict = dict(zip(injuries_df.Team, injuries_df.Team_Modifier)) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('HFA') | |
| hfa_df = DataFrame(worksheet.get_all_records()) | |
| hfa_dict = dict(zip(hfa_df.Team, hfa_df.HFA)) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('Odds') | |
| odds_df = DataFrame(worksheet.get_all_records()) | |
| odds_dict = dict(zip(odds_df.Point_Spread, odds_df.Favorite_Win_Chance)) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('Acronyms') | |
| acros_df = DataFrame(worksheet.get_all_records()) | |
| right_acro = acros_df['Team'].tolist() | |
| wrong_acro = acros_df['Acro'].tolist() | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('Add games') | |
| add_games_df = DataFrame(worksheet.get_all_records()) | |
| add_games_df.replace('', np.nan, inplace=True) | |
| neutral_dict = dict(zip(add_games_df.game_id, add_games_df.Neutral)) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('Completed games') | |
| comp_games_df = DataFrame(worksheet.get_all_records()) | |
| comp_games_df.replace('', np.nan, inplace=True) | |
| time.sleep(.5) | |
| worksheet = sh.worksheet('LY_scoring') | |
| lyscore_df = DataFrame(worksheet.get_all_records()) | |
| for checkVar in range(len(wrong_acro)): | |
| lyscore_df['Team'] = lyscore_df['Team'].replace(wrong_acro, right_acro) | |
| PFA_dict = dict(zip(lyscore_df.Team, lyscore_df.PF_G_adj)) | |
| PAA_dict = dict(zip(lyscore_df.Team, lyscore_df.PA_G_adj)) | |
| # Send a GET request to the API | |
| response = requests.get('https://www.pff.com/api/scoreboard/schedule?league=ncaa&season=2024') | |
| st.write("retreiving PFF data") | |
| # Check if the request was successful | |
| if response.status_code == 200: | |
| # Parse the JSON content | |
| data = response.json() | |
| # Extract the "weeks" object | |
| weeks = data.get('weeks', []) | |
| # Initialize an empty list to store game data | |
| games_list = [] | |
| team_list = [] | |
| # Iterate over each week and its games | |
| for week in weeks: | |
| week_number = week.get('week') | |
| for game in week.get('games', []): | |
| # Add week number to the game dictionary | |
| game['week'] = week_number | |
| away_franchise = game.get('away_franchise', {}) | |
| away_franchise_groups = away_franchise.get('groups', {}) | |
| away_conf = away_franchise_groups[0]['name'] | |
| home_franchise = game.get('home_franchise', {}) | |
| home_franchise_groups = home_franchise.get('groups', {}) | |
| home_conf = home_franchise_groups[0]['name'] | |
| # Flatten the away and home franchise data | |
| game_data = { | |
| 'game_id': game.get('external_game_id'), | |
| 'Day': game.get('kickoff_date'), | |
| 'CST': game.get('kickoff_raw'), | |
| 'away_id': away_franchise.get('abbreviation'), | |
| 'Away': away_franchise.get('city'), | |
| 'home_id': home_franchise.get('abbreviation'), | |
| 'Home': home_franchise.get('city') | |
| } | |
| home_data = { | |
| 'team': home_franchise.get('city'), | |
| 'conf': home_conf | |
| } | |
| away_data = { | |
| 'team': away_franchise.get('city'), | |
| 'conf': away_conf | |
| } | |
| merged_data = game_data | game | |
| team_data = home_data | away_data | |
| games_list.append(merged_data) | |
| team_list.append(home_data) | |
| team_list.append(away_data) | |
| # Create a DataFrame from the games list | |
| df = pd.DataFrame(games_list) | |
| team_df = pd.DataFrame(team_list) | |
| team_df = team_df.drop_duplicates(subset=['team', 'conf']) | |
| # Display the DataFrame | |
| print(df) | |
| else: | |
| print(f"Failed to retrieve data. HTTP Status code: {response.status_code}") | |
| st.write("Cleaning data") | |
| df_raw = df[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']] | |
| df_raw['conf_game'] = np.nan | |
| df_raw['Away_ATL'] = np.nan | |
| df_raw['Home_ATL'] = np.nan | |
| df_raw['Home Spread'] = np.nan | |
| df_raw['Proj Total'] = np.nan | |
| df_raw['Neutral'] = np.nan | |
| df_raw['Notes'] = np.nan | |
| df_raw['over_under'].fillna("", inplace=True) | |
| df_raw['over_under'] = pd.to_numeric(df_raw['over_under'], errors='coerce') | |
| df_raw = df_raw[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread', 'over_under', 'Proj Total', 'Day', 'CST', 'Neutral', 'Notes']] | |
| add_games_merge = add_games_df | |
| comp_games_merge = comp_games_df | |
| conf_adj = dict(zip(add_games_merge['game_id'], add_games_merge['conf_game'])) | |
| df_merge_1 = pd.concat([add_games_merge, df_raw]) | |
| df_cleaned = pd.concat([comp_games_merge, df_merge_1]) | |
| df_cleaned = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'point_spread', 'over_under', 'Day', 'CST']] | |
| df_cleaned = df_cleaned.drop_duplicates(subset=['game_id']) | |
def _cond_band_mean(row, df, atl_col, value_col):
    """Mean of ``df[value_col]`` over rows whose ``df[atl_col]`` lies within
    +/-5 of ``row[atl_col]`` (inclusive). Returns NaN if no rows match."""
    mask = (df[atl_col] >= row[atl_col] - 5) & (df[atl_col] <= row[atl_col] + 5)
    return df.loc[mask, value_col].mean()

def cond_away_PFA(row, df):
    """Mean away points-for-adjusted among games with a similar away ATL rating."""
    return _cond_band_mean(row, df, 'Away_ATL', 'Away_PFA')

def cond_home_PFA(row, df):
    """Mean home points-for-adjusted among games with a similar home ATL rating."""
    return _cond_band_mean(row, df, 'Home_ATL', 'Home_PFA')

def cond_away_PAA(row, df):
    """Mean away points-against-adjusted among games with a similar away ATL rating."""
    return _cond_band_mean(row, df, 'Away_ATL', 'Away_PAA')

def cond_home_PAA(row, df):
    """Mean home points-against-adjusted among games with a similar home ATL rating."""
    return _cond_band_mean(row, df, 'Home_ATL', 'Home_PAA')
# Normalize team acronyms.  Series.replace with parallel lists applies every
# wrong->right substitution in a single pass, so the original loop that
# repeated the identical call len(wrong_acro) times was redundant work.
# NOTE(review): assumes the acronym map has no chained entries (A->B while
# B->C); repeated application would only differ in that case — confirm.
df_cleaned['Away'] = df_cleaned['Away'].replace(wrong_acro, right_acro)
df_cleaned['Home'] = df_cleaned['Home'].replace(wrong_acro, right_acro)
# --- Enrich each game with conference, talent, injury, and scoring lookups ---
# Conference flag: 1 when both teams map to the same conference, else 0.
df_cleaned['Away_conf'] = df_cleaned['Away'].map(conf_dict)
df_cleaned['Home_conf'] = df_cleaned['Home'].map(conf_dict)
df_cleaned['conf_game_var'] = np.where((df_cleaned['Away_conf'] == df_cleaned['Home_conf']), 1, 0)
# Manual per-game override (conf_adj, keyed by game_id) wins over the inferred flag.
df_cleaned['conf_game'] = df_cleaned.apply(lambda row: conf_adj.get(row['game_id'], row['conf_game_var']), axis=1)
# Team talent levels (ATL) from the ranks sheet; teams missing from the map get NaN.
df_cleaned['Away_ATL'] = df_cleaned['Away'].map(ranks_dict)
df_cleaned['Home_ATL'] = df_cleaned['Home'].map(ranks_dict)
# Injury adjustments: blank or missing entries are treated as 0.
df_cleaned['Away_inj'] = df_cleaned['Away'].map(injuries_dict)
df_cleaned['Home_inj'] = df_cleaned['Home'].map(injuries_dict)
df_cleaned['Away_inj'] = df_cleaned['Away_inj'].replace(['', np.nan], 0)
df_cleaned['Home_inj'] = df_cleaned['Home_inj'].replace(['', np.nan], 0)
# Net injury modifier added to the home spread below.
# NOTE(review): sign convention (away minus home) is assumed to penalize the
# more-injured side in the spread formula — confirm against the model sheet.
df_cleaned['inj_mod'] = df_cleaned['Away_inj'] - df_cleaned['Home_inj']
# Per-team points-for / points-against averages; NaN where the team is unmapped
# (filled from the conditional averages computed next).
df_cleaned['Away_PFA'] = df_cleaned['Away'].map(PFA_dict)
df_cleaned['Home_PFA'] = df_cleaned['Home'].map(PFA_dict)
df_cleaned['Away_PAA'] = df_cleaned['Away'].map(PAA_dict)
df_cleaned['Home_PAA'] = df_cleaned['Home'].map(PAA_dict)
# Apply the function to each row in the DataFrame
# Similar-talent averages: for each game, mean PFA/PAA over all rows whose
# ATL is within +/-5 of this row's.  Row-wise apply over the full frame is
# O(n^2) but the frame is schedule-sized, so this is acceptable.
df_cleaned['cond_away_PFA'] = df_cleaned.apply(lambda row: cond_away_PFA(row, df_cleaned), axis=1)
df_cleaned['cond_home_PFA'] = df_cleaned.apply(lambda row: cond_home_PFA(row, df_cleaned), axis=1)
df_cleaned['cond_away_PAA'] = df_cleaned.apply(lambda row: cond_away_PAA(row, df_cleaned), axis=1)
df_cleaned['cond_home_PAA'] = df_cleaned.apply(lambda row: cond_home_PAA(row, df_cleaned), axis=1)
# Teams with non-positive ATL get fixed fallbacks instead of the conditional
# mean: 18 points-for, 36 points-against.
# NOTE(review): the provenance of these constants is not visible here —
# confirm before changing them.
df_cleaned['cond_away_PFA'] = np.where((df_cleaned['Away_ATL'] <= 0), 18, df_cleaned['cond_away_PFA'])
df_cleaned['cond_away_PAA'] = np.where((df_cleaned['Away_ATL'] <= 0), 36, df_cleaned['cond_away_PAA'])
df_cleaned['cond_home_PFA'] = np.where((df_cleaned['Home_ATL'] <= 0), 18, df_cleaned['cond_home_PFA'])
df_cleaned['cond_home_PAA'] = np.where((df_cleaned['Home_ATL'] <= 0), 36, df_cleaned['cond_home_PAA'])
# Direct per-team averages win; conditional averages only fill the gaps.
df_cleaned['Away_PFA'] = df_cleaned['Away_PFA'].fillna(df_cleaned['cond_away_PFA'])
df_cleaned['Away_PAA'] = df_cleaned['Away_PAA'].fillna(df_cleaned['cond_away_PAA'])
df_cleaned['Home_PFA'] = df_cleaned['Home_PFA'].fillna(df_cleaned['cond_home_PFA'])
df_cleaned['Home_PAA'] = df_cleaned['Home_PAA'].fillna(df_cleaned['cond_home_PAA'])
# Blend a team's offense 75/25 with the opponent's defense for the adjusted
# scoring projections (one pair from actual data, one from conditional data).
df_cleaned['Away_PFA_adj'] = (df_cleaned['Away_PFA'] * .75 + df_cleaned['Home_PAA'] * .25)
df_cleaned['Home_PFA_adj'] = (df_cleaned['Home_PFA'] * .75 + df_cleaned['Away_PAA'] * .25)
df_cleaned['Away_PFA_cond'] = (df_cleaned['cond_away_PFA'] * .75 + df_cleaned['cond_home_PAA'] * .25)
df_cleaned['Home_PFA_cond'] = (df_cleaned['cond_home_PFA'] * .75 + df_cleaned['cond_away_PAA'] * .25)
# Home-field advantage: zero for neutral-site games, otherwise per-team HFA.
df_cleaned['Neutral'] = df_cleaned['game_id'].map(neutral_dict)
df_cleaned['HFA'] = np.where(df_cleaned['Neutral'] == 1, 0, df_cleaned['Home'].map(hfa_dict))
# Blank the Neutral column again — the export only needs an empty placeholder.
df_cleaned['Neutral'] = np.nan
# Home spread: talent gap plus HFA, negated (favorite lays points), then the
# injury modifier.  Win probability comes from the spread->odds lookup.
df_cleaned['Home Spread'] = (((df_cleaned['Home_ATL'] - df_cleaned['Away_ATL']) + df_cleaned['HFA']) * -1) + df_cleaned['inj_mod']
df_cleaned['Win Prob'] = df_cleaned['Home Spread'].map(odds_dict)
df_cleaned['Spread Adj'] = np.nan
df_cleaned['Final Spread'] = np.nan
df_cleaned['Proj Total'] = df_cleaned['Away_PFA_adj'] + df_cleaned['Home_PFA_adj']
# BUG FIX: the original condition was `over_under != np.nan`, which is ALWAYS
# True (NaN compares unequal to everything, including itself), so the fallback
# branch was dead and correctness silently depended on the fillna below.
# notna() states the intent directly: blend 66/34 with the Vegas total when
# one exists, otherwise keep the model projection.
df_cleaned['Proj Total (adj)'] = np.where(
    df_cleaned['over_under'].notna(),
    (df_cleaned['over_under'] * .66 + df_cleaned['Proj Total'] * .34),
    df_cleaned['Proj Total'],
)
# Kept as a belt-and-suspenders fallback (covers NaN in Proj Total blends too).
df_cleaned['Proj Total (adj)'] = df_cleaned['Proj Total (adj)'].fillna(df_cleaned['Proj Total'])
df_cleaned['Total Adj'] = np.nan
df_cleaned['Final Total'] = np.nan
df_cleaned['Notes'] = np.nan
# Assemble the export frame for the Master_sched sheet.  .copy() makes the
# slice an independent frame so the in-place rename below doesn't trigger
# pandas' SettingWithCopyWarning (or no-op under copy-on-write).
export_df = df_cleaned[['pff_week', 'game_id', 'away_id', 'home_id', 'Away', 'Home', 'conf_game', 'Away_ATL', 'Home_ATL', 'point_spread', 'Home Spread',
                        'over_under', 'Proj Total (adj)', 'Day', 'CST', 'Neutral', 'Notes']].copy()
export_df.rename(columns={"pff_week": "week", "point_spread": "Vegas Spread", "over_under": "Vegas Total", "Proj Total (adj)": "Proj Total"}, inplace=True)
# (The original also aliased add_games_df as export_df_2, which was never
# used — removed as dead code.)
export_df['week'] = pd.to_numeric(export_df['week'], errors='coerce')
export_df = export_df.drop_duplicates(subset=['week', 'Away', 'Home'])
export_df = export_df.sort_values(by='week', ascending=True)
# Coerce the numeric columns so Sheets receives numbers, not strings.
export_df['Vegas Spread'] = pd.to_numeric(export_df['Vegas Spread'], errors='coerce')
export_df['Vegas Total'] = pd.to_numeric(export_df['Vegas Total'], errors='coerce')
export_df['Proj Total'] = pd.to_numeric(export_df['Proj Total'], errors='coerce')
export_df['Home Spread'] = pd.to_numeric(export_df['Home Spread'], errors='coerce')
# Sheets rejects NaN/inf in JSON payloads — blank them out before upload.
export_df.replace([np.nan, np.inf, -np.inf], '', inplace=True)
export_df = export_df.drop_duplicates(subset=['week', 'away_id', 'home_id'])
# Overwrite the Master_sched worksheet: clear columns A:P, then write the
# header row followed by the data rows.
sh = gc.open_by_url(NCAAF_model_url)
worksheet = sh.worksheet('Master_sched')
worksheet.batch_clear(['A:P'])
worksheet.update([export_df.columns.values.tolist()] + export_df.values.tolist())
st.write("Uploaded Master Schedule")
st.write("Finished NCAAF Script!")
with tab6:
    with st.container():
        st.header("Update Utilization Report Data")
        st.info("Hitting this button will update all UR data (player, season, team styles) across the site")

        def _clear_streamlit_state():
            """Drop cached data and every session key so the site reloads fresh."""
            st.cache_data.clear()
            # list() snapshots the keys so we don't mutate while iterating.
            for key in list(st.session_state.keys()):
                del st.session_state[key]

        def _post_update(url, success_msg, error_msg):
            """POST to a UR endpoint, report the outcome, then reset app state.

            Mirrors the original inline pattern: on HTTP 200 echo the JSON
            body; otherwise print the error label and a best-effort dump of
            the response (guarding against empty or non-JSON bodies).  State
            is cleared on both paths, as before.
            """
            response = requests.post(url, headers=headers, timeout=60)
            if response.status_code == 200:
                st.write(success_msg)
                st.write(response.json())
            else:
                st.write(error_msg)
                try:
                    if response.text.strip():  # Check if response has content
                        st.write(response.json())
                    else:
                        st.write(f"Empty response. Status code: {response.status_code}")
                except ValueError:
                    st.write(f"Invalid JSON response. Status code: {response.status_code}")
                    st.write(f"Response text: {response.text}")
            _clear_streamlit_state()

        if st.button("Update Scores", key='scores'):
            _post_update("https://api.fantasylife.com/api/utilization/v1/scores/2025",
                         "Uploading Player Feed!", "Error updating scores")
            time.sleep(3)
            _post_update("https://api.fantasylife.com/api/utilization/v1/current-season/update",
                         "Uploading Current Season!", "Error updating current season")
            time.sleep(3)
            # BUG FIX: the team-styles error label previously said
            # "Error updating current season" (copy/paste).
            _post_update("https://api.fantasylife.com/api/ur-team-styles/v1/current-season/update",
                         "Uploading Team Styles!", "Error updating team styles")
with tab7:
    with st.container():
        st.header("Update Data Sources")
        st.info("Select a specific data source to update and hit the button")
        choose_data_source = st.selectbox("Choose Data Source", data_update_options, key='choose_data_source')
        # Dict dispatch replaces the original if/elif chain: each selectable
        # source maps to its force-update endpoint.
        _SOURCE_ENDPOINTS = {
            'DFS': "https://api.fantasylife.com/api/dfs/v1/force-update",
            'DVP': "https://api.fantasylife.com/api/dvp/v1/force-update",
            'Air Yards': "https://api.fantasylife.com/api/air-yards/v1/force-update",
            'FAAB': "https://api.fantasylife.com/api/waivers/v1/force-update",
            'Strength of Sched': "https://api.fantasylife.com/api/sos/v1/force-update",
            'Betting Model': "https://api.fantasylife.com/api/nfl/v1/season/odds/update",
        }
        if st.button("Update Data", key='site_data_update'):
            url = _SOURCE_ENDPOINTS.get(choose_data_source)
            if url is None:
                # BUG FIX: the original fell through to `response.status_code`
                # with `response` unbound (NameError) for any source not in
                # the if/elif chain.
                st.write(f"No update endpoint configured for {choose_data_source}")
            else:
                response = requests.post(url, headers=headers, timeout=60)
                if response.status_code == 200:
                    st.write("Uploading!")
                    st.write(response.json())
                    st.cache_data.clear()
                    for key in list(st.session_state.keys()):
                        del st.session_state[key]
                else:
                    st.write("Error updating site data")
                    try:
                        if response.text.strip():  # Check if response has content
                            st.write(response.json())
                        else:
                            st.write(f"Empty response. Status code: {response.status_code}")
                    except ValueError:
                        st.write(f"Invalid JSON response. Status code: {response.status_code}")
                        st.write(f"Response text: {response.text}")
                    st.cache_data.clear()
                    for key in list(st.session_state.keys()):
                        del st.session_state[key]