# NFL_Stack_Finder / src / streamlit_app.py
import streamlit as st
import numpy as np
import pandas as pd
import pymongo
import re
import os
from itertools import combinations
st.set_page_config(layout="wide")
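# Cached MongoDB connection; prefers the MONGO_URI environment variable and falls back to Streamlit secrets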
@st.cache_resource
def init_conn():
uri = os.getenv('MONGO_URI')
if not uri:
uri = st.secrets['mongo_uri']
client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
db = client["NFL_Database"]
return db
db = init_conn()
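# The formatting maps and team-acronym fixes below appear to be carried over from the MLB version of this app; they are not referenced elsewhere in this file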
game_format = {'Win Percentage': '{:.2%}','First Inning Lead Percentage': '{:.2%}',
'Fifth Inning Lead Percentage': '{:.2%}', '8+ runs': '{:.2%}', 'DK LevX': '{:.2%}', 'FD LevX': '{:.2%}'}
team_roo_format = {'Top Score%': '{:.2%}','0 Runs': '{:.2%}', '1 Run': '{:.2%}', '2 Runs': '{:.2%}', '3 Runs': '{:.2%}', '4 Runs': '{:.2%}',
'5 Runs': '{:.2%}','6 Runs': '{:.2%}', '7 Runs': '{:.2%}', '8 Runs': '{:.2%}', '9 Runs': '{:.2%}', '10 Runs': '{:.2%}'}
wrong_acro = ['WSH', 'AZ', 'CHW']
right_acro = ['WAS', 'ARI', 'CWS']
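# Gold-themed styling for the segmented-control tabs and select boxes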
st.markdown("""
<style>
/* Tab styling */
.stElementContainer [data-baseweb="button-group"] {
gap: 2.000rem;
padding: 4px;
}
.stElementContainer [kind="segmented_control"] {
height: 2.000rem;
white-space: pre-wrap;
background-color: #DAA520;
color: white;
border-radius: 20px;
gap: 1px;
padding: 10px 20px;
font-weight: bold;
transition: all 0.3s ease;
}
.stElementContainer [kind="segmented_controlActive"] {
height: 3.000rem;
background-color: #DAA520;
border: 3px solid #FFD700;
border-radius: 10px;
color: black;
}
.stElementContainer [kind="segmented_control"]:hover {
background-color: #FFD700;
cursor: pointer;
}
div[data-baseweb="select"] > div {
background-color: #DAA520;
color: white;
}
</style>""", unsafe_allow_html=True)
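# Load player baselines, the DK/FD ROO projection tables, and the prebuilt DK/FD stack tables from MongoDB; kickers are filtered out of each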
@st.cache_data(ttl=600)
def init_baselines():
collection = db["Player_Baselines"]
cursor = collection.find()
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display[['name', 'Team', 'Opp', 'Position', 'Salary', 'team_plays', 'team_pass', 'team_rush', 'team_tds', 'team_pass_tds', 'team_rush_tds', 'dropbacks', 'pass_yards', 'pass_tds',
'rush_att', 'rush_yards', 'rush_tds', 'targets', 'rec', 'rec_yards', 'rec_tds', 'PPR', 'Half_PPR', 'Own']]
player_stats = raw_display[raw_display['Position'] != 'K']
collection = db["DK_NFL_ROO"]
cursor = collection.find()
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
load_display = raw_display[raw_display['Position'] != 'K']
dk_roo_raw = load_display.dropna(subset=['Median'])
collection = db["FD_NFL_ROO"]
cursor = collection.find()
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display.rename(columns={'player_ID': 'player_id'})
raw_display = raw_display[['Player', 'Position', 'Team', 'Opp', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '20+%', '2x%', '3x%', '4x%',
'Own', 'Small_Field_Own', 'Large_Field_Own', 'Cash_Field_Own', 'CPT_Own', 'LevX', 'version', 'slate', 'timestamp', 'player_id', 'site']]
load_display = raw_display[raw_display['Position'] != 'K']
fd_roo_raw = load_display.dropna(subset=['Median'])
collection = db["DK_DFS_Stacks"]
cursor = collection.find()
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
dk_stacks_raw = raw_display.copy()
collection = db["FD_DFS_Stacks"]
cursor = collection.find()
raw_display = pd.DataFrame(list(cursor))
raw_display = raw_display[['Team', 'QB', 'WR1_TE', 'WR2_TE', 'Total', 'Salary', 'Floor', 'Median', 'Ceiling', 'Top_finish', 'Top_5_finish', 'Top_10_finish', '60+%', '2x%', '3x%', '4x%', 'Own', 'LevX', 'slate', 'version']]
fd_stacks_raw = raw_display.copy()
return player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw
@st.cache_data
def convert_df_to_csv(df):
return df.to_csv().encode('utf-8')
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
app_load_reset_column, app_view_site_column = st.columns([1, 9])
with app_load_reset_column:
if st.button("Load/Reset Data", key='reset_data_button'):
st.cache_data.clear()
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
for key in list(st.session_state.keys()):
    del st.session_state[key]
with app_view_site_column:
with st.container():
app_view_column, app_site_column = st.columns([3, 3])
with app_view_column:
view_var = st.selectbox("Select view", ["Simple", "Advanced"], key='view_selectbox')
with app_site_column:
site_var = st.selectbox("What site do you want to view?", ('Draftkings', 'Fanduel'), key='site_selectbox')
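# Top-level navigation between the Stack Finder view and the user projection upload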
selected_tab = st.segmented_control(
"Select Tab",
options=["Stack Finder", "User Upload"],
selection_mode='single',
default='Stack Finder',
width='stretch',
label_visibility='collapsed',
key='tab_selector'
)
if selected_tab == 'Stack Finder':
with st.expander("Info and Filters"):
app_info_column, slate_choice_column, filtering_column, stack_info_column = st.columns(4)
with app_info_column:
if st.button("Load/Reset Data", key='reset1'):
st.cache_data.clear()
player_stats, dk_stacks_raw, fd_stacks_raw, dk_roo_raw, fd_roo_raw = init_baselines()
for key in list(st.session_state.keys()):
    del st.session_state[key]
st.info(f"Last Update: " + str(dk_roo_raw['timestamp'][0]) + f" CST")
with slate_choice_column:
slate_var1 = st.radio("What slate are you working with?", ('Main Slate', 'Secondary Slate', 'Late Slate', 'Thurs-Mon Slate', 'User Upload'), key='slate_var1')
if slate_var1 == 'User Upload':
    raw_baselines = st.session_state['proj_dataframe']
    raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
    qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
else:
raw_baselines = dk_roo_raw if site_var == 'Draftkings' else fd_roo_raw
raw_baselines = raw_baselines[raw_baselines['slate'] == slate_var1]
raw_baselines = raw_baselines.sort_values(by='Own', ascending=False)
qb_lookup = raw_baselines[raw_baselines['Position'] == 'QB']
with filtering_column:
split_var2 = st.radio("Would you like to run stack analysis for the full slate or individual teams?", ('Full Slate Run', 'Specific Teams'), key='split_var2')
if split_var2 == 'Specific Teams':
team_var2 = st.multiselect('Which teams would you like to include in the analysis?', options = raw_baselines['Team'].unique(), key='team_var2')
elif split_var2 == 'Full Slate Run':
team_var2 = raw_baselines.Team.unique().tolist()
pos_var2 = st.multiselect('What Positions would you like to view?', options = ['WR', 'TE', 'RB'], default = ['WR', 'TE', 'RB'], key='pos_var2')
with stack_info_column:
if site_var == 'Draftkings':
max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 50000, value = 35000, step = 100, key='max_sal2')
elif site_var == 'Fanduel':
max_sal2 = st.number_input('Max Salary', min_value = 5000, max_value = 35000, value = 25000, step = 100, key='max_sal2')
size_var2 = st.selectbox('What size of stacks are you analyzing?', options = ['QB+1', 'QB+2', 'QB+3'])
if size_var2 == 'QB+1':
stack_size = 2
if size_var2 == 'QB+2':
stack_size = 3
if size_var2 == 'QB+3':
stack_size = 4
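# Lookup tables keyed by player name; qb_dict keeps one QB per team (the last row wins if a team lists multiple QBs), and that QB anchors each stack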
team_dict = dict(zip(raw_baselines.Player, raw_baselines.Team))
proj_dict = dict(zip(raw_baselines.Player, raw_baselines.Median))
own_dict = dict(zip(raw_baselines.Player, raw_baselines.Own))
cost_dict = dict(zip(raw_baselines.Player, raw_baselines.Salary))
qb_dict = dict(zip(qb_lookup.Team, qb_lookup.Player))
if st.button("Run Stack Analysis", key='run_stack_analysis'):
# Roster construction limits are currently the same for DraftKings and FanDuel;
# max_salary and max_players are informational here, the filtering below uses max_sal2 directly
position_limits = {
    'QB': 1,
    'RB': 2,
    'WR': 3,
    'TE': 1,
    'UTIL': 1,
    'DST': 1,
}
max_salary = max_sal2
max_players = 9
stack_hold_container = st.empty()
comb_list = []
raw_baselines = raw_baselines[raw_baselines['Position'].str.contains('|'.join(pos_var2 + ['QB']))]
# Create a position dictionary mapping players to their eligible positions
pos_dict = dict(zip(raw_baselines.Player, raw_baselines.Position))
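# Positions can be slash-separated for multi-eligible players, so is_valid_combination() counts every listed position against the limits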
def is_valid_combination(combo):
# Count positions in this combination
position_counts = {pos: 0 for pos in position_limits.keys()}
# For each player in the combination
for player in combo:
# Get their eligible positions
player_positions = pos_dict[player].split('/')
for pos in player_positions:
position_counts[pos] += 1
# Check if any position exceeds its limit
for pos, limit in position_limits.items():
if position_counts[pos] > limit:
return False
return True
# Generate QB-anchored stack combinations for each selected team
comb_list = []
for cur_team in team_var2:
working_baselines = raw_baselines
working_baselines = working_baselines[working_baselines['Team'] == cur_team]
working_baselines = working_baselines[working_baselines['Position'] != 'DST']
working_baselines = working_baselines[working_baselines['Position'] != 'K']
qb_var = qb_dict[cur_team]
order_list = working_baselines['Player'].unique()
comb = combinations(order_list, stack_size)
# Keep only stacks that include the team's QB and respect the position limits
for i in comb:
    if qb_var in i and is_valid_combination(i):
        comb_list.append(i)
comb_DF = pd.DataFrame(comb_list)
# Sum projection, salary, and ownership across the players in each stack
comb_DF['Team'] = comb_DF[0].map(team_dict)
comb_DF['Proj'] = sum([comb_DF[i].map(proj_dict) for i in range(stack_size)])
comb_DF['Salary'] = sum([comb_DF[i].map(cost_dict) for i in range(stack_size)])
comb_DF['Own%'] = sum([comb_DF[i].map(own_dict) for i in range(stack_size)])
comb_DF = comb_DF.sort_values(by='Proj', ascending=False)
comb_DF = comb_DF.loc[comb_DF['Salary'] <= max_sal2]
comb_DF = comb_DF.reset_index(drop=True)
# Prune to the projection/ownership frontier: walking down the projection-sorted stacks,
# keep a stack only if its combined ownership is no higher than every higher-projected stack already kept
keep_rows = []
cur_own = None
for idx, own in comb_DF['Own%'].items():
    if cur_own is None or own <= cur_own:
        keep_rows.append(idx)
        cur_own = own
comb_DF = comb_DF.loc[keep_rows].reset_index(drop=True)
st.session_state['display_frame'] = comb_DF
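# Results are kept in session_state so they remain visible across Streamlit reruns triggered by other widgets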
if 'display_frame' in st.session_state:
st.dataframe(st.session_state['display_frame'].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), hide_index=True, use_container_width = True)
st.download_button(
label="Export Tables",
data=convert_df_to_csv(st.session_state['display_frame']),
file_name='NFL_Stack_Options_export.csv',
mime='text/csv',
)
else:
st.info("When you run the stack analysis, the results will be displayed here. Open up the 'Info and Filters' tab to check the settings.")
if selected_tab == 'User Upload':
st.info("The Projections file can have any columns in any order, but must contain columns explicitly named: 'Player', 'Salary', 'Position', 'Team', 'Opp', 'Median', and 'Own'.")
col1, col2 = st.columns([1, 5])
with col1:
proj_file = st.file_uploader("Upload Projections", key = 'proj_uploader')
if proj_file is not None:
try:
    st.session_state['proj_dataframe'] = pd.read_csv(proj_file)
except Exception:
    proj_file.seek(0)
    st.session_state['proj_dataframe'] = pd.read_excel(proj_file)
with col2:
if proj_file is not None:
st.dataframe(st.session_state['proj_dataframe'].style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width = True)