import streamlit as st
st.set_page_config(layout="wide")
import numpy as np
import pandas as pd
import gspread
import pymongo
import time

def init_conn():
    scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
    credentials = {
        "type": "service_account",
        "project_id": "model-sheets-connect",
        "private_key_id": "0e0bc2fdef04e771172fe5807392b9d6639d945e",
        "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDiu1v/e6KBKOcK\ncx0KQ23nZK3ZVvADYy8u/RUn/EDI82QKxTd/DizRLIV81JiNQxDJXSzgkbwKYEDm\n48E8zGvupU8+Nk76xNPakrQKy2Y8+VJlq5psBtGchJTuUSHcXU5Mg2JhQsB376PJ\nsCw552K6Pw8fpeMDJDZuxpKSkaJR6k9G5Dhf5q8HDXnC5Rh/PRFuKJ2GGRpX7n+2\nhT/sCax0J8jfdTy/MDGiDfJqfQrOPrMKELtsGHR9Iv6F4vKiDqXpKfqH+02E9ptz\nBk+MNcbZ3m90M8ShfRu28ebebsASfarNMzc3dk7tb3utHOGXKCf4tF8yYKo7x8BZ\noO9X4gSfAgMBAAECggEAU8ByyMpSKlTCF32TJhXnVJi/kS+IhC/Qn5JUDMuk4LXr\naAEWsWO6kV/ZRVXArjmuSzuUVrXumISapM9Ps5Ytbl95CJmGDiLDwRL815nvv6k3\nUyAS8EGKjz74RpoIoH6E7EWCAzxlnUgTn+5oP9Flije97epYk3H+e2f1f5e1Nn1d\nYNe8U+1HqJgILcxA1TAUsARBfoD7+K3z/8DVPHI8IpzAh6kTHqhqC23Rram4XoQ6\nzj/ZdVBjvnKuazETfsD+Vl3jGLQA8cKQVV70xdz3xwLcNeHsbPbpGBpZUoF73c65\nkAXOrjYl0JD5yAk+hmYhXr6H9c6z5AieuZGDrhmlFQKBgQDzV6LRXmjn4854DP/J\nI82oX2GcI4eioDZPRukhiQLzYerMQBmyqZIRC+/LTCAhYQSjNgMa+ZKyvLqv48M0\n/x398op/+n3xTs+8L49SPI48/iV+mnH7k0WI/ycd4OOKh8rrmhl/0EWb9iitwJYe\nMjTV/QxNEpPBEXfR1/mvrN/lVQKBgQDuhomOxUhWVRVH6x03slmyRBn0Oiw4MW+r\nrt1hlNgtVmTc5Mu+4G0USMZwYuOB7F8xG4Foc7rIlwS7Ic83jMJxemtqAelwOLdV\nXRLrLWJfX8+O1z/UE15l2q3SUEnQ4esPHbQnZowHLm0mdL14qSVMl1mu1XfsoZ3z\nJZTQb48CIwKBgEWbzQRtKD8lKDupJEYqSrseRbK/ax43DDITS77/DWwHl33D3FYC\nMblUm8ygwxQpR4VUfwDpYXBlklWcJovzamXpSnsfcYVkkQH47NuOXPXPkXQsw+w+\nDYcJzeu7F/vZqk9I7oBkWHUrrik9zPNoUzrfPvSRGtkAoTDSwibhoc5dAoGBAMHE\nK0T/ANeZQLNuzQps6S7G4eqjwz5W8qeeYxsdZkvWThOgDd/ewt3ijMnJm5X05hOn\ni4XF1euTuvUl7wbqYx76Wv3/1ZojiNNgy7ie4rYlyB/6vlBS97F4ZxJdxMlabbCW\n6b3EMWa4EVVXKoA1sCY7IVDE+yoQ1JYsZmq45YzPAoGBANWWHuVueFGZRDZlkNlK\nh5OmySmA0NdNug3G1upaTthyaTZ+CxGliwBqMHAwpkIRPwxUJpUwBTSEGztGTAxs\nWsUOVWlD2/1JaKSmHE8JbNg6sxLilcG6WEDzxjC5dLL1OrGOXj9WhC9KX3sq6qb6\nF/j9eUXfXjAlb042MphoF3ZC\n-----END PRIVATE KEY-----\n",
        "client_email": "gspread-connection@model-sheets-connect.iam.gserviceaccount.com",
        "client_id": "100369174533302798535",
        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
        "token_uri": "https://oauth2.googleapis.com/token",
        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
        "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/gspread-connection%40model-sheets-connect.iam.gserviceaccount.com"
    }
    uri = "mongodb+srv://multichem:Xr1q5wZdXPbxdUmJ@testcluster.lgwtp5i.mongodb.net/?retryWrites=true&w=majority&appName=TestCluster"
    client = pymongo.MongoClient(uri, retryWrites=True, serverSelectionTimeoutMS=500000)
    db = client["testing_db"]
    MLB_Data = 'https://docs.google.com/spreadsheets/d/1f42Ergav8K1VsOLOK9MUn7DM_MLMvv4GR2Fy7EfnZTc/edit#gid=340831852'
    gc_con = gspread.service_account_from_dict(credentials, scope)
    return gc_con, db, MLB_Data
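
# Connections are opened once at import time and shared by every callback below.
# NOTE: the service-account key and MongoDB URI are hard-coded above; in a deployed app these would
# typically be read from st.secrets or environment variables.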
gcservice_account, db, MLB_Data = init_conn()

percentages_format = {'Exposure': '{:.2%}'}
freq_format = {'Exposure': '{:.2%}', 'Proj Own': '{:.2%}', 'Edge': '{:.2%}'}
dk_columns = ['SP1', 'SP2', 'C', '1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']
fd_columns = ['P', 'C_1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3', 'UTIL', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']

def init_DK_seed_frames():
    collection = db["DK_MLB_seed_frame"]
    cursor = collection.find()
    raw_display = pd.DataFrame(list(cursor))
    raw_display = raw_display[['SP1', 'SP2', 'C', '1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']]
    DK_seed = raw_display.to_numpy()
    return DK_seed
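
# Positional layout of the DK seed frame (used by the index-based filters below):
# columns 0-9 = SP1..OF3, 10 = salary, 11 = proj, 12 = Team, 13 = Team_count, 14 = Secondary, 15 = Secondary_count.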

def init_FD_seed_frames():
    collection = db["FD_MLB_seed_frame"]
    cursor = collection.find()
    raw_display = pd.DataFrame(list(cursor))
    raw_display = raw_display[['P', 'C_1B', '2B', '3B', 'SS', 'OF1', 'OF2', 'OF3', 'UTIL', 'salary', 'proj', 'Team', 'Team_count', 'Secondary', 'Secondary_count']]
    FD_seed = raw_display.to_numpy()
    return FD_seed
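
# Positional layout of the FD seed frame:
# columns 0-8 = P..UTIL, 9 = salary, 10 = proj, 11 = Team, 12 = Team_count, 13 = Secondary, 14 = Secondary_count.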

def init_baselines():
    sh = gcservice_account.open_by_url(MLB_Data)

    worksheet = sh.worksheet('Merged_ROO')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    load_display['STDev'] = load_display['Median'] / 3
    load_display = load_display.drop_duplicates(subset=['Player'], keep='first')
    dk_raw = load_display.dropna(subset=['Median'])

    worksheet = sh.worksheet('Merged_FD_ROO')
    load_display = pd.DataFrame(worksheet.get_all_records())
    load_display.replace('', np.nan, inplace=True)
    load_display['STDev'] = load_display['Median'] / 3
    load_display = load_display.drop_duplicates(subset=['Player'], keep='first')
    fd_raw = load_display.dropna(subset=['Median'])

    return dk_raw, fd_raw
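
# convert_df labels the exported array with the module-level `column_names` chosen by the site radio
# buttons below, so the CSV headers always match the site currently selected in the UI.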
def convert_df(array):
    array = pd.DataFrame(array, columns=column_names)
    return array.to_csv().encode('utf-8')

def calculate_DK_value_frequencies(np_array):
    unique, counts = np.unique(np_array[:, :9], return_counts=True)
    frequencies = counts / len(np_array)  # Normalize by the number of rows
    combined_array = np.column_stack((unique, frequencies))
    return combined_array

def calculate_FD_value_frequencies(np_array):
    unique, counts = np.unique(np_array[:, :8], return_counts=True)
    frequencies = counts / len(np_array)  # Normalize by the number of rows
    combined_array = np.column_stack((unique, frequencies))
    return combined_array
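
# Helper exposure counters over a raw seed frame (first 9 roster columns for DK, first 8 for FD);
# they are not referenced elsewhere in this script. Hypothetical usage:
#   dk_exposures = calculate_DK_value_frequencies(DK_seed)  # -> array of (player, frequency) pairs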

def sim_contest(Sim_size, seed_frame, maps_dict, sharp_split, Contest_Size):
    SimVar = 1
    Sim_Winners = []
    fp_array = seed_frame[:sharp_split, :]

    # Pre-vectorize functions
    vec_projection_map = np.vectorize(maps_dict['Projection_map'].__getitem__)
    vec_stdev_map = np.vectorize(maps_dict['STDev_map'].__getitem__)

    st.write('Simulating contest on frames')

    while SimVar <= Sim_size:
        fp_random = fp_array[np.random.choice(fp_array.shape[0], Contest_Size)]
        sample_arrays1 = np.c_[
            fp_random,
            np.sum(
                np.random.normal(
                    loc=vec_projection_map(fp_random[:, :-6]),
                    scale=vec_stdev_map(fp_random[:, :-6])
                ),
                axis=1
            )
        ]
        sample_arrays = sample_arrays1
        final_array = sample_arrays[sample_arrays[:, 10].argsort()[::-1]]
        best_lineup = final_array[final_array[:, -1].argsort(kind='stable')[::-1][:1]]
        Sim_Winners.append(best_lineup)
        SimVar += 1
    return Sim_Winners
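
# sim_contest: each iteration draws Contest_Size lineups from the first `sharp_split` rows of the seed
# frame, adds a normally distributed score per player (mean = projection, sd = STDev) summed into a
# 'Fantasy' total, and keeps the single highest-scoring lineup. Sketch of how it is consumed below
# (matching the Run Contest Sim button), shown here only for reference:
#   winners = sim_contest(1000, st.session_state.working_seed, maps_dict, sharp_split, Contest_Size)
#   frame = pd.DataFrame(np.concatenate(winners), columns=column_names + ['Fantasy'])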

DK_seed = init_DK_seed_frames()
FD_seed = init_FD_seed_frames()
dk_raw, fd_raw = init_baselines()

tab1, tab2 = st.tabs(['Contest Sims', 'Data Export'])
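
# 'Data Export' tab: filter the seed frames and download lineups.
# 'Contest Sims' tab: simulate a contest and inspect winner exposures.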

with tab2:
    col1, col2 = st.columns([1, 7])

    with col1:
        if st.button("Load/Reset Data", key='reset1'):
            st.cache_data.clear()
            for key in st.session_state.keys():
                del st.session_state[key]
            DK_seed = init_DK_seed_frames()
            FD_seed = init_FD_seed_frames()
            dk_raw, fd_raw = init_baselines()
        slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Other Main Slate'))
        site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'))
        if site_var1 == 'Draftkings':
            raw_baselines = dk_raw
            column_names = dk_columns
            team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
            if team_var1 == 'Specific Teams':
                team_var2 = st.multiselect('Which teams do you want?', options=dk_raw['Team'].unique())
            elif team_var1 == 'Full Slate':
                team_var2 = dk_raw.Team.values.tolist()
            stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
            if stack_var1 == 'Specific Stack Sizes':
                stack_var2 = st.multiselect('Which stack sizes do you want?', options=[5, 4, 3, 2, 1, 0])
            elif stack_var1 == 'Full Slate':
                stack_var2 = [5, 4, 3, 2, 1, 0]
        elif site_var1 == 'Fanduel':
            raw_baselines = fd_raw
            column_names = fd_columns
            team_var1 = st.radio("Do you want a frame with specific teams?", ('Full Slate', 'Specific Teams'), key='team_var1')
            if team_var1 == 'Specific Teams':
                team_var2 = st.multiselect('Which teams do you want?', options=fd_raw['Team'].unique())
            elif team_var1 == 'Full Slate':
                team_var2 = fd_raw.Team.values.tolist()
            stack_var1 = st.radio("Do you want a frame with specific stack sizes?", ('Full Slate', 'Specific Stack Sizes'), key='stack_var1')
            if stack_var1 == 'Specific Stack Sizes':
                stack_var2 = st.multiselect('Which stack sizes do you want?', options=[4, 3, 2, 1, 0])
            elif stack_var1 == 'Full Slate':
                stack_var2 = [4, 3, 2, 1, 0]
if st.button("Prepare data export", key='data_export'): | |
data_export = st.session_state.working_seed.copy() | |
st.download_button( | |
label="Export optimals set", | |
data=convert_df(data_export), | |
file_name='MLB_optimals_export.csv', | |
mime='text/csv', | |
) | |
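
    # col2: apply the chosen team / stack-size filters to the cached working seed and preview the first
    # 1,000 rows. The filters are positional: Team / Team_count sit at columns 12 / 13 for DK and 11 / 12 for FD.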
    with col2:
        if st.button("Load Data", key='load_data'):
            if site_var1 == 'Draftkings':
                if 'working_seed' in st.session_state:
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], team_var2)]
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 13], stack_var2)]
                    st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
                elif 'working_seed' not in st.session_state:
                    st.session_state.working_seed = DK_seed.copy()
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], team_var2)]
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 13], stack_var2)]
                    st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
            elif site_var1 == 'Fanduel':
                if 'working_seed' in st.session_state:
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
                    st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
                elif 'working_seed' not in st.session_state:
                    st.session_state.working_seed = FD_seed.copy()
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 11], team_var2)]
                    st.session_state.working_seed = st.session_state.working_seed[np.isin(st.session_state.working_seed[:, 12], stack_var2)]
                    st.session_state.data_export_display = pd.DataFrame(st.session_state.working_seed[0:1000], columns=column_names)
        with st.container():
            if 'data_export_display' in st.session_state:
                st.dataframe(st.session_state.data_export_display.style.format(freq_format, precision=2), use_container_width=True)

with tab1:
    col1, col2 = st.columns([1, 7])

    with col1:
        if st.button("Load/Reset Data", key='reset2'):
            st.cache_data.clear()
            for key in st.session_state.keys():
                del st.session_state[key]
            DK_seed = init_DK_seed_frames()
            FD_seed = init_FD_seed_frames()
            dk_raw, fd_raw = init_baselines()
        sim_slate_var1 = st.radio("Which data are you loading?", ('Main Slate', 'Other Main Slate'), key='sim_slate_var1')
        sim_site_var1 = st.radio("What site are you working with?", ('Draftkings', 'Fanduel'), key='sim_site_var1')
        if sim_site_var1 == 'Draftkings':
            raw_baselines = dk_raw
            column_names = dk_columns
        elif sim_site_var1 == 'Fanduel':
            raw_baselines = fd_raw
            column_names = fd_columns
        contest_var1 = st.selectbox("What contest size are you simulating?", ('Small', 'Medium', 'Large', 'Custom'))
        if contest_var1 == 'Small':
            Contest_Size = 1000
        elif contest_var1 == 'Medium':
            Contest_Size = 5000
        elif contest_var1 == 'Large':
            Contest_Size = 10000
        elif contest_var1 == 'Custom':
            Contest_Size = st.number_input("Insert contest size", value=100, placeholder="Type a number under 10,000...")
        strength_var1 = st.selectbox("How sharp is the field in the contest?", ('Very', 'Above Average', 'Average', 'Below Average', 'Not Very'))
        if strength_var1 == 'Not Very':
            sharp_split = 500000
        elif strength_var1 == 'Below Average':
            sharp_split = 400000
        elif strength_var1 == 'Average':
            sharp_split = 300000
        elif strength_var1 == 'Above Average':
            sharp_split = 200000
        elif strength_var1 == 'Very':
            sharp_split = 100000
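
    # col2: run the simulation. `sharp_split` caps how deep into the seed frame the simulated field is drawn;
    # a sharper field samples only the first 100,000 rows (this assumes the seed frame is ordered strongest-first).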
    with col2:
        if st.button("Run Contest Sim"):
            if 'working_seed' in st.session_state:
                maps_dict = {
                    'Projection_map': dict(zip(raw_baselines.Player, raw_baselines.Median)),
                    'Salary_map': dict(zip(raw_baselines.Player, raw_baselines.Salary)),
                    'Pos_map': dict(zip(raw_baselines.Player, raw_baselines.Position)),
                    'Own_map': dict(zip(raw_baselines.Player, raw_baselines['Own%'])),
                    'Team_map': dict(zip(raw_baselines.Player, raw_baselines.Team)),
                    'STDev_map': dict(zip(raw_baselines.Player, raw_baselines.STDev))
                }
                Sim_Winners = sim_contest(1000, st.session_state.working_seed, maps_dict, sharp_split, Contest_Size)

                # Initial setup
                Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
                Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
                Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
                Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))

                # Type Casting
                type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
                Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)

                # Sorting
                st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
                st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)

                # Data Copying
                st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
                st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
            else:
                if sim_site_var1 == 'Draftkings':
                    st.session_state.working_seed = DK_seed.copy()
                elif sim_site_var1 == 'Fanduel':
                    st.session_state.working_seed = FD_seed.copy()
                maps_dict = {
                    'Projection_map': dict(zip(raw_baselines.Player, raw_baselines.Median)),
                    'Salary_map': dict(zip(raw_baselines.Player, raw_baselines.Salary)),
                    'Pos_map': dict(zip(raw_baselines.Player, raw_baselines.Position)),
                    'Own_map': dict(zip(raw_baselines.Player, raw_baselines['Own%'])),
                    'Team_map': dict(zip(raw_baselines.Player, raw_baselines.Team)),
                    'STDev_map': dict(zip(raw_baselines.Player, raw_baselines.STDev))
                }
                Sim_Winners = sim_contest(1000, st.session_state.working_seed, maps_dict, sharp_split, Contest_Size)

                # Initial setup
                Sim_Winner_Frame = pd.DataFrame(np.concatenate(Sim_Winners), columns=column_names + ['Fantasy'])
                Sim_Winner_Frame['GPP_Proj'] = (Sim_Winner_Frame['proj'] + Sim_Winner_Frame['Fantasy']) / 2
                Sim_Winner_Frame['unique_id'] = Sim_Winner_Frame['proj'].astype(str) + Sim_Winner_Frame['salary'].astype(str) + Sim_Winner_Frame['Team'].astype(str) + Sim_Winner_Frame['Secondary'].astype(str)
                Sim_Winner_Frame = Sim_Winner_Frame.assign(win_count=Sim_Winner_Frame['unique_id'].map(Sim_Winner_Frame['unique_id'].value_counts()))

                # Type Casting
                type_cast_dict = {'salary': int, 'proj': np.float16, 'Fantasy': np.float16, 'GPP_Proj': np.float32}
                Sim_Winner_Frame = Sim_Winner_Frame.astype(type_cast_dict)

                # Sorting
                st.session_state.Sim_Winner_Frame = Sim_Winner_Frame.sort_values(by=['win_count', 'GPP_Proj'], ascending=[False, False]).copy().drop_duplicates(subset='unique_id').head(100)
                st.session_state.Sim_Winner_Frame.drop(columns='unique_id', inplace=True)

                # Data Copying
                st.session_state.Sim_Winner_Export = Sim_Winner_Frame.copy()
                st.session_state.Sim_Winner_Display = Sim_Winner_Frame.copy()
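
            # Build exposure tables from the simulated winners (one winning lineup per sim, 1,000 sims).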
            freq_copy = st.session_state.Sim_Winner_Display
            if sim_site_var1 == 'Draftkings':
                freq_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 0:10].values, return_counts=True)),
                                            columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                freq_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 0:9].values, return_counts=True)),
                                            columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            freq_working['Freq'] = freq_working['Freq'].astype(int)
            freq_working['Position'] = freq_working['Player'].map(maps_dict['Pos_map'])
            freq_working['Salary'] = freq_working['Player'].map(maps_dict['Salary_map'])
            freq_working['Proj Own'] = freq_working['Player'].map(maps_dict['Own_map']) / 100
            freq_working['Exposure'] = freq_working['Freq'] / 1000
            freq_working['Edge'] = freq_working['Exposure'] - freq_working['Proj Own']
            freq_working['Team'] = freq_working['Player'].map(maps_dict['Team_map'])
            st.session_state.player_freq = freq_working.copy()
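
            # Starting-pitcher exposure: DK rosters two SPs (columns 0-1), FD a single P (column 0).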
            if sim_site_var1 == 'Draftkings':
                sp_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 0:2].values, return_counts=True)),
                                          columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                sp_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 0:1].values, return_counts=True)),
                                          columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            sp_working['Freq'] = sp_working['Freq'].astype(int)
            sp_working['Position'] = sp_working['Player'].map(maps_dict['Pos_map'])
            sp_working['Salary'] = sp_working['Player'].map(maps_dict['Salary_map'])
            sp_working['Proj Own'] = sp_working['Player'].map(maps_dict['Own_map']) / 100
            sp_working['Exposure'] = sp_working['Freq'] / 1000
            sp_working['Edge'] = sp_working['Exposure'] - sp_working['Proj Own']
            sp_working['Team'] = sp_working['Player'].map(maps_dict['Team_map'])
            st.session_state.sp_freq = sp_working.copy()
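
            # Primary-stack team exposure (Team column: index 12 for DK, 11 for FD).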
            if sim_site_var1 == 'Draftkings':
                team_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 12:13].values, return_counts=True)),
                                            columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                team_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 11:12].values, return_counts=True)),
                                            columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            team_working['Freq'] = team_working['Freq'].astype(int)
            team_working['Exposure'] = team_working['Freq'] / 1000
            st.session_state.team_freq = team_working.copy()
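
            # Stack-size exposure (Team_count column: index 13 for DK, 12 for FD).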
            if sim_site_var1 == 'Draftkings':
                stack_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 13:14].values, return_counts=True)),
                                             columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            elif sim_site_var1 == 'Fanduel':
                stack_working = pd.DataFrame(np.column_stack(np.unique(freq_copy.iloc[:, 12:13].values, return_counts=True)),
                                             columns=['Player', 'Freq']).sort_values('Freq', ascending=False).reset_index(drop=True)
            stack_working['Freq'] = stack_working['Freq'].astype(int)
            stack_working['Exposure'] = stack_working['Freq'] / 1000
            st.session_state.stack_freq = stack_working.copy()

        with st.container():
            if st.button("Reset Sim", key='reset_sim'):
                for key in st.session_state.keys():
                    del st.session_state[key]
            if 'player_freq' in st.session_state:
                player_split_var2 = st.radio("Are you wanting to isolate any lineups with specific players?", ('Full Players', 'Specific Players'), key='player_split_var2')
                if player_split_var2 == 'Specific Players':
                    find_var2 = st.multiselect('Which players must be included in the lineups?', options=st.session_state.player_freq['Player'].unique())
                elif player_split_var2 == 'Full Players':
                    find_var2 = st.session_state.player_freq.Player.values.tolist()
                if player_split_var2 == 'Specific Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame[np.equal.outer(st.session_state.Sim_Winner_Frame.to_numpy(), find_var2).any(axis=1).all(axis=1)]
                if player_split_var2 == 'Full Players':
                    st.session_state.Sim_Winner_Display = st.session_state.Sim_Winner_Frame
            if 'Sim_Winner_Display' in st.session_state:
                st.dataframe(st.session_state.Sim_Winner_Display.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(precision=2), use_container_width=True)
            if 'Sim_Winner_Export' in st.session_state:
                st.download_button(
                    label="Export Full Frame",
                    data=st.session_state.Sim_Winner_Export.to_csv().encode('utf-8'),
                    file_name='MLB_consim_export.csv',
                    mime='text/csv',
                )

        with st.container():
            tab1, tab2, tab3, tab4 = st.tabs(['Overall Exposures', 'SP Exposures', 'Team Exposures', 'Stack Size Exposures'])
            with tab1:
                if 'player_freq' in st.session_state:
                    st.dataframe(st.session_state.player_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.player_freq.to_csv().encode('utf-8'),
                        file_name='player_freq_export.csv',
                        mime='text/csv',
                        key='overall'
                    )
            with tab2:
                if 'sp_freq' in st.session_state:
                    st.dataframe(st.session_state.sp_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(freq_format, precision=2), use_container_width=True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.sp_freq.to_csv().encode('utf-8'),
                        file_name='sp_freq.csv',
                        mime='text/csv',
                        key='sp'
                    )
            with tab3:
                if 'team_freq' in st.session_state:
                    st.dataframe(st.session_state.team_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(percentages_format, precision=2), use_container_width=True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.team_freq.to_csv().encode('utf-8'),
                        file_name='team_freq.csv',
                        mime='text/csv',
                        key='team'
                    )
            with tab4:
                if 'stack_freq' in st.session_state:
                    st.dataframe(st.session_state.stack_freq.style.background_gradient(axis=0).background_gradient(cmap='RdYlGn').format(percentages_format, precision=2), use_container_width=True)
                    st.download_button(
                        label="Export Exposures",
                        data=st.session_state.stack_freq.to_csv().encode('utf-8'),
                        file_name='stack_freq.csv',
                        mime='text/csv',
                        key='stack'
                    )