James McCool committed on
Commit
4f967b3
·
1 Parent(s): fa35534

Refactor app.py to streamline data handling by removing 'display_frame_source' checks. Update working_frame and export_merge assignments to ensure consistent data processing across functionalities, including filtering, trimming, and exposure management, enhancing overall code clarity and maintainability.

Browse files
Files changed (1) hide show
  1. app.py +86 -158
app.py CHANGED
@@ -913,7 +913,6 @@ with tab1:
913
 
914
  with tab2:
915
  if 'origin_portfolio' in st.session_state and 'projections_df' in st.session_state:
916
- st.session_state['display_frame_source'] = 'Portfolio'
917
  with st.container():
918
  col1, col2 = st.columns(2)
919
  with col1:
@@ -1034,9 +1033,13 @@ with tab2:
1034
  stack_remove_toggle = st.selectbox("Remove specific stacks?", options=['No', 'Yes'], index=0)
1035
  stack_remove = st.multiselect("If Specific Stacks, Which to remove?", options=sorted(list(set(st.session_state['stack_dict'].values()))), default=[])
1036
 
1037
- submitted = st.form_submit_button("Submit")
 
 
 
 
1038
 
1039
- if submitted:
1040
  st.session_state['settings_base'] = False
1041
  parsed_frame = st.session_state['working_frame'].copy()
1042
  parsed_frame = parsed_frame[parsed_frame['Dupes'] <= max_dupes]
@@ -1053,12 +1056,27 @@ with tab2:
1053
  parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
1054
  else:
1055
  parsed_frame = parsed_frame
1056
- if st.session_state['display_frame_source'] == 'Portfolio':
1057
- st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1058
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1059
- else:
1060
- st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1061
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1062
 
1063
  with st.expander('Micro Filter Options'):
1064
  with st.form(key='micro_filter_form'):
@@ -1122,12 +1140,8 @@ with tab2:
1122
 
1123
  if size_include:
1124
  parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)]
1125
- if st.session_state['display_frame_source'] == 'Portfolio':
1126
- st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1127
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1128
- else:
1129
- st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1130
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
1131
 
1132
  with st.expander('Trimming Options'):
1133
  with st.form(key='trim_form'):
@@ -1159,14 +1173,9 @@ with tab2:
1159
  st.session_state['settings_base'] = False
1160
  st.write('initiated')
1161
  parsed_frame = st.session_state['working_frame'].copy()
1162
- if st.session_state['display_frame_source'] == 'Portfolio':
1163
- parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1164
- st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False)
1165
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1166
- else:
1167
- parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1168
- st.session_state['export_base'] = parsed_frame.copy()
1169
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
1170
  with st.expander('Presets'):
1171
  st.info("Still heavily in testing here, I'll announce when they are ready for use.")
1172
  with st.form(key='Small Field Preset'):
@@ -1175,33 +1184,19 @@ with tab2:
1175
  submitted = st.form_submit_button("Submit")
1176
  if submitted:
1177
  st.session_state['settings_base'] = False
1178
- if st.session_state['display_frame_source'] == 'Portfolio':
1179
- if preset_choice == 'Small Field (Heavy Own)':
1180
- parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1181
- elif preset_choice == 'Large Field (Manage Diversity)':
1182
- parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1183
- elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1184
- parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1185
- elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1186
- parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1187
- elif preset_choice == 'Reduce Volatility (Manage Own)':
1188
- parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1189
-
1190
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1191
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1192
- else:
1193
- if preset_choice == 'Small Field (Heavy Own)':
1194
- parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1195
- elif preset_choice == 'Large Field (Manage Diversity)':
1196
- parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1197
- elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1198
- parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1199
- elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1200
- parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1201
- elif preset_choice == 'Reduce Volatility (Manage Own)':
1202
- parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1203
- st.session_state['export_base'] = parsed_frame.copy()
1204
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
1205
  with st.expander('Stratify'):
1206
  with st.form(key='Stratification'):
1207
  sorting_choice = st.selectbox("Stat Choice", options=['median', 'Own', 'Weighted Own', 'Geomean', 'Lineup Edge', 'Finish_percentile', 'Diversity'], index=0)
@@ -1209,14 +1204,9 @@ with tab2:
1209
  submitted = st.form_submit_button("Submit")
1210
  if submitted:
1211
  st.session_state['settings_base'] = False
1212
- if st.session_state['display_frame_source'] == 'Portfolio':
1213
- parsed_frame = stratification_function(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var, sorting_choice)
1214
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1215
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1216
- else:
1217
- parsed_frame = stratification_function(st.session_state['export_base'], lineup_target, excluded_cols, sport_var, sorting_choice)
1218
- st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
1219
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
1220
  with st.expander('Exposure Management'):
1221
  with st.form(key='Exposures'):
1222
  exposure_player = st.selectbox("Player", options=sorted(list(player_names)))
@@ -1229,40 +1219,10 @@ with tab2:
1229
  submitted = st.form_submit_button("Submit")
1230
  if submitted:
1231
  st.session_state['settings_base'] = False
1232
- if st.session_state['display_frame_source'] == 'Portfolio':
1233
- parsed_frame = exposure_spread(st.session_state['working_frame'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1234
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1235
- if type_var == 'Classic':
1236
- if sport_var == 'CS2' or sport_var == 'LOL':
1237
- # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1238
- st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1239
- lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1240
- sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1241
- axis=1
1242
- )
1243
-
1244
- # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1245
- st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(
1246
- lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1247
- sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1248
- axis=1
1249
- )
1250
-
1251
- # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1252
- st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(
1253
- lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1254
- sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1255
- axis=1
1256
- )
1257
-
1258
- elif sport_var != 'CS2' and sport_var != 'LOL':
1259
- st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1260
- st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1261
- st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1262
- if 'stack_dict' in st.session_state:
1263
- st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].index.map(st.session_state['stack_dict'])
1264
- st.session_state['working_frame']['Size'] = st.session_state['working_frame'].index.map(st.session_state['size_dict'])
1265
- elif type_var == 'Showdown':
1266
  # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1267
  st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1268
  lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
@@ -1283,74 +1243,42 @@ with tab2:
1283
  sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1284
  axis=1
1285
  )
1286
- # st.session_state['working_frame']['Own'] = st.session_state['working_frame']['Own'].astype('float32')
1287
- st.session_state['working_frame']['median'] = st.session_state['working_frame']['median'].astype('float32')
1288
- st.session_state['working_frame']['salary'] = st.session_state['working_frame']['salary'].astype('uint16')
 
 
 
 
 
 
 
 
 
 
 
 
1289
 
1290
- print(st.session_state['working_frame'].head(10))
1291
- st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1292
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1293
- else:
1294
- parsed_frame = exposure_spread(st.session_state['export_base'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1295
- st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
1296
- if type_var == 'Classic':
1297
- if sport_var == 'CS2' or sport_var == 'LOL':
1298
- # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1299
- st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(
1300
- lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1301
- sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1302
- axis=1
1303
- )
1304
-
1305
- # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1306
- st.session_state['export_base']['median'] = st.session_state['export_base'].apply(
1307
- lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1308
- sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1309
- axis=1
1310
- )
1311
-
1312
- # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1313
- st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(
1314
- lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1315
- sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1316
- axis=1
1317
- )
1318
-
1319
- elif sport_var != 'CS2' and sport_var != 'LOL':
1320
- st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1321
- st.session_state['export_base']['median'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1322
- st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1323
- if 'stack_dict' in st.session_state:
1324
- st.session_state['export_base']['Stack'] = st.session_state['export_base'].index.map(st.session_state['stack_dict'])
1325
- st.session_state['export_base']['Size'] = st.session_state['export_base'].index.map(st.session_state['size_dict'])
1326
- elif type_var == 'Showdown':
1327
- # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1328
- st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(
1329
- lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1330
- sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1331
- axis=1
1332
- )
1333
-
1334
- # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1335
- st.session_state['export_base']['median'] = st.session_state['export_base'].apply(
1336
- lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1337
- sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1338
- axis=1
1339
- )
1340
-
1341
- # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1342
- st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(
1343
- lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1344
- sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1345
- axis=1
1346
- )
1347
- # st.session_state['export_base']['Own'] = st.session_state['export_base']['Own'].astype('float32')
1348
- st.session_state['export_base']['median'] = st.session_state['export_base']['median'].astype('float32')
1349
- st.session_state['export_base']['salary'] = st.session_state['export_base']['salary'].astype('uint16')
1350
 
1351
- print(st.session_state['export_base'].head(10))
1352
- st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1353
- st.session_state['export_merge'] = st.session_state['export_base'].copy()
 
 
 
 
 
 
 
 
 
 
1354
 
1355
  with st.container():
1356
  if 'export_base' not in st.session_state:
 
913
 
914
  with tab2:
915
  if 'origin_portfolio' in st.session_state and 'projections_df' in st.session_state:
 
916
  with st.container():
917
  col1, col2 = st.columns(2)
918
  with col1:
 
1033
  stack_remove_toggle = st.selectbox("Remove specific stacks?", options=['No', 'Yes'], index=0)
1034
  stack_remove = st.multiselect("If Specific Stacks, Which to remove?", options=sorted(list(set(st.session_state['stack_dict'].values()))), default=[])
1035
 
1036
+ submitted_col, export_col = st.columns(2)
1037
+ with submitted_col:
1038
+ reg_submitted = st.form_submit_button("regSubmit")
1039
+ with export_col:
1040
+ exp_submitted = st.form_submit_button("expSubmit")
1041
 
1042
+ if reg_submitted:
1043
  st.session_state['settings_base'] = False
1044
  parsed_frame = st.session_state['working_frame'].copy()
1045
  parsed_frame = parsed_frame[parsed_frame['Dupes'] <= max_dupes]
 
1056
  parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
1057
  else:
1058
  parsed_frame = parsed_frame
1059
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1060
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1061
+ if exp_submitted:
1062
+ st.session_state['settings_base'] = False
1063
+ parsed_frame = st.session_state['working_frame'].copy()
1064
+ parsed_frame = parsed_frame[parsed_frame['Dupes'] <= max_dupes]
1065
+ parsed_frame = parsed_frame[parsed_frame['salary'] >= min_salary]
1066
+ parsed_frame = parsed_frame[parsed_frame['salary'] <= max_salary]
1067
+ parsed_frame = parsed_frame[parsed_frame['Finish_percentile'] <= max_finish_percentile]
1068
+ parsed_frame = parsed_frame[parsed_frame['Lineup Edge'] >= min_lineup_edge]
1069
+ if 'Stack' in parsed_frame.columns:
1070
+ if stack_include_toggle == 'All Stacks':
1071
+ parsed_frame = parsed_frame
1072
+ else:
1073
+ parsed_frame = parsed_frame[parsed_frame['Stack'].isin(stack_selections)]
1074
+ if stack_remove_toggle == 'Yes':
1075
+ parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
1076
+ else:
1077
+ parsed_frame = parsed_frame
1078
+ st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1079
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1080
 
1081
  with st.expander('Micro Filter Options'):
1082
  with st.form(key='micro_filter_form'):
 
1140
 
1141
  if size_include:
1142
  parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)]
1143
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1144
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
1145
 
1146
  with st.expander('Trimming Options'):
1147
  with st.form(key='trim_form'):
 
1173
  st.session_state['settings_base'] = False
1174
  st.write('initiated')
1175
  parsed_frame = st.session_state['working_frame'].copy()
1176
+ parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1177
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False)
1178
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
 
1179
  with st.expander('Presets'):
1180
  st.info("Still heavily in testing here, I'll announce when they are ready for use.")
1181
  with st.form(key='Small Field Preset'):
 
1184
  submitted = st.form_submit_button("Submit")
1185
  if submitted:
1186
  st.session_state['settings_base'] = False
1187
+ if preset_choice == 'Small Field (Heavy Own)':
1188
+ parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1189
+ elif preset_choice == 'Large Field (Manage Diversity)':
1190
+ parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1191
+ elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1192
+ parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1193
+ elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1194
+ parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1195
+ elif preset_choice == 'Reduce Volatility (Manage Own)':
1196
+ parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1197
+
1198
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1199
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1200
  with st.expander('Stratify'):
1201
  with st.form(key='Stratification'):
1202
  sorting_choice = st.selectbox("Stat Choice", options=['median', 'Own', 'Weighted Own', 'Geomean', 'Lineup Edge', 'Finish_percentile', 'Diversity'], index=0)
 
1204
  submitted = st.form_submit_button("Submit")
1205
  if submitted:
1206
  st.session_state['settings_base'] = False
1207
+ parsed_frame = stratification_function(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var, sorting_choice)
1208
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1209
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
 
1210
  with st.expander('Exposure Management'):
1211
  with st.form(key='Exposures'):
1212
  exposure_player = st.selectbox("Player", options=sorted(list(player_names)))
 
1219
  submitted = st.form_submit_button("Submit")
1220
  if submitted:
1221
  st.session_state['settings_base'] = False
1222
+ parsed_frame = exposure_spread(st.session_state['working_frame'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1223
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1224
+ if type_var == 'Classic':
1225
+ if sport_var == 'CS2' or sport_var == 'LOL':
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1226
  # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1227
  st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1228
  lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
 
1243
  sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1244
  axis=1
1245
  )
1246
+
1247
+ elif sport_var != 'CS2' and sport_var != 'LOL':
1248
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1249
+ st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1250
+ st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1251
+ if 'stack_dict' in st.session_state:
1252
+ st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].index.map(st.session_state['stack_dict'])
1253
+ st.session_state['working_frame']['Size'] = st.session_state['working_frame'].index.map(st.session_state['size_dict'])
1254
+ elif type_var == 'Showdown':
1255
+ # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1256
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1257
+ lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1258
+ sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1259
+ axis=1
1260
+ )
1261
 
1262
+ # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1263
+ st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(
1264
+ lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1265
+ sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1266
+ axis=1
1267
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1268
 
1269
+ # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1270
+ st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(
1271
+ lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1272
+ sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1273
+ axis=1
1274
+ )
1275
+ # st.session_state['working_frame']['Own'] = st.session_state['working_frame']['Own'].astype('float32')
1276
+ st.session_state['working_frame']['median'] = st.session_state['working_frame']['median'].astype('float32')
1277
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame']['salary'].astype('uint16')
1278
+
1279
+ print(st.session_state['working_frame'].head(10))
1280
+ st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1281
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1282
 
1283
  with st.container():
1284
  if 'export_base' not in st.session_state: