James McCool committed on
Commit
fa35534
·
1 Parent(s): 57d6a24

Enhance app.py to differentiate data handling based on 'display_frame_source'. Implement conditional logic for working_frame and export_merge updates in various functionalities, including filtering, trimming, and exposure management, ensuring accurate data processing for both Portfolio and Base sources.

Browse files
Files changed (1) hide show
  1. app.py +156 -61
app.py CHANGED
@@ -913,6 +913,7 @@ with tab1:
913
 
914
  with tab2:
915
  if 'origin_portfolio' in st.session_state and 'projections_df' in st.session_state:
 
916
  with st.container():
917
  col1, col2 = st.columns(2)
918
  with col1:
@@ -1052,8 +1053,12 @@ with tab2:
1052
  parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
1053
  else:
1054
  parsed_frame = parsed_frame
1055
- st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1056
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
1057
 
1058
  with st.expander('Micro Filter Options'):
1059
  with st.form(key='micro_filter_form'):
@@ -1117,9 +1122,12 @@ with tab2:
1117
 
1118
  if size_include:
1119
  parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)]
1120
-
1121
- st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1122
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
1123
 
1124
  with st.expander('Trimming Options'):
1125
  with st.form(key='trim_form'):
@@ -1151,10 +1159,14 @@ with tab2:
1151
  st.session_state['settings_base'] = False
1152
  st.write('initiated')
1153
  parsed_frame = st.session_state['working_frame'].copy()
1154
-
1155
- st.session_state['working_frame'] = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1156
- st.session_state['working_frame'] = st.session_state['working_frame'].sort_values(by='median', ascending=False)
1157
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
1158
  with st.expander('Presets'):
1159
  st.info("Still heavily in testing here, I'll announce when they are ready for use.")
1160
  with st.form(key='Small Field Preset'):
@@ -1163,18 +1175,33 @@ with tab2:
1163
  submitted = st.form_submit_button("Submit")
1164
  if submitted:
1165
  st.session_state['settings_base'] = False
1166
- if preset_choice == 'Small Field (Heavy Own)':
1167
- parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1168
- elif preset_choice == 'Large Field (Manage Diversity)':
1169
- parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1170
- elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1171
- parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1172
- elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1173
- parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1174
- elif preset_choice == 'Reduce Volatility (Manage Own)':
1175
- parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1176
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1177
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1178
  with st.expander('Stratify'):
1179
  with st.form(key='Stratification'):
1180
  sorting_choice = st.selectbox("Stat Choice", options=['median', 'Own', 'Weighted Own', 'Geomean', 'Lineup Edge', 'Finish_percentile', 'Diversity'], index=0)
@@ -1182,9 +1209,14 @@ with tab2:
1182
  submitted = st.form_submit_button("Submit")
1183
  if submitted:
1184
  st.session_state['settings_base'] = False
1185
- parsed_frame = stratification_function(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var, sorting_choice)
1186
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1187
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
 
 
 
 
 
1188
  with st.expander('Exposure Management'):
1189
  with st.form(key='Exposures'):
1190
  exposure_player = st.selectbox("Player", options=sorted(list(player_names)))
@@ -1197,10 +1229,40 @@ with tab2:
1197
  submitted = st.form_submit_button("Submit")
1198
  if submitted:
1199
  st.session_state['settings_base'] = False
1200
- parsed_frame = exposure_spread(st.session_state['working_frame'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1201
- st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1202
- if type_var == 'Classic':
1203
- if sport_var == 'CS2' or sport_var == 'LOL':
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1204
  # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1205
  st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1206
  lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
@@ -1221,42 +1283,75 @@ with tab2:
1221
  sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1222
  axis=1
1223
  )
1224
-
1225
- elif sport_var != 'CS2' and sport_var != 'LOL':
1226
- st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1227
- st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1228
- st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1229
- if 'stack_dict' in st.session_state:
1230
- st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].index.map(st.session_state['stack_dict'])
1231
- st.session_state['working_frame']['Size'] = st.session_state['working_frame'].index.map(st.session_state['size_dict'])
1232
- elif type_var == 'Showdown':
1233
- # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1234
- st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1235
- lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1236
- sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1237
- axis=1
1238
- )
1239
 
1240
- # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1241
- st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(
1242
- lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1243
- sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1244
- axis=1
1245
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1246
 
1247
- # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1248
- st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(
1249
- lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1250
- sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1251
- axis=1
1252
- )
1253
- # st.session_state['working_frame']['Own'] = st.session_state['working_frame']['Own'].astype('float32')
1254
- st.session_state['working_frame']['median'] = st.session_state['working_frame']['median'].astype('float32')
1255
- st.session_state['working_frame']['salary'] = st.session_state['working_frame']['salary'].astype('uint16')
1256
 
1257
- print(st.session_state['working_frame'].head(10))
1258
- st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1259
- st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1260
  with st.container():
1261
  if 'export_base' not in st.session_state:
1262
  st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns)
 
913
 
914
  with tab2:
915
  if 'origin_portfolio' in st.session_state and 'projections_df' in st.session_state:
916
+ st.session_state['display_frame_source'] = 'Portfolio'
917
  with st.container():
918
  col1, col2 = st.columns(2)
919
  with col1:
 
1053
  parsed_frame = parsed_frame[~parsed_frame['Stack'].isin(stack_remove)]
1054
  else:
1055
  parsed_frame = parsed_frame
1056
+ if st.session_state['display_frame_source'] == 'Portfolio':
1057
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1058
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1059
+ else:
1060
+ st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1061
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1062
 
1063
  with st.expander('Micro Filter Options'):
1064
  with st.form(key='micro_filter_form'):
 
1122
 
1123
  if size_include:
1124
  parsed_frame = parsed_frame[parsed_frame['Size'].isin(size_include)]
1125
+ if st.session_state['display_frame_source'] == 'Portfolio':
1126
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1127
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1128
+ else:
1129
+ st.session_state['export_base'] = parsed_frame.sort_values(by='median', ascending=False).reset_index(drop=True)
1130
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1131
 
1132
  with st.expander('Trimming Options'):
1133
  with st.form(key='trim_form'):
 
1159
  st.session_state['settings_base'] = False
1160
  st.write('initiated')
1161
  parsed_frame = st.session_state['working_frame'].copy()
1162
+ if st.session_state['display_frame_source'] == 'Portfolio':
1163
+ parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1164
+ st.session_state['working_frame'] = parsed_frame.sort_values(by='median', ascending=False)
1165
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1166
+ else:
1167
+ parsed_frame = trim_portfolio(parsed_frame, trim_slack_var, performance_type, own_type, performance_threshold_high, performance_threshold_low, own_threshold_high, own_threshold_low)
1168
+ st.session_state['export_base'] = parsed_frame.copy()
1169
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1170
  with st.expander('Presets'):
1171
  st.info("Still heavily in testing here, I'll announce when they are ready for use.")
1172
  with st.form(key='Small Field Preset'):
 
1175
  submitted = st.form_submit_button("Submit")
1176
  if submitted:
1177
  st.session_state['settings_base'] = False
1178
+ if st.session_state['display_frame_source'] == 'Portfolio':
1179
+ if preset_choice == 'Small Field (Heavy Own)':
1180
+ parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1181
+ elif preset_choice == 'Large Field (Manage Diversity)':
1182
+ parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1183
+ elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1184
+ parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1185
+ elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1186
+ parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1187
+ elif preset_choice == 'Reduce Volatility (Manage Own)':
1188
+ parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1189
+
1190
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1191
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1192
+ else:
1193
+ if preset_choice == 'Small Field (Heavy Own)':
1194
+ parsed_frame = small_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1195
+ elif preset_choice == 'Large Field (Manage Diversity)':
1196
+ parsed_frame = large_field_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1197
+ elif preset_choice == 'Volatility (Heavy Lineup Edge)':
1198
+ parsed_frame = volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1199
+ elif preset_choice == 'Hedge Chalk (Manage Leverage)':
1200
+ parsed_frame = hedging_preset(st.session_state['working_frame'], lineup_target, st.session_state['projections_df'], sport_var)
1201
+ elif preset_choice == 'Reduce Volatility (Manage Own)':
1202
+ parsed_frame = reduce_volatility_preset(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var)
1203
+ st.session_state['export_base'] = parsed_frame.copy()
1204
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1205
  with st.expander('Stratify'):
1206
  with st.form(key='Stratification'):
1207
  sorting_choice = st.selectbox("Stat Choice", options=['median', 'Own', 'Weighted Own', 'Geomean', 'Lineup Edge', 'Finish_percentile', 'Diversity'], index=0)
 
1209
  submitted = st.form_submit_button("Submit")
1210
  if submitted:
1211
  st.session_state['settings_base'] = False
1212
+ if st.session_state['display_frame_source'] == 'Portfolio':
1213
+ parsed_frame = stratification_function(st.session_state['working_frame'], lineup_target, excluded_cols, sport_var, sorting_choice)
1214
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1215
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1216
+ else:
1217
+ parsed_frame = stratification_function(st.session_state['export_base'], lineup_target, excluded_cols, sport_var, sorting_choice)
1218
+ st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
1219
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
1220
  with st.expander('Exposure Management'):
1221
  with st.form(key='Exposures'):
1222
  exposure_player = st.selectbox("Player", options=sorted(list(player_names)))
 
1229
  submitted = st.form_submit_button("Submit")
1230
  if submitted:
1231
  st.session_state['settings_base'] = False
1232
+ if st.session_state['display_frame_source'] == 'Portfolio':
1233
+ parsed_frame = exposure_spread(st.session_state['working_frame'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1234
+ st.session_state['working_frame'] = parsed_frame.reset_index(drop=True)
1235
+ if type_var == 'Classic':
1236
+ if sport_var == 'CS2' or sport_var == 'LOL':
1237
+ # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1238
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1239
+ lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1240
+ sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1241
+ axis=1
1242
+ )
1243
+
1244
+ # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1245
+ st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(
1246
+ lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1247
+ sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1248
+ axis=1
1249
+ )
1250
+
1251
+ # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1252
+ st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(
1253
+ lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1254
+ sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1255
+ axis=1
1256
+ )
1257
+
1258
+ elif sport_var != 'CS2' and sport_var != 'LOL':
1259
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1260
+ st.session_state['working_frame']['median'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1261
+ st.session_state['working_frame']['Own'] = st.session_state['working_frame'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1262
+ if 'stack_dict' in st.session_state:
1263
+ st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].index.map(st.session_state['stack_dict'])
1264
+ st.session_state['working_frame']['Size'] = st.session_state['working_frame'].index.map(st.session_state['size_dict'])
1265
+ elif type_var == 'Showdown':
1266
  # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1267
  st.session_state['working_frame']['salary'] = st.session_state['working_frame'].apply(
1268
  lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
 
1283
  sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1284
  axis=1
1285
  )
1286
+ # st.session_state['working_frame']['Own'] = st.session_state['working_frame']['Own'].astype('float32')
1287
+ st.session_state['working_frame']['median'] = st.session_state['working_frame']['median'].astype('float32')
1288
+ st.session_state['working_frame']['salary'] = st.session_state['working_frame']['salary'].astype('uint16')
 
 
 
 
 
 
 
 
 
 
 
 
1289
 
1290
+ print(st.session_state['working_frame'].head(10))
1291
+ st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1292
+ st.session_state['export_merge'] = st.session_state['working_frame'].copy()
1293
+ else:
1294
+ parsed_frame = exposure_spread(st.session_state['export_base'], exposure_player, exposure_target, exposure_stack_bool, remove_teams_exposure, st.session_state['projections_df'], sport_var, type_var, salary_max)
1295
+ st.session_state['export_base'] = parsed_frame.reset_index(drop=True)
1296
+ if type_var == 'Classic':
1297
+ if sport_var == 'CS2' or sport_var == 'LOL':
1298
+ # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1299
+ st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(
1300
+ lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1301
+ sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1302
+ axis=1
1303
+ )
1304
+
1305
+ # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1306
+ st.session_state['export_base']['median'] = st.session_state['export_base'].apply(
1307
+ lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1308
+ sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1309
+ axis=1
1310
+ )
1311
+
1312
+ # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1313
+ st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(
1314
+ lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1315
+ sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1316
+ axis=1
1317
+ )
1318
+
1319
+ elif sport_var != 'CS2' and sport_var != 'LOL':
1320
+ st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row), axis=1)
1321
+ st.session_state['export_base']['median'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row), axis=1)
1322
+ st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(lambda row: sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row), axis=1)
1323
+ if 'stack_dict' in st.session_state:
1324
+ st.session_state['export_base']['Stack'] = st.session_state['export_base'].index.map(st.session_state['stack_dict'])
1325
+ st.session_state['export_base']['Size'] = st.session_state['export_base'].index.map(st.session_state['size_dict'])
1326
+ elif type_var == 'Showdown':
1327
+ # Calculate salary (CPT uses cpt_salary_map, others use salary_map)
1328
+ st.session_state['export_base']['salary'] = st.session_state['export_base'].apply(
1329
+ lambda row: st.session_state['map_dict']['cpt_salary_map'].get(row.iloc[0], 0) +
1330
+ sum(st.session_state['map_dict']['salary_map'].get(player, 0) for player in row.iloc[1:]),
1331
+ axis=1
1332
+ )
1333
+
1334
+ # Calculate median (CPT uses cpt_proj_map, others use proj_map)
1335
+ st.session_state['export_base']['median'] = st.session_state['export_base'].apply(
1336
+ lambda row: st.session_state['map_dict']['cpt_proj_map'].get(row.iloc[0], 0) +
1337
+ sum(st.session_state['map_dict']['proj_map'].get(player, 0) for player in row.iloc[1:]),
1338
+ axis=1
1339
+ )
1340
+
1341
+ # Calculate ownership (CPT uses cpt_own_map, others use own_map)
1342
+ st.session_state['export_base']['Own'] = st.session_state['export_base'].apply(
1343
+ lambda row: st.session_state['map_dict']['cpt_own_map'].get(row.iloc[0], 0) +
1344
+ sum(st.session_state['map_dict']['own_map'].get(player, 0) for player in row.iloc[1:]),
1345
+ axis=1
1346
+ )
1347
+ # st.session_state['export_base']['Own'] = st.session_state['export_base']['Own'].astype('float32')
1348
+ st.session_state['export_base']['median'] = st.session_state['export_base']['median'].astype('float32')
1349
+ st.session_state['export_base']['salary'] = st.session_state['export_base']['salary'].astype('uint16')
1350
 
1351
+ print(st.session_state['export_base'].head(10))
1352
+ st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
1353
+ st.session_state['export_merge'] = st.session_state['export_base'].copy()
 
 
 
 
 
 
1354
 
 
 
 
1355
  with st.container():
1356
  if 'export_base' not in st.session_state:
1357
  st.session_state['export_base'] = pd.DataFrame(columns=st.session_state['working_frame'].columns)