James McCool committed on
Commit
8d876b3
·
1 Parent(s): d94819e

Refactor stacking logic in app.py to streamline team assignment calculations for player lineups. This update removes legacy stacking code and optimizes the application of team data, enhancing performance and maintainability.

Browse files
Files changed (1) hide show
  1. app.py +30 -50
app.py CHANGED
@@ -310,48 +310,6 @@ def calculate_lineup_metrics(df, player_columns, map_dict, type_var, sport_var,
310
  df['median'] = calculate_median_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
311
  df['Own'] = calculate_ownership_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
312
 
313
- # Handle stacking for specific sports
314
- if projections_df is not None and 'team' in projections_df.columns:
315
- team_dict = dict(zip(projections_df['player_names'], projections_df['team']))
316
-
317
- if type_var == 'Classic' and sport_var not in ['CS2', 'LOL', 'GOLF']:
318
- # Stack calculation for classic sports (excluding first 2 columns for pitchers)
319
- stack_columns = player_columns[2:] if len(player_columns) > 2 else player_columns
320
- df['Stack'] = df[stack_columns].apply(
321
- lambda row: Counter(
322
- team_dict.get(player, '') for player in row
323
- if team_dict.get(player, '') != ''
324
- ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row) else '',
325
- axis=1
326
- )
327
- df['Size'] = df[stack_columns].apply(
328
- lambda row: Counter(
329
- team_dict.get(player, '') for player in row
330
- if team_dict.get(player, '') != ''
331
- ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row) else 0,
332
- axis=1
333
- )
334
- elif sport_var == 'LOL':
335
- # LOL uses all player columns for stacking
336
- df['Stack'] = df[player_columns].apply(
337
- lambda row: Counter(
338
- team_dict.get(player, '') for player in row
339
- if team_dict.get(player, '') != ''
340
- ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row) else '',
341
- axis=1
342
- )
343
- df['Size'] = df[player_columns].apply(
344
- lambda row: Counter(
345
- team_dict.get(player, '') for player in row
346
- if team_dict.get(player, '') != ''
347
- ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row) else 0,
348
- axis=1
349
- )
350
-
351
- # Optimize data types
352
- df['salary'] = df['salary'].astype('uint16')
353
- df['median'] = df['median'].astype('float32')
354
-
355
  return df
356
 
357
  def create_team_filter_mask(df, player_columns, team_map, teams_to_filter, focus_type='Overall', type_var='Classic'):
@@ -615,14 +573,6 @@ if selected_tab == 'Data Load':
615
 
616
  st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all')
617
  st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True)
618
- # Check if Stack column exists in the portfolio
619
- if 'Stack' in st.session_state['portfolio'].columns:
620
- # Create dictionary mapping index to Stack values
621
- stack_dict = dict(zip(st.session_state['portfolio'].index, st.session_state['portfolio']['Stack']))
622
- st.write(f"Found {len(stack_dict)} stack assignments")
623
- st.session_state['portfolio'] = st.session_state['portfolio'].drop(columns=['Stack'])
624
- else:
625
- stack_dict = None
626
  if st.session_state['portfolio'] is not None:
627
 
628
  # Optimize data types early for memory efficiency
@@ -2102,6 +2052,21 @@ if selected_tab == 'Manage Portfolio':
2102
 
2103
  # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2104
  st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2105
  st.session_state['export_merge'] = st.session_state['working_frame'].copy()
2106
  elif exp_submitted:
2107
  st.session_state['settings_base'] = False
@@ -2124,6 +2089,21 @@ if selected_tab == 'Manage Portfolio':
2124
 
2125
  # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2126
  st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2127
  st.session_state['export_merge'] = st.session_state['export_base'].copy()
2128
 
2129
  with st.container():
 
310
  df['median'] = calculate_median_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
311
  df['Own'] = calculate_ownership_vectorized(df[player_columns], player_columns, map_dict, type_var, sport_var)
312
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
313
  return df
314
 
315
  def create_team_filter_mask(df, player_columns, team_map, teams_to_filter, focus_type='Overall', type_var='Classic'):
 
573
 
574
  st.session_state['portfolio'] = st.session_state['portfolio'].dropna(how='all')
575
  st.session_state['portfolio'] = st.session_state['portfolio'].reset_index(drop=True)
 
 
 
 
 
 
 
 
576
  if st.session_state['portfolio'] is not None:
577
 
578
  # Optimize data types early for memory efficiency
 
2052
 
2053
  # st.session_state['working_frame'] = predict_dupes(st.session_state['working_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2054
  st.session_state['working_frame'] = reassess_edge(st.session_state['working_frame'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
2055
+ team_dict = dict(zip(st.session_state['projections_df']['player_names'], st.session_state['projections_df']['team']))
2056
+ st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
2057
+ lambda row: Counter(
2058
+ team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]
2059
+ if team_dict.get(player, '') != ''
2060
+ ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '',
2061
+ axis=1
2062
+ )
2063
+ st.session_state['working_frame']['Size'] = st.session_state['working_frame'].apply(
2064
+ lambda row: Counter(
2065
+ team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]
2066
+ if team_dict.get(player, '') != ''
2067
+ ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0,
2068
+ axis=1
2069
+ )
2070
  st.session_state['export_merge'] = st.session_state['working_frame'].copy()
2071
  elif exp_submitted:
2072
  st.session_state['settings_base'] = False
 
2089
 
2090
  # st.session_state['export_base'] = predict_dupes(st.session_state['export_base'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var)
2091
  st.session_state['export_base'] = reassess_edge(st.session_state['export_base'], st.session_state['base_frame'], st.session_state['map_dict'], site_var, type_var, Contest_Size, strength_var, sport_var, salary_max)
2092
+ team_dict = dict(zip(st.session_state['projections_df']['player_names'], st.session_state['projections_df']['team']))
2093
+ st.session_state['working_frame']['Stack'] = st.session_state['working_frame'].apply(
2094
+ lambda row: Counter(
2095
+ team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]
2096
+ if team_dict.get(player, '') != ''
2097
+ ).most_common(1)[0][0] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else '',
2098
+ axis=1
2099
+ )
2100
+ st.session_state['working_frame']['Size'] = st.session_state['working_frame'].apply(
2101
+ lambda row: Counter(
2102
+ team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]
2103
+ if team_dict.get(player, '') != ''
2104
+ ).most_common(1)[0][1] if any(team_dict.get(player, '') for player in row[stack_column_dict[site_var][type_var][sport_var]]) else 0,
2105
+ axis=1
2106
+ )
2107
  st.session_state['export_merge'] = st.session_state['export_base'].copy()
2108
 
2109
  with st.container():