James McCool committed
Commit · 59dc088 · 1 Parent(s): baabd98
Refactor game type handling and player filtering in app.py
- Updated the logic for calculating metrics based on game type to use a working dataframe instead of directly modifying the session-state 'Contest' dataframe, improving code clarity and maintainability (pattern sketched below).
- Enhanced the player filtering process by integrating it into the game type selection form, allowing for more dynamic user interaction and better data management.
- This change streamlines the overall data-processing workflow, ensuring accurate calculations and an improved user experience.
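In practice the refactor amounts to copying the contest data once and doing all filtering and metric calculation on that copy, so the session-state 'Contest' dataframe is never mutated. Below is a minimal, self-contained sketch of that pattern; the column names, entry names, and salary map are illustrative stand-ins, not the app's real data or its map_dict.

```python
import pandas as pd

# Stand-in contest data: one row per lineup, with the entrant name and two
# hypothetical player columns.
contest = pd.DataFrame({
    'BaseName': ['user1', 'user2', 'user3'],
    'P1': ['PlayerA', 'PlayerA', 'PlayerC'],
    'P2': ['PlayerB', 'PlayerD', 'PlayerB'],
})
salary_map = {'PlayerA': 9800, 'PlayerB': 7200, 'PlayerC': 6400, 'PlayerD': 5000}

# Work on a copy so filters and derived metrics never touch the original
# dataframe (in the app, the original lives in st.session_state['Contest']).
working_df = contest.copy()

# The entry filter from the form is applied to the copy only.
selected_entries = ['user1', 'user3']
working_df = working_df[working_df['BaseName'].isin(selected_entries)]

# Derived metrics are written to the copy as well; the untouched original
# remains available for later reruns.
player_columns = ['P1', 'P2']
working_df['salary'] = working_df[player_columns].apply(
    lambda row: sum(salary_map.get(p, 0) for p in row), axis=1
)
print(working_df)
```

In the commit itself the same idea appears as `working_df = st.session_state['Contest'].copy()` followed by the `BaseName` filter and the per-lineup salary, median, and ownership sums.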
app.py
CHANGED

@@ -101,98 +101,89 @@ with tab2:
 'cpt_proj_map': dict(zip(st.session_state['projections_df']['player_names'], st.session_state['projections_df']['median'] * 1.5)),
 'cpt_own_map': dict(zip(st.session_state['projections_df']['player_names'], st.session_state['projections_df']['captain ownership']))
 }
-
- st.session_state['
+ # Create a copy of the dataframe for calculations
+ working_df = st.session_state['Contest'].copy()
+
+ with col1:
+ with st.expander("Info and filters"):
+ if st.button('Clear data', key='reset3'):
+ st.session_state.clear()
+ with st.form(key='filter_form'):
+ type_var = st.selectbox("Select Game Type", ['Classic', 'Showdown'])
+ entry_parse_var = st.selectbox("Do you want to view a specific player(s) or a group of players?", ['All', 'Specific'])
+ entry_names = st.multiselect("Select players", options=st.session_state['entry_list'], default=[])
+ submitted = st.form_submit_button("Submit")
+ if submitted:
+ if 'player_frame' in st.session_state:
+ del st.session_state['player_frame']
+ del st.session_state['stack_frame']
+ # Apply entry name filter if specific entries are selected
+ if entry_parse_var == 'Specific' and entry_names:
+ working_df = working_df[working_df['BaseName'].isin(entry_names)]

 # Calculate metrics based on game type
- if
-
+ if type_var == 'Classic':
+ working_df['stack'] = working_df.apply(
 lambda row: Counter(
 map_dict['team_map'].get(player, '') for player in row[4:]
 if map_dict['team_map'].get(player, '') != ''
 ).most_common(1)[0][0] if any(map_dict['team_map'].get(player, '') for player in row[4:]) else '',
 axis=1
 )
-
+ working_df['stack_size'] = working_df.apply(
 lambda row: Counter(
 map_dict['team_map'].get(player, '') for player in row[4:]
 if map_dict['team_map'].get(player, '') != ''
 ).most_common(1)[0][1] if any(map_dict['team_map'].get(player, '') for player in row[4:]) else '',
 axis=1
 )
-
-
-
-
-
-
+ working_df['salary'] = working_df.apply(lambda row: sum(map_dict['salary_map'].get(player, 0) for player in row), axis=1)
+ working_df['median'] = working_df.apply(lambda row: sum(map_dict['proj_map'].get(player, 0) for player in row), axis=1)
+ working_df['actual_fpts'] = working_df.apply(lambda row: sum(st.session_state['actual_dict'].get(player, 0) for player in row), axis=1)
+ working_df['Own'] = working_df.apply(lambda row: sum(map_dict['own_map'].get(player, 0) for player in row), axis=1)
+ working_df['actual_own'] = working_df.apply(lambda row: sum(st.session_state['ownership_dict'].get(player, 0) for player in row), axis=1)
+ working_df['sorted'] = working_df[player_columns].apply(
 lambda row: ','.join(sorted(row.values)),
 axis=1
 )
-
-
- elif
-
+ working_df['dupes'] = working_df.groupby('sorted').transform('size')
+ working_df = working_df.drop('sorted', axis=1)
+ elif type_var == 'Showdown':
+ working_df['stack'] = working_df.apply(
 lambda row: Counter(
 map_dict['team_map'].get(player, '') for player in row
 if map_dict['team_map'].get(player, '') != ''
 ).most_common(1)[0][0] if any(map_dict['team_map'].get(player, '') for player in row) else '',
 axis=1
 )
-
+ working_df['stack_size'] = working_df.apply(
 lambda row: Counter(
 map_dict['team_map'].get(player, '') for player in row
 if map_dict['team_map'].get(player, '') != ''
 ).most_common(1)[0][1] if any(map_dict['team_map'].get(player, '') for player in row) else '',
 axis=1
 )
-
+ working_df['salary'] = working_df.apply(
 lambda row: map_dict['cpt_salary_map'].get(row.iloc[0], 0) +
 sum(map_dict['salary_map'].get(player, 0) for player in row.iloc[1:]),
 axis=1
 )
-
+ working_df['median'] = working_df.apply(
 lambda row: map_dict['cpt_proj_map'].get(row.iloc[0], 0) +
 sum(map_dict['proj_map'].get(player, 0) for player in row.iloc[1:]),
 axis=1
 )
-
+ working_df['Own'] = working_df.apply(
 lambda row: map_dict['cpt_own_map'].get(row.iloc[0], 0) +
 sum(map_dict['own_map'].get(player, 0) for player in row.iloc[1:]),
 axis=1
 )
-
+ working_df['sorted'] = working_df[player_columns].apply(
 lambda row: row[0] + '|' + ','.join(sorted(row[1:].values)),
 axis=1
 )
-
-
-
- # Create a copy of the dataframe for calculations
- for col in player_columns:
- contest_players = st.session_state['Contest'].copy()
- players_1per = st.session_state['Contest'].head(int(len(st.session_state['Contest']) * 0.01))
- players_5per = st.session_state['Contest'].head(int(len(st.session_state['Contest']) * 0.05))
- players_10per = st.session_state['Contest'].head(int(len(st.session_state['Contest']) * 0.10))
- players_20per = st.session_state['Contest'].head(int(len(st.session_state['Contest']) * 0.20))
- working_df = st.session_state['Contest'].copy()
-
- with col1:
- with st.expander("Info and filters"):
- if st.button('Clear data', key='reset3'):
- st.session_state.clear()
- with st.form(key='filter_form'):
- st.session_state['type_var'] = st.selectbox("Select Game Type", ['Classic', 'Showdown'])
- entry_parse_var = st.selectbox("Do you want to view a specific player(s) or a group of players?", ['All', 'Specific'])
- entry_names = st.multiselect("Select players", options=st.session_state['entry_list'], default=[])
- submitted = st.form_submit_button("Submit")
- if submitted:
- if 'player_frame' in st.session_state:
- del st.session_state['player_frame']
- del st.session_state['stack_frame']
- # Apply entry name filter if specific entries are selected
- if entry_parse_var == 'Specific' and entry_names:
- working_df = working_df[working_df['BaseName'].isin(entry_names)]
+ working_df['dupes'] = working_df.groupby('sorted').transform('size')
+ working_df = working_df.drop('sorted', axis=1)

 # Initialize pagination in session state if not exists
 if 'current_page' not in st.session_state:

@@ -235,6 +226,17 @@ with tab2:
 hide_index=True
 )

+ for col in player_columns:
+ contest_players = working_df.copy()
+ players_1per = working_df.head(int(len(working_df) * 0.01))
+ players_5per = working_df.head(int(len(working_df) * 0.05))
+ players_10per = working_df.head(int(len(working_df) * 0.10))
+ players_20per = working_df.head(int(len(working_df) * 0.20))
+ contest_len = len(st.session_state['Contest'])
+ len_1per = len(st.session_state['Contest']).head(int(len(working_df) * 0.01))
+ len_5per = len(st.session_state['Contest']).head(int(len(working_df) * 0.05))
+ len_10per = len(st.session_state['Contest']).head(int(len(working_df) * 0.10))
+ len_20per = len(st.session_state['Contest']).head(int(len(working_df) * 0.20))
 player_counts = pd.Series(list(contest_players[player_columns].values.flatten())).value_counts()
 player_1per_counts = pd.Series(list(players_1per[player_columns].values.flatten())).value_counts()
 player_5per_counts = pd.Series(list(players_5per[player_columns].values.flatten())).value_counts()

@@ -252,13 +254,15 @@ with tab2:
 dupe_20per_counts = pd.Series(list(players_20per['dupes'])).value_counts()
 each_set_name = ['Overall', ' Top 1%', ' Top 5%', 'Top 10%', 'Top 20%']
 each_frame_set = [contest_players, players_1per, players_5per, players_10per, players_20per]
+ each_len_set = [contest_len, len_1per, len_5per, len_10per, len_20per]
 with st.container():
 tab1, tab2, tab3 = st.tabs(['Player Used Info', 'Stack Used Info', 'Duplication Info'])
 with tab1:
 player_count_var = 0
 for each_set in [player_counts, player_1per_counts, player_5per_counts, player_10per_counts, player20_per_counts]:
 set_frame = each_set.to_frame().reset_index().rename(columns={'index': 'Player', 'count': 'Count'})
-
+ st.write(len(each_frame_set[player_count_var]))
+ set_frame['Percent'] = set_frame['Count'] / each_len_set[player_count_var]
 set_frame = set_frame[['Player', 'Percent']]
 set_frame = set_frame.rename(columns={'Percent': f'Exposure {each_set_name[player_count_var]}'})
 if 'player_frame' not in st.session_state:

@@ -275,7 +279,8 @@ with tab2:
 stack_count_var = 0
 for each_set in [stack_counts, stack_1per_counts, stack_5per_counts, stack_10per_counts, stack_20per_counts]:
 set_frame = each_set.to_frame().reset_index().rename(columns={'index': 'Stack', 'count': 'Count'})
-
+ st.write(len(each_frame_set[stack_count_var]))
+ set_frame['Percent'] = set_frame['Count'] / each_len_set[stack_count_var]
 set_frame = set_frame[['Stack', 'Percent']]
 set_frame = set_frame.rename(columns={'Percent': f'Exposure {each_set_name[stack_count_var]}'})
 if 'stack_frame' not in st.session_state:

@@ -292,7 +297,7 @@ with tab2:
 dupe_count_var = 0
 for each_set in [dupe_counts, dupe_1per_counts, dupe_5per_counts, dupe_10per_counts, dupe_20per_counts]:
 set_frame = each_set.to_frame().reset_index().rename(columns={'index': 'Dupes', 'count': 'Count'})
- set_frame['Percent'] = set_frame['Count'] /
+ set_frame['Percent'] = set_frame['Count'] / each_len_set[dupe_count_var]
 set_frame = set_frame[['Dupes', 'Percent']]
 set_frame = set_frame.rename(columns={'Percent': f'Exposure {each_set_name[dupe_count_var]}'})
 if 'dupe_frame' not in st.session_state:
|