-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcached_data.py
More file actions
228 lines (182 loc) · 7.74 KB
/
cached_data.py
File metadata and controls
228 lines (182 loc) · 7.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
import cachetools.func
import pandas as pd
from motherduck import con
import polars as pl
# TTL (seconds) shared by every @ttl_cache-decorated query below — presumably
# chosen to balance data freshness against MotherDuck query load; TODO confirm.
CACHE_SECONDS = 600
def get_matches_for_event(event_key: str) -> pd.DataFrame:
    """Return every match for one event, ordered chronologically by 'time'."""
    matches = get_matches()
    event_matches = matches[matches['event_key'] == event_key]
    return event_matches.sort_values(by=['time'], ascending=[True])
# Example controller to cache queries
# this will only run the query if it needs cache refresh
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_matches() -> pd.DataFrame:
    """Return all TBA match rows, cached for CACHE_SECONDS.

    NOTE(review): the original annotation said ``pl.DataFrame`` but
    duckdb's ``.df()`` returns a pandas DataFrame; annotation corrected.
    """
    return con.sql("select * from tba.matches").df()
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_rankings() -> pd.DataFrame:
    """Return all TBA event-ranking rows, cached for CACHE_SECONDS.

    NOTE(review): annotation corrected from ``pl.DataFrame`` — ``.df()``
    yields a pandas DataFrame.
    """
    return con.sql("select * from tba.event_rankings").df()
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_defense(event_key: str = '2025schar') -> pd.DataFrame:
    """Return teams tagged 'Defense' ranked by drive type, size, and DPR.

    Joins pit-scouting data with scouting tags and TBA OPR metrics.

    :param event_key: TBA event key; defaults to '2025schar', which was
        hard-coded in the original query (parameter added for reuse).
    :return: one row per defense-tagged team, best defenders first.

    NOTE(review): annotation corrected from ``pl.DataFrame`` — ``.df()``
    yields a pandas DataFrame.
    """
    # Escape single quotes before interpolating into SQL; con.sql() is not
    # given bind parameters anywhere in this module.
    safe_key = event_key.replace("'", "''")
    return con.sql(f"""
        select pit.team_number, pit.drive_type, GREATEST(height,width) as max_size,
        t.all_tags, o.oprs as opr, o.dprs as dpr, o.ccwms as ccwm,
        case pit.drive_type
            when 'Swerve' then 1
            when 'Tank' then 2
            when 'Mecanum' then 3
            else 999
        end as drive_rank
        from scouting.pit
        INNER JOIN (
            select team_number, list(tag) as all_tags
            from scouting.tags
            where 'Defense' in (select tag from scouting.tags where team_number = pit.team_number)
            group by team_number
        ) as t
        on ( t.team_number = pit.team_number)
        INNER JOIN tba.oprs as o
        on ( t.team_number = o.team_number and pit.team_number = o.team_number )
        where o.event_key = '{safe_key}'
        group by pit.team_number, pit.drive_type, pit.height, pit.width, t.all_tags, o.oprs, o.dprs, o.ccwms, drive_rank
        order by drive_rank asc, max_size desc, dpr desc;
    """).df()
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_team_list(event_key: str) -> list:
    """Return the sorted unique team numbers competing at an event.

    :param event_key: TBA event key, e.g. '2025schar'
    :return: sorted list of the teams appearing in any alliance slot
    """
    # BUGFIX: the key was interpolated into the SQL string unescaped —
    # a quote in event_key would break (or inject into) the query.
    safe_key = event_key.replace("'", "''")
    df = con.sql(f"""
        select red1, red2, red3, blue1, blue2, blue3
        from tba.matches
        where event_key = '{safe_key}'
    """).df()
    # Flatten all six alliance-slot columns into one de-duplicated list.
    unique_teams = pd.unique(df.values.ravel())
    return sorted(unique_teams.tolist())
def get_most_recent_event() -> str:
    """Return the event key with the latest activity, or None when no events exist."""
    events = get_event_list()
    return events[0] if events else None
def get_event_list() -> list:
    """Return event keys ordered most-recent first.

    NOTE(review): the original annotation said ``pd.DataFrame`` but the
    function returns a plain list; annotation corrected.
    """
    event_df = get_events()
    return event_df['event_key'].values.tolist()
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_events() -> pd.DataFrame:
    """Return one row per event key, ordered by most recent match time, cached."""
    events_df = con.sql("""
    select event_key, max(actual_time) from tba.matches
    group by event_key
    order by max(actual_time) desc;
    """).df()
    return events_df
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def _get_tba_oprs_and_ranks() -> pd.DataFrame:
    """Join TBA event rankings with OPR metrics for every event, sorted by rank; cached."""
    return con.sql("""
    select er.team_number, er.event_key,er.wins, er.losses, er.ties,er.rank,er.dq, op.oprs as opr, op.ccwms as ccwm, op.dprs as dpr
    from frc_2025.tba.event_rankings er
    join frc_2025.tba.oprs op on er.team_number = op.team_number and er.event_key = op.event_key
    order by er.rank asc;
    """).df()
def get_tba_oprs_and_ranks_for_event(event_key: str) -> pd.DataFrame:
    """Filter the cached TBA opr/ranking table down to a single event."""
    all_rows = _get_tba_oprs_and_ranks()
    return all_rows[all_rows['event_key'] == event_key]
def get_oprs_and_ranks_for_event(event_key: str) -> pd.DataFrame:
    """Combine TBA opr/rank rows with the RP summary for one event.

    Inner-merges on team_number, so only teams present in both sources appear.
    """
    event_ranks = _get_tba_oprs_and_ranks()
    event_ranks = event_ranks[event_ranks['event_key'] == event_key]
    rp_summary = get_ranking_point_summary_for_event(event_key)
    return event_ranks.merge(rp_summary, on='team_number', how='inner')
def get_oprs_and_ranks_for_team(event_key: str, team_number: int) -> dict:
    """Return the opr/ranking record for one team at one event, or {} if absent."""
    event_ranks = get_oprs_and_ranks_for_event(event_key)
    team_rows = event_ranks[event_ranks['team_number'] == team_number]
    records = team_rows.to_dict(orient='records')
    return records[0] if records else {}
def get_robot_specific_data_from_matches(event_key: str) -> pd.DataFrame:
    """Build one row per (team, match) for values TBA reports per robot.

    Scans every match at the event and, for each team, pulls the per-robot
    columns (auto line, end game) from whichever alliance slot the team
    occupied.

    :param event_key: TBA event key, e.g. '2025schar'
    :return: DataFrame with columns team_number, auto_line, end_game

    BUGFIX (three defects in the original):
      * the inner helper's annotation ``index: i`` referenced an undefined
        name ``i`` and raised NameError as soon as the function ran;
      * ``row[team_number]`` indexed the match row by the team number value
        instead of the slot column;
      * the auto_line/end_game column names hard-coded the ``red_`` prefix,
        so blue-alliance slots read red-alliance data.
    """
    team_list = get_team_list(event_key)
    matches = get_matches_for_event(event_key)
    rows = []

    def _get_robot_specific_value(row, team_number: int, prefix: str, index: int) -> list:
        # e.g. team_col='red1', suffix='robot1'
        team_col = f"{prefix}{index}"
        suffix = f"robot{index}"
        if row[team_col] != team_number:
            return []
        # presumably columns like 'blue_auto_line_robot2' exist alongside the
        # red ones in tba.matches — TODO confirm against the table schema.
        return [{
            'team_number': team_number,
            'auto_line': row[f"{prefix}_auto_line_{suffix}"],
            'end_game': row[f"{prefix}_end_game_{suffix}"],
        }]

    for t in team_list:
        for _, row in matches.iterrows():
            for prefix in ('red', 'blue'):
                for index in (1, 2, 3):
                    rows.extend(_get_robot_specific_value(row, t, prefix, index))
    return pd.DataFrame(rows)
@cachetools.func.ttl_cache(maxsize=128, ttl=CACHE_SECONDS)
def get_ranking_point_summary_for_event(event_key: str) -> pd.DataFrame:
    """
    This computes a summary of how many RPs each team has, and how they got them.

    :param event_key: TBA event key; only qualification ('qm') matches count.
    :return: one row per team with RP totals and per-match averages.
    """
    team_data = {}

    def get_team_summary(team_number: int):
        # Lazily create the accumulator row for a team on first sighting.
        if team_number not in team_data:
            team_data[team_number] = {
                'team_number': team_number,
                'total_rp': 0,
                'auto_rp': 0,
                'win_rp': 0,
                'coral_rp': 0,
                'barge_rp': 0,
                'total_rp_sum': 0,
                'match_count': 0
            }
        return team_data[team_number]

    # there is probably a faster way to do this that's vectorized but
    # i dont want to figure it out right now
    matches = get_matches_for_event(event_key)
    matches = matches[matches['comp_level'] == 'qm']  # only consider qualifiers for rankings

    def _add_team_rps_with_prefix(prefix: str, row):
        # Credit this match's RPs to all three teams on the given alliance.
        anti_prefix = "blue" if prefix == "red" else "red"
        for col in [f"{prefix}1", f"{prefix}2", f"{prefix}3"]:
            team_number = row[col]
            td = get_team_summary(team_number)
            # blue alliance calculated rp
            td['total_rp'] += row[f"{prefix}_rp"]
            td['match_count'] += 1
            # 3 for win, 1 for a tie
            our_score = row[f"{prefix}_score"]
            their_score = row[f"{anti_prefix}_score"]
            if our_score > their_score:
                td['win_rp'] += 3
            elif our_score == their_score:
                td['win_rp'] += 1
            if row[f'{prefix}_auto_bonus_achieved'] == 1:
                td['auto_rp'] += 1
            if row[f'{prefix}_coral_bonus_achieved'] == 1:
                td['coral_rp'] += 1
            if row[f'{prefix}_barge_bonus_achieved'] == 1:
                td['barge_rp'] += 1

    for _, row in matches.iterrows():
        _add_team_rps_with_prefix('red', row)
        _add_team_rps_with_prefix('blue', row)

    # BUGFIX: an event with zero qualification matches produced an empty
    # DataFrame with no columns, so r['total_rp'] below raised KeyError.
    if not team_data:
        return pd.DataFrame(columns=[
            'team_number', 'total_rp', 'auto_rp', 'win_rp', 'coral_rp',
            'barge_rp', 'total_rp_sum', 'match_count', 'avg_rp',
            'avg_win_rp', 'avg_auto_rp', 'avg_coral_rp', 'avg_barge_rp'])

    r = pd.DataFrame(team_data.values())
    # match_count >= 1 for every accumulated team, so division is safe.
    r['avg_rp'] = r['total_rp'] / r['match_count']
    r['avg_win_rp'] = r['win_rp'] / r['match_count']
    r['avg_auto_rp'] = r['auto_rp'] / r['match_count']
    r['avg_coral_rp'] = r['coral_rp'] / r['match_count']
    r['avg_barge_rp'] = r['barge_rp'] / r['match_count']
    return r
def clear_caches():
    """Drop every TTL cache so the next call re-queries MotherDuck."""
    cached_functions = (
        get_ranking_point_summary_for_event,
        _get_tba_oprs_and_ranks,
        get_matches,
        get_defense,
        get_rankings,
        get_team_list,
        get_events,
    )
    for fn in cached_functions:
        fn.cache_clear()