Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 22 additions & 6 deletions src/nba_api/library/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,15 +34,21 @@ def __init__(self, response, status_code, url):
self._response = response
self._status_code = status_code
self._url = url
self._dict_cache = None
self._json_cache = None

def get_response(self):
return self._response

def get_dict(self):
    """Parse the raw response body as JSON, memoizing the parsed dict.

    NOTE(review): the cached dict is returned directly; if a caller mutates
    it, every later get_dict()/get_json() call observes the mutation.
    """
    if self._dict_cache is None:
        self._dict_cache = json.loads(self._response)
    return self._dict_cache

def get_json(self):
    """Return the response re-serialized as a JSON string, memoized.

    Serialization goes through get_dict() so both caches stay consistent.
    """
    if self._json_cache is None:
        self._json_cache = json.dumps(self.get_dict())
    return self._json_cache

def valid_json(self):
try:
Expand All @@ -54,6 +60,9 @@ def valid_json(self):
def get_url(self):
return self._url

def get_status_code(self):
    """Return the HTTP status code recorded for this response."""
    code = self._status_code
    return code


class NBAHTTP:
nba_response = NBAResponse
Expand Down Expand Up @@ -126,8 +135,8 @@ def send_api_request(
contents = None
file_path = None

# Sort parameters by key... for some reason this matters for some requests...
parameters = sorted(parameters.items(), key=lambda kv: kv[0])
# tuples are faster to handle and iterate
parameters = tuple(sorted(parameters.items(), key=lambda kv: kv[0]))

if DEBUG and DEBUG_STORAGE:
print(endpoint, parameters)
Expand Down Expand Up @@ -173,7 +182,14 @@ def send_api_request(

data = self.nba_response(response=contents, status_code=status_code, url=url)

if raise_exception_on_error and not data.valid_json():
raise Exception("InvalidResponse: Response is not in a valid JSON format.")
if raise_exception_on_error:
if status_code is not None and status_code >= 400:
Copy link
Copy Markdown
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why the additional breakout of the exception?

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The code now checks the HTTP status code first (any >= 400) raising an Exception with the status code, before
validating JSON. This ensures HTTP errors are properly caught.

raise Exception(
f"HTTPError: Request failed with status code {status_code}."
)
if not data.valid_json():
raise Exception(
"InvalidResponse: Response is not in a valid JSON format."
)

return data
4 changes: 3 additions & 1 deletion src/nba_api/live/nba/endpoints/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
class Endpoint:
class DataSet:
key = None
data = {}

def __init__(self, data=None):
if data is None:
Expand All @@ -23,6 +22,9 @@ def get_request_url(self):
def get_response(self):
return self.nba_response.get_response()

def get_status_code(self):
    """Return the HTTP status code of the wrapped NBA response."""
    response = self.nba_response
    return response.get_status_code()

def get_dict(self):
return self.nba_response.get_dict()

Expand Down
5 changes: 4 additions & 1 deletion src/nba_api/stats/endpoints/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
class Endpoint:
class DataSet:
key: str | None = None
data: dict[str, Any] = {}

def __init__(self, data: dict[str, Any]) -> None:
self.data = data
Expand Down Expand Up @@ -88,6 +87,10 @@ def get_response(self) -> str:
"""Return the raw response string."""
return self.nba_response.get_response()

def get_status_code(self) -> int:
    """Return the HTTP status code of the underlying response."""
    response = self.nba_response
    return response.get_status_code()

def get_dict(self) -> dict[str, Any]:
"""Return the response as a dictionary."""
return self.nba_response.get_dict()
Expand Down
37 changes: 22 additions & 15 deletions src/nba_api/stats/library/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,26 +29,29 @@ class NBAStatsResponse(http.NBAResponse):
def __init__(self, *args, **kwargs):
    """Delegate to NBAResponse, then add stats-specific lazy-cache slots."""
    super().__init__(*args, **kwargs)
    # Both slots are populated lazily; None means "not computed yet".
    self._normalized_dict_cache = None
    self._endpoint = None

@staticmethod
def _build_rows(headers, row_set):
return [dict(zip(headers, raw_row, strict=False)) for raw_row in row_set]

def get_normalized_dict(self):
if self._normalized_dict_cache is not None:
return self._normalized_dict_cache

raw_data = self.get_dict()

data = {}

legacy_headers = ["resultSets", "resultSet"]
is_legacy = set(legacy_headers) & set(raw_data.keys())
legacy_headers = {"resultSets", "resultSet"}
raw_keys = raw_data.keys()
is_legacy = bool(legacy_headers & raw_keys)

if is_legacy:
if "resultSets" in raw_data:
results = raw_data["resultSets"]
if "Meta" in results:
return results
else:
results = raw_data["resultSet"]
results = raw_data.get("resultSets") or raw_data.get("resultSet")
if results and "Meta" in results:
self._normalized_dict_cache = results
return results
if isinstance(results, dict):
results = [results]
for result in results:
Expand All @@ -61,27 +64,31 @@ def get_normalized_dict(self):
endpoint_parser = get_parser_for_endpoint(self._endpoint, raw_data)
for name, dataset in endpoint_parser.get_data_sets().items():
data[name] = self._build_rows(dataset["headers"], dataset["data"])
except (KeyError, ImportError):
Copy link
Copy Markdown
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why remove the import error here? To be honest, I'm not sure why it's even catching it here in the first place

Copy link
Copy Markdown
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If the import of get_parser_for_endpoint fails, that's a critical issue that should raise an exception, not silently
continue with an empty result

except KeyError:
pass

self._normalized_dict_cache = data
return data

def get_normalized_json(self):
    """JSON-serialize the normalized result sets, reusing the dict cache."""
    cached = self._normalized_dict_cache
    payload = cached if cached is not None else self.get_normalized_dict()
    return json.dumps(payload)

def get_parameters(self):
    """Return the request parameters reported in the response, or None.

    Handles both response shapes seen in the API: a plain dict of
    parameters (returned as-is) and a list of single-key dicts
    (flattened into one dict).
    """
    raw = self.get_dict() if self.valid_json() else None
    if raw is None or "parameters" not in raw:
        return None

    parameters = raw["parameters"]
    if isinstance(parameters, dict):
        return parameters

    # List-of-dicts shape: merge the fragments into a single mapping.
    result = {}
    for parameter in parameters:
        result.update(parameter)
    return result

def get_headers_from_data_sets(self):
raw_dict = self.get_dict()
Expand Down
138 changes: 89 additions & 49 deletions src/nba_api/stats/static/players.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import functools
import re
import unicodedata

Expand All @@ -11,17 +12,22 @@
wnba_players,
)

# Pre-built index for O(1) ID lookup
_players_by_id = {p[player_index_id]: p for p in players}
_wnba_players_by_id = {p[player_index_id]: p for p in wnba_players}

def _find_players(regex_pattern, row_id, players=players):
players_found = []
for player in players:
if re.search(
_strip_accents(regex_pattern),
_strip_accents(str(player[row_id])),
flags=re.I,
):
players_found.append(_get_player_dict(player))
return players_found
# Pre-computed cached lists
_cached_players = None
_cached_active_players = None
_cached_inactive_players = None
_cached_wnba_players = None
_cached_wnba_active_players = None
_cached_wnba_inactive_players = None


@functools.lru_cache(maxsize=128)
def _compile_regex(pattern):
    """Accent-strip *pattern* and compile it case-insensitively (memoized)."""
    normalized = _strip_accents(pattern)
    return re.compile(normalized, flags=re.I)


def _strip_accents(inputstr: str) -> str:
Expand All @@ -36,38 +42,72 @@ def _strip_accents(inputstr: str) -> str:
)


def _find_player_by_id(player_id, players=players):
regex_pattern = f"^{player_id}$"
players_list = _find_players(regex_pattern, player_index_id, players=players)
if len(players_list) > 1:
raise Exception("Found more than 1 id")
elif not players_list:
return None
else:
return players_list[0]


def _get_players(players=players):
players_list = []
for player in players:
players_list.append(_get_player_dict(player))
return players_list


def _get_active_players(players=players):
players_list = []
for player in players:
if player[player_index_is_active]:
players_list.append(_get_player_dict(player))
return players_list


def _get_inactive_players(players=players):
players_list = []
for player in players:
if not player[player_index_is_active]:
players_list.append(_get_player_dict(player))
return players_list
def _find_players(regex_pattern, row_id, players=players):
    """Return player dicts whose *row_id* column matches *regex_pattern*.

    Matching is case-insensitive and accent-insensitive: the pattern is
    accent-stripped inside _compile_regex and each data value is
    accent-stripped before searching.
    """
    matcher = _compile_regex(regex_pattern)
    matches = []
    for row in players:
        if matcher.search(_strip_accents(str(row[row_id]))):
            matches.append(_get_player_dict(row))
    return matches


def _find_player_by_id(player_id, _index=_players_by_id):
    """Return the player dict for *player_id* via O(1) index lookup, or None.

    The pre-index implementation matched on str(player[id]) with a regex,
    so callers could pass the id as either int or str. A plain dict lookup
    would regress that, so on a miss we retry with the alternate form.
    """
    row = _index.get(player_id)
    if row is None:
        # Retry with the other representation (int <-> str) to keep the
        # historical string-based matching behavior.
        if isinstance(player_id, str):
            try:
                row = _index.get(int(player_id))
            except ValueError:
                row = None
        else:
            row = _index.get(str(player_id))
    return _get_player_dict(row) if row is not None else None


def _get_players(players=players, _cache=False):
    """Build a player dict for every row in *players*.

    When *_cache* is True the result is memoized at module level, with
    separate slots for the NBA and WNBA rosters (distinguished by object
    identity). NOTE(review): the memoized list is returned directly, so a
    caller mutating it would corrupt the shared cache.
    """
    global _cached_players, _cached_wnba_players
    if not _cache:
        return [_get_player_dict(row) for row in players]
    if players is wnba_players:
        if _cached_wnba_players is None:
            _cached_wnba_players = [_get_player_dict(row) for row in players]
        return _cached_wnba_players
    if _cached_players is None:
        _cached_players = [_get_player_dict(row) for row in players]
    return _cached_players


def _get_active_players(players=players, _cache=False):
    """Build player dicts for the active rows in *players*.

    When *_cache* is True the result is memoized at module level, with
    separate slots for NBA and WNBA rosters (distinguished by identity).
    """
    global _cached_active_players, _cached_wnba_active_players
    if not _cache:
        return [
            _get_player_dict(row) for row in players if row[player_index_is_active]
        ]
    if players is wnba_players:
        if _cached_wnba_active_players is None:
            _cached_wnba_active_players = [
                _get_player_dict(row)
                for row in players
                if row[player_index_is_active]
            ]
        return _cached_wnba_active_players
    if _cached_active_players is None:
        _cached_active_players = [
            _get_player_dict(row) for row in players if row[player_index_is_active]
        ]
    return _cached_active_players


def _get_inactive_players(players=players, _cache=False):
    """Build player dicts for the inactive rows in *players*.

    When *_cache* is True the result is memoized at module level, with
    separate slots for NBA and WNBA rosters (distinguished by identity).
    """
    global _cached_inactive_players, _cached_wnba_inactive_players
    if not _cache:
        return [
            _get_player_dict(row)
            for row in players
            if not row[player_index_is_active]
        ]
    if players is wnba_players:
        if _cached_wnba_inactive_players is None:
            _cached_wnba_inactive_players = [
                _get_player_dict(row)
                for row in players
                if not row[player_index_is_active]
            ]
        return _cached_wnba_inactive_players
    if _cached_inactive_players is None:
        _cached_inactive_players = [
            _get_player_dict(row)
            for row in players
            if not row[player_index_is_active]
        ]
    return _cached_inactive_players


def _get_player_dict(player_row):
Expand Down Expand Up @@ -97,15 +137,15 @@ def find_player_by_id(player_id):


def get_players():
    """Return every NBA player (active and inactive) as a list of dicts.

    The list is memoized at module level after the first call.
    """
    return _get_players(_cache=True)


def get_active_players():
    """Return all active NBA players as a list of dicts (memoized)."""
    return _get_active_players(_cache=True)


def get_inactive_players():
    """Return all inactive NBA players as a list of dicts (memoized)."""
    return _get_inactive_players(_cache=True)


def find_wnba_players_by_full_name(regex_pattern):
Expand All @@ -121,16 +161,16 @@ def find_wnba_players_by_last_name(regex_pattern):


def find_wnba_player_by_id(player_id):
    """Return the WNBA player dict for *player_id*, or None if not found."""
    return _find_player_by_id(player_id, _index=_wnba_players_by_id)


def get_wnba_players():
    """Return every WNBA player as a list of dicts (memoized)."""
    return _get_players(players=wnba_players, _cache=True)


def get_wnba_active_players():
    """Return all active WNBA players as a list of dicts (memoized)."""
    return _get_active_players(players=wnba_players, _cache=True)


def get_wnba_inactive_players():
    """Return all inactive WNBA players as a list of dicts (memoized)."""
    return _get_inactive_players(players=wnba_players, _cache=True)
Loading