From b12c884faef0bba241651bda9c9e8e5d4552a268 Mon Sep 17 00:00:00 2001 From: mcarans Date: Tue, 13 Jan 2026 11:59:16 +1300 Subject: [PATCH 1/6] Modernise to Py310 --- .pre-commit-config.yaml | 9 +- pyproject.toml | 2 +- requirements.txt | 137 +-- ruff.toml | 15 +- src/hdx/api/configuration.py | 78 +- src/hdx/api/locations.py | 55 +- src/hdx/api/utilities/dataset_title_helper.py | 31 +- src/hdx/api/utilities/date_helper.py | 49 +- src/hdx/api/utilities/filestore_helper.py | 29 +- src/hdx/api/utilities/hdx_error_handler.py | 83 +- src/hdx/api/utilities/hdx_state.py | 20 +- src/hdx/data/dataset.py | 930 +++++++++--------- src/hdx/data/hdxobject.py | 283 +++--- src/hdx/data/organization.py | 92 +- src/hdx/data/resource.py | 209 ++-- src/hdx/data/resource_matcher.py | 27 +- src/hdx/data/resource_view.py | 51 +- src/hdx/data/showcase.py | 131 +-- src/hdx/data/user.py | 158 +-- src/hdx/data/vocabulary.py | 221 +++-- src/hdx/facades/infer_arguments.py | 15 +- src/hdx/facades/keyword_arguments.py | 10 +- src/hdx/facades/simple.py | 10 +- tests/hdx/api/test_ckan.py | 4 +- tests/hdx/api/test_configuration.py | 4 +- tests/hdx/api/test_locations.py | 3 +- .../api/utilities/test_hdx_error_handler.py | 2 +- tests/hdx/api/utilities/test_hdx_state.py | 7 +- tests/hdx/data/test_dataset_add_hapi_error.py | 3 +- tests/hdx/data/test_dataset_core.py | 19 +- tests/hdx/data/test_dataset_noncore.py | 19 +- .../data/test_dataset_resource_generation.py | 6 +- tests/hdx/data/test_organization.py | 9 +- tests/hdx/data/test_resource.py | 15 +- tests/hdx/data/test_resource_view.py | 5 +- tests/hdx/data/test_showcase.py | 9 +- .../hdx/data/test_update_dataset_resources.py | 4 +- tests/hdx/data/test_update_logic.py | 8 +- tests/hdx/data/test_user.py | 7 +- tests/hdx/data/test_vocabulary.py | 5 +- tests/hdx/facades/__init__.py | 8 +- tests/hdx/facades/test_infer_arguments.py | 5 +- tests/hdx/facades/test_keyword_arguments.py | 5 +- tests/hdx/facades/test_simple.py | 5 +- workingexample/run.py | 3 +- 45 files changed, 1341 insertions(+), 1459 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cb45cfde..f6310146 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,13 +2,13 @@ default_language_version: python: python3.13 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-ast - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.12.0 + rev: v0.14.10 hooks: # Run the linter. - id: ruff-check @@ -16,10 +16,11 @@ repos: # Run the formatter. 
- id: ruff-format - repo: https://github.com/astral-sh/uv-pre-commit - rev: 0.7.14 + rev: 0.9.22 hooks: # Run the pip compile - id: pip-compile name: pip-compile requirements.txt files: pyproject.toml - args: [ pyproject.toml, --resolver=backtracking, --all-extras, --upgrade, -q, -o, requirements.txt ] + args: [ pyproject.toml, --resolver=backtracking, --upgrade, -q, + -o, requirements.txt ] diff --git a/pyproject.toml b/pyproject.toml index 7729933b..b2fcb5ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ classifiers = [ "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", ] -requires-python = ">=3.8" +requires-python = ">=3.10" dependencies = [ "ckanapi>=4.8", diff --git a/requirements.txt b/requirements.txt index 946019ce..5084e890 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,25 +1,15 @@ # This file was autogenerated by uv via the following command: -# uv pip compile pyproject.toml --resolver=backtracking --all-extras -o requirements.txt +# uv pip compile pyproject.toml --resolver=backtracking -o requirements.txt annotated-types==0.7.0 # via pydantic -astdoc==1.3.2 - # via mkapi attrs==25.4.0 # via # frictionless # jsonlines # jsonschema # referencing -babel==2.17.0 - # via mkdocs-material -backrefs==6.1 - # via mkdocs-material -cachetools==6.2.4 - # via google-auth certifi==2026.1.4 # via requests -cfgv==3.5.0 - # via pre-commit chardet==5.2.0 # via frictionless charset-normalizer==3.4.4 @@ -27,17 +17,9 @@ charset-normalizer==3.4.4 ckanapi==4.9 # via hdx-python-api (pyproject.toml) click==8.3.1 - # via - # mkdocs - # typer -colorama==0.4.6 - # via mkdocs-material -coverage==7.13.1 - # via pytest-cov + # via typer defopt==7.0.0 # via hdx-python-api (pyproject.toml) -distlib==0.4.0 - # via virtualenv dnspython==2.8.0 # via email-validator docopt==0.6.2 @@ -50,20 +32,8 @@ email-validator==2.3.0 # via hdx-python-api (pyproject.toml) et-xmlfile==2.0.0 # via openpyxl -filelock==3.20.2 - # via virtualenv frictionless==5.18.1 # via hdx-python-utilities -ghp-import==2.1.0 - # via mkdocs -google-auth==2.46.0 - # via - # google-auth-oauthlib - # gspread -google-auth-oauthlib==1.2.2 - # via gspread -gspread==6.2.1 - # via hdx-python-api (pyproject.toml) hdx-python-country==3.9.8 # via hdx-python-api (pyproject.toml) hdx-python-utilities==3.9.9 @@ -72,8 +42,6 @@ hdx-python-utilities==3.9.9 # hdx-python-country humanize==4.15.0 # via frictionless -identify==2.6.15 - # via pre-commit idna==3.11 # via # email-validator @@ -82,21 +50,15 @@ ijson==3.4.0.post0 # via hdx-python-utilities inflect==7.5.0 # via quantulum3 -iniconfig==2.3.0 - # via pytest isodate==0.7.2 # via frictionless jinja2==3.1.6 - # via - # frictionless - # mkapi - # mkdocs - # mkdocs-material + # via frictionless jsonlines==4.0.0 # via hdx-python-utilities jsonpath-ng==1.7.0 # via libhxl -jsonschema==4.25.1 +jsonschema==4.26.0 # via # frictionless # tableschema-to-template @@ -110,105 +72,39 @@ loguru==0.7.3 # via hdx-python-utilities makefun==1.16.0 # via hdx-python-api (pyproject.toml) -markdown==3.10 - # via - # mkdocs - # mkdocs-material - # pymdown-extensions markdown-it-py==4.0.0 # via rich marko==2.2.2 # via frictionless markupsafe==3.0.3 - # via - # jinja2 - # mkdocs + # via jinja2 mdurl==0.1.2 # via markdown-it-py -mergedeep==1.3.4 - # via - # mkdocs - # mkdocs-get-deps -mkapi==4.5.0 - # via hdx-python-api (pyproject.toml) -mkdocs==1.6.1 - # via - # mkapi - # mkdocs-material -mkdocs-get-deps==0.2.0 - # via mkdocs -mkdocs-material==9.7.1 - # via mkapi 
-mkdocs-material-extensions==1.3.1 - # via mkdocs-material more-itertools==10.8.0 # via inflect -nodeenv==1.10.0 - # via pre-commit num2words==0.5.14 # via quantulum3 -oauthlib==3.3.1 - # via requests-oauthlib openpyxl==3.1.5 # via hdx-python-utilities -packaging==25.0 - # via - # mkdocs - # pytest -paginate==0.5.7 - # via mkdocs-material -pathspec==0.12.1 - # via mkdocs petl==1.7.17 # via frictionless -platformdirs==4.5.1 - # via - # mkdocs-get-deps - # virtualenv -pluggy==1.6.0 - # via - # pytest - # pytest-cov ply==3.11 # via # jsonpath-ng # libhxl pockets==0.9.1 # via sphinxcontrib-napoleon -pre-commit==4.5.1 - # via hdx-python-api (pyproject.toml) -pyasn1==0.6.1 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.2 - # via google-auth pydantic==2.12.5 # via frictionless pydantic-core==2.41.5 # via pydantic pygments==2.19.2 - # via - # mkdocs-material - # pytest - # rich -pymdown-extensions==10.20 - # via mkdocs-material + # via rich pyphonetics==0.5.3 # via hdx-python-utilities -pytest==9.0.2 - # via - # hdx-python-api (pyproject.toml) - # pytest-check - # pytest-cov -pytest-check==2.6.2 - # via hdx-python-api (pyproject.toml) -pytest-cov==7.0.0 - # via hdx-python-api (pyproject.toml) python-dateutil==2.9.0.post0 # via # frictionless - # ghp-import # hdx-python-utilities # libhxl python-io-wrapper==0.3.1 @@ -220,14 +116,7 @@ python-slugify==8.0.4 pyyaml==6.0.3 # via # frictionless - # mkdocs - # mkdocs-get-deps - # pre-commit - # pymdown-extensions - # pyyaml-env-tag # tableschema-to-template -pyyaml-env-tag==1.1 - # via mkdocs quantulum3==0.9.2 # via hdx-python-api (pyproject.toml) ratelimit==2.2.1 @@ -242,13 +131,9 @@ requests==2.32.5 # ckanapi # frictionless # libhxl - # mkdocs-material # requests-file - # requests-oauthlib requests-file==3.0.1 # via hdx-python-utilities -requests-oauthlib==2.0.0 - # via google-auth-oauthlib rfc3986==2.0.0 # via frictionless rich==14.2.0 @@ -257,8 +142,6 @@ rpds-py==0.30.0 # via # jsonschema # referencing -rsa==4.9.1 - # via google-auth ruamel-yaml==0.19.1 # via hdx-python-utilities setuptools==80.9.0 @@ -289,7 +172,7 @@ text-unidecode==1.3 # via python-slugify typeguard==4.4.4 # via inflect -typer==0.21.0 +typer==0.21.1 # via frictionless typing-extensions==4.15.0 # via @@ -305,16 +188,12 @@ unidecode==1.4.0 # via # libhxl # pyphonetics -urllib3==2.6.2 +urllib3==2.6.3 # via # libhxl # requests validators==0.35.0 # via frictionless -virtualenv==20.35.4 - # via pre-commit -watchdog==6.0.0 - # via mkdocs wheel==0.45.1 # via libhxl xlrd==2.0.2 diff --git a/ruff.toml b/ruff.toml index cf4db097..c78ab467 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,15 +1,10 @@ +target-version = "py310" +src = ["src"] exclude = ["_version.py"] [lint] # List of rules: https://docs.astral.sh/ruff/rules/ -select = [ - "E", # pycodestyle - default - "F", # pyflakes - default - "I" # isort +extend-select = [ + "I", # isort + "UP" # Upgrade Python ] -ignore = [ - "E501" # Line too long -] - -[lint.isort] -known-local-folder = ["hdx"] diff --git a/src/hdx/api/configuration.py b/src/hdx/api/configuration.py index 059f8e63..324d4f0a 100755 --- a/src/hdx/api/configuration.py +++ b/src/hdx/api/configuration.py @@ -5,12 +5,10 @@ from base64 import b64decode from collections import UserDict from os.path import expanduser, isfile, join -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional import ckanapi import requests - -from hdx.api import __version__ from hdx.utilities.dictandlist import merge_two_dictionaries from hdx.utilities.email import Email 
from hdx.utilities.loader import load_json, load_yaml @@ -18,6 +16,8 @@ from hdx.utilities.session import get_session from hdx.utilities.useragent import UserAgent, UserAgentError +from hdx.api import __version__ + logger = logging.getLogger(__name__) @@ -229,7 +229,7 @@ def set_read_only(self, read_only: bool = True) -> None: Set HDX read only flag Args: - read_only (bool): Value to set HDX read only flag. Defaults to True. + read_only: Value to set HDX read only flag. Defaults to True. Returns: None @@ -241,19 +241,19 @@ def set_api_key(self, apikey: str) -> None: Set HDX api key Args: - apikey (str): Value to set api key. + apikey: Value to set api key. Returns: None """ self.hdx_key = apikey - def get_api_key(self) -> Optional[str]: + def get_api_key(self) -> str | None: """ Return HDX api key or None if read only Returns: - Optional[str]: HDX api key or None if read only + HDX api key or None if read only """ if self.hdx_read_only: @@ -265,7 +265,7 @@ def get_user_agent(self) -> str: Return user agent Returns: - str: User agent + User agent """ return self.user_agent @@ -275,7 +275,7 @@ def get_hdx_site_url(self) -> str: Return HDX web site url Returns: - str: HDX web site url + HDX web site url """ return self.data[self.hdx_site]["url"] @@ -288,17 +288,17 @@ def get_dataset_url(self, name: str) -> str: name: Dataset name Returns: - str: HDX dataset url + HDX dataset url """ return f"{self.get_hdx_site_url()}/dataset/{name}" - def _get_credentials(self) -> Optional[Tuple[str, str]]: + def _get_credentials(self) -> tuple[str, str] | None: """ Return HDX site username and password Returns: - Optional[Tuple[str, str]]: HDX site username and password or None + HDX site username and password or None """ site = self.data[self.hdx_site] @@ -315,7 +315,7 @@ def get_session(self) -> requests.Session: Return the session object Returns: - requests.Session: The session object + The session object """ if self._session is None: @@ -329,7 +329,7 @@ def remoteckan(self) -> ckanapi.RemoteCKAN: Return the remote CKAN object (see ckanapi library) Returns: - ckanapi.RemoteCKAN: The remote CKAN object + The remote CKAN object """ if self._remoteckan is None: @@ -338,7 +338,7 @@ def remoteckan(self) -> ckanapi.RemoteCKAN: ) return self._remoteckan - def call_remoteckan(self, *args: Any, **kwargs: Any) -> Dict: + def call_remoteckan(self, *args: Any, **kwargs: Any) -> dict: """ Calls the remote CKAN @@ -347,7 +347,7 @@ def call_remoteckan(self, *args: Any, **kwargs: Any) -> Dict: **kwargs: Keyword arguments to pass to remote CKAN call_action method Returns: - Dict: The response from the remote CKAN call_action method + The response from the remote CKAN call_action method """ requests_kwargs = kwargs.get("requests_kwargs", {}) @@ -363,24 +363,24 @@ def call_remoteckan(self, *args: Any, **kwargs: Any) -> Dict: def create_session_user_agent( cls, session: requests.Session = None, - user_agent: Optional[str] = None, - user_agent_config_yaml: Optional[str] = None, - user_agent_lookup: Optional[str] = None, + user_agent: str | None = None, + user_agent_config_yaml: str | None = None, + user_agent_lookup: str | None = None, use_env: bool = False, **kwargs: Any, - ) -> Tuple[requests.Session, str]: + ) -> tuple[requests.Session, str]: """ Create session and user agent from configuration Args: - session (requests.Session): requests Session object to use. Defaults to calling hdx.utilities.session.get_session() - user_agent (Optional[str]): User agent string. HDXPythonLibrary/X.X.X- is prefixed. 
- user_agent_config_yaml (Optional[str]): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yaml. - user_agent_lookup (Optional[str]): Lookup key for YAML. Ignored if user_agent supplied. - use_env (bool): Whether to read environment variables. Defaults to False. + session: requests Session object to use. Defaults to calling hdx.utilities.session.get_session() + user_agent: User agent string. HDXPythonLibrary/X.X.X- is prefixed. + user_agent_config_yaml: Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yaml. + user_agent_lookup: Lookup key for YAML. Ignored if user_agent supplied. + use_env: Whether to read environment variables. Defaults to False. Returns: - Tuple[requests.Session, str]: Tuple of (session, user agent) + Tuple of (session, user agent) """ if not session: @@ -416,13 +416,13 @@ def create_session_user_agent( return session, ua def setup_session_remoteckan( - self, remoteckan: Optional[ckanapi.RemoteCKAN] = None, **kwargs: Any + self, remoteckan: ckanapi.RemoteCKAN | None = None, **kwargs: Any ) -> None: """ Set up remote CKAN from provided CKAN or by creating from configuration Args: - remoteckan (Optional[ckanapi.RemoteCKAN]): CKAN instance. Defaults to setting one up from configuration. + remoteckan: CKAN instance. Defaults to setting one up from configuration. Returns: None @@ -445,7 +445,7 @@ def emailer(self) -> Email: Return the Email object (see :any:`Email`) Returns: - Email: The email object + The email object """ if self._emailer is None: @@ -482,7 +482,7 @@ def read(cls) -> "Configuration": Read the HDX configuration Returns: - Configuration: The HDX configuration + The HDX configuration """ if cls._configuration is None: @@ -499,7 +499,7 @@ def setup( Construct the HDX configuration Args: - configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. + configuration: Configuration instance. Defaults to setting one up from passed arguments. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan is not. user_agent_config_yaml (str): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yaml. @@ -531,15 +531,15 @@ def setup( def _create( cls, configuration: Optional["Configuration"] = None, - remoteckan: Optional[ckanapi.RemoteCKAN] = None, + remoteckan: ckanapi.RemoteCKAN | None = None, **kwargs: Any, ) -> str: """ Create HDX configuration Args: - configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. - remoteckan (Optional[ckanapi.RemoteCKAN]): CKAN instance. Defaults to setting one up from configuration. + configuration: Configuration instance. Defaults to setting one up from passed arguments. + remoteckan: CKAN instance. Defaults to setting one up from configuration. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan is not. user_agent_config_yaml (str): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yaml. @@ -559,7 +559,7 @@ def _create( hdx_base_config_yaml (str): Path to YAML HDX base configuration. Defaults to library's internal hdx_base_configuration.yaml. 
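
A minimal usage sketch of the entry point documented above (the site, key and
user agent values are illustrative, not from this patch):

    from hdx.api.configuration import Configuration

    # user_agent must be supplied if no remoteckan is passed in;
    # hdx_site and hdx_read_only are kwargs documented above
    site_url = Configuration.create(
        hdx_site="stage",
        user_agent="MyOrg_MyPipeline",
        hdx_read_only=True,
    )
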
Returns: - str: HDX site url + HDX site url """ cls.setup(configuration, **kwargs) @@ -570,15 +570,15 @@ def _create( def create( cls, configuration: Optional["Configuration"] = None, - remoteckan: Optional[ckanapi.RemoteCKAN] = None, + remoteckan: ckanapi.RemoteCKAN | None = None, **kwargs: Any, ) -> str: """ Create HDX configuration. Can only be called once (will raise an error if called more than once). Args: - configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. - remoteckan (Optional[ckanapi.RemoteCKAN]): CKAN instance. Defaults to setting one up from configuration. + configuration: Configuration instance. Defaults to setting one up from passed arguments. + remoteckan: CKAN instance. Defaults to setting one up from configuration. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan is not. user_agent_config_yaml (str): Path to YAML user agent configuration. Ignored if user_agent supplied. Defaults to ~/.useragent.yaml. @@ -598,7 +598,7 @@ def create( hdx_base_config_yaml (str): Path to YAML HDX base configuration. Defaults to library's internal hdx_base_configuration.yaml. Returns: - str: HDX site url + HDX site url """ if cls._configuration is not None: diff --git a/src/hdx/api/locations.py b/src/hdx/api/locations.py index 9a573e5c..2264e0f9 100755 --- a/src/hdx/api/locations.py +++ b/src/hdx/api/locations.py @@ -1,9 +1,8 @@ """Locations in HDX""" -from typing import Dict, List, Optional, Tuple +from collections.abc import Sequence from hdx.api.configuration import Configuration -from hdx.utilities.typehint import ListTuple class Locations: @@ -12,15 +11,15 @@ class Locations: _validlocations = None @classmethod - def validlocations(cls, configuration=None) -> List[Dict]: + def validlocations(cls, configuration=None) -> list[dict]: """ Read valid locations from HDX Args: - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + configuration: HDX configuration. Defaults to global configuration. Returns: - List[Dict]: A list of valid locations + A list of valid locations """ if cls._validlocations is None: if configuration is None: @@ -31,12 +30,12 @@ def validlocations(cls, configuration=None) -> List[Dict]: return cls._validlocations @classmethod - def set_validlocations(cls, locations: ListTuple[Dict]) -> None: + def set_validlocations(cls, locations: Sequence[dict]) -> None: """ Set valid locations using list of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} Args: - locations (ListTuple[Dict]): List of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} + locations: List of dictionaries of form {'name': 'zmb', 'title', 'Zambia'} Returns: None @@ -47,18 +46,18 @@ def set_validlocations(cls, locations: ListTuple[Dict]) -> None: def get_location_from_HDX_code( cls, code: str, - locations: Optional[ListTuple[Dict]] = None, - configuration: Optional[Configuration] = None, - ) -> Optional[str]: + locations: Sequence[dict] | None = None, + configuration: Configuration | None = None, + ) -> str | None: """Get location from HDX location code Args: - code (str): code for which to get location name - locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + code: code for which to get location name + locations: Valid locations list. Defaults to list downloaded from HDX. 
+ configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[str]: location name or None + location name or None """ if locations is None: locations = cls.validlocations(configuration) @@ -72,18 +71,18 @@ def get_location_from_HDX_code( def get_HDX_code_from_location( cls, location: str, - locations: Optional[ListTuple[Dict]] = None, - configuration: Optional[Configuration] = None, - ) -> Optional[str]: + locations: Sequence[dict] | None = None, + configuration: Configuration | None = None, + ) -> str | None: """Get HDX code for location Args: - location (str): Location for which to get HDX code - locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + location: Location for which to get HDX code + locations: Valid locations list. Defaults to list downloaded from HDX. + configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[str]: HDX code or None + HDX code or None """ if locations is None: locations = cls.validlocations(configuration) @@ -102,18 +101,18 @@ def get_HDX_code_from_location( def get_HDX_code_from_location_partial( cls, location: str, - locations: Optional[ListTuple[Dict]] = None, - configuration: Optional[Configuration] = None, - ) -> Tuple[Optional[str], bool]: + locations: Sequence[dict] | None = None, + configuration: Configuration | None = None, + ) -> tuple[str | None, bool]: """Get HDX code for location Args: - location (str): Location for which to get HDX code - locations (Optional[ListTuple[Dict]]): Valid locations list. Defaults to list downloaded from HDX. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + location: Location for which to get HDX code + locations: Valid locations list. Defaults to list downloaded from HDX. + configuration: HDX configuration. Defaults to global configuration. 
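
A short sketch of these lookups, seeding the valid locations list first so that
no HDX call is made (the upper-cased return value is an assumption, not shown
in this patch):

    from hdx.api.locations import Locations

    Locations.set_validlocations([{"name": "zmb", "title": "Zambia"}])
    code = Locations.get_HDX_code_from_location("Zambia")  # "ZMB"
    code, exact = Locations.get_HDX_code_from_location_partial("Republic of Zambia")
    # exact is False when only a partial match was found
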
Returns: - Tuple[Optional[str], bool]: HDX code and if the match is exact or (None, False) for no match + HDX code and if the match is exact or (None, False) for no match """ hdx_code = cls.get_HDX_code_from_location(location, locations, configuration) diff --git a/src/hdx/api/utilities/dataset_title_helper.py b/src/hdx/api/utilities/dataset_title_helper.py index 5ada66e6..ed27bb28 100755 --- a/src/hdx/api/utilities/dataset_title_helper.py +++ b/src/hdx/api/utilities/dataset_title_helper.py @@ -3,12 +3,10 @@ import logging import re from datetime import datetime, timedelta +from re import Match from string import punctuation, whitespace -from typing import List, Match, Optional, Tuple from dateutil.parser import ParserError -from quantulum3 import parser - from hdx.utilities.dateparse import parse_date, parse_date_range from hdx.utilities.text import ( PUNCTUATION_MINUS_BRACKETS, @@ -16,6 +14,7 @@ remove_from_end, remove_string, ) +from quantulum3 import parser logger = logging.getLogger(__name__) @@ -36,19 +35,19 @@ class DatasetTitleHelper: def fuzzy_match_dates_in_title( cls, title: str, - ranges: List[Tuple[datetime, datetime]], - ignore_wrong_years: List[int], + ranges: list[tuple[datetime, datetime]], + ignore_wrong_years: list[int], ) -> str: """ Fuzzy match dates in title appending to ranges Args: - title (str): Title to parse - ranges (List[Tuple[datetime,datetime]]): List of date ranges found so far - ignore_wrong_years (List[int]): Numbers identified as years that probably are not years + title: Title to parse + ranges: List of date ranges found so far + ignore_wrong_years: Numbers identified as years that probably are not years Returns: - str: Title with dates removed + Title with dates removed """ for quant in parser.parse(title): @@ -139,17 +138,17 @@ def fuzzy_match_dates_in_title( @classmethod def get_month_year_in_slash_range( - cls, match: Match, ignore_wrong_years: List[int] - ) -> Tuple[Optional[int], Optional[int], Optional[int]]: + cls, match: Match, ignore_wrong_years: list[int] + ) -> tuple[int | None, int | None, int | None]: """ Get year(s) and month from slash range eg. 2007/08. If second value is between 1 and 12, take it as a month. 
Args: match (Match): Match object - ignore_wrong_years (List[int]): Numbers identified as years that probably are not years + ignore_wrong_years: Numbers identified as years that probably are not years Returns: - Tuple[Optional[int], Optional[int], Optional[int]]: First year, first month, second year + First year, first month, second year """ first_year_str = match.group(1) @@ -174,15 +173,15 @@ def get_month_year_in_slash_range( @classmethod def get_dates_from_title( cls, title: str - ) -> Tuple[str, List[Tuple[datetime, datetime]]]: + ) -> tuple[str, list[tuple[datetime, datetime]]]: """ Get dataset dates (start and end dates in a list) from title and clean title of dates Args: - title (str): Title to get date from and clean + title: Title to get date from and clean Returns: - Tuple[str,List[Tuple[datetime,datetime]]]: Cleaned title, list of start and end dates + Cleaned title, list of start and end dates """ ranges = [] diff --git a/src/hdx/api/utilities/date_helper.py b/src/hdx/api/utilities/date_helper.py index 31ac31c3..f9c8f3e4 100755 --- a/src/hdx/api/utilities/date_helper.py +++ b/src/hdx/api/utilities/date_helper.py @@ -2,7 +2,6 @@ from collections.abc import Iterable from datetime import datetime, timezone -from typing import Dict, List, Optional, Tuple, Union from hdx.utilities.dateparse import now_utc, parse_date @@ -10,7 +9,7 @@ class DateHelper: @staticmethod def get_hdx_date( - date: Union[datetime, str], + date: datetime | str, ignore_timeinfo: bool, include_microseconds=False, max=False, @@ -18,12 +17,12 @@ def get_hdx_date( """Get an HDX date as a string from a datetime.datetime object. Args: - date (Union[datetime, str]): Date as datetime or string - ignore_timeinfo (bool): Ignore time and time zone of date. Defaults to True. - include_microseconds (bool): Include microsconds. Defaults to False. + date: Date as datetime or string + ignore_timeinfo: Ignore time and time zone of date. Defaults to True. + include_microseconds: Include microsconds. Defaults to False. Returns: - str: HDX date as a string + HDX date as a string """ if ignore_timeinfo: timezone_handling = 0 @@ -63,9 +62,9 @@ def get_hdx_date( @staticmethod def get_time_period_info( hdx_time_period: str, - date_format: Optional[str] = None, + date_format: str | None = None, today: datetime = now_utc(), - ) -> Dict: + ) -> dict: """Get time period as datetimes and strings in specified format. If no format is supplied, the ISO 8601 format is used. Returns a dictionary containing keys startdate (start date as datetime), enddate @@ -74,12 +73,12 @@ def get_time_period_info( forward every day). Args: - hdx_time_period (str): Input time period - date_format (Optional[str]): Date format. None is taken to be ISO 8601. Defaults to None. - today (datetime): Date to use for today. Defaults to now_utc(). + hdx_time_period: Input time period + date_format: Date format. None is taken to be ISO 8601. Defaults to None. + today: Date to use for today. Defaults to now_utc(). Returns: - Dict: Dictionary of date information + Dictionary of date information """ result = {} if hdx_time_period: @@ -115,8 +114,8 @@ def get_time_period_info( @classmethod def get_hdx_time_period( cls, - startdate: Union[datetime, str], - enddate: Union[datetime, str, None] = None, + startdate: datetime | str, + enddate: datetime | str | None = None, ongoing: bool = False, ignore_timeinfo: bool = True, ) -> str: @@ -124,13 +123,13 @@ def get_hdx_time_period( strings with option to set ongoing. 
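
A sketch of a typical call; the output format is assumed from HDX's
"[start TO end]" dataset date convention rather than shown in this patch:

    from datetime import datetime

    from hdx.api.utilities.date_helper import DateHelper

    time_period = DateHelper.get_hdx_time_period(
        datetime(2023, 1, 1), datetime(2023, 12, 31)
    )
    # e.g. "[2023-01-01T00:00:00 TO 2023-12-31T23:59:59]";
    # with ongoing=True the end date becomes "*"
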
Args: - startdate (Union[datetime, str]): Dataset start date - enddate (Union[datetime, str, None]): Dataset end date. Defaults to None. - ongoing (bool): True if ongoing, False if not. Defaults to False. - ignore_timeinfo (bool): Ignore time and time zone of date. Defaults to True. + startdate: Dataset start date + enddate: Dataset end date. Defaults to None. + ongoing: True if ongoing, False if not. Defaults to False. + ignore_timeinfo: Ignore time and time zone of date. Defaults to True. Returns: - str: HDX time period + HDX time period """ startdate = cls.get_hdx_date( startdate, @@ -154,18 +153,18 @@ def get_hdx_time_period( @classmethod def get_hdx_time_period_from_years( cls, - startyear: Union[str, int, Iterable], - endyear: Union[str, int, None] = None, - ) -> Tuple[str, List[int]]: + startyear: str | int | Iterable, + endyear: str | int | None = None, + ) -> tuple[str, list[int]]: """Get an HDX time period from an iterable of years or a start and end year. Args: - startyear (Union[str, int, Iterable]): Start year given as string or int or range in an iterable - endyear (Union[str, int, None]): End year given as string or int + startyear: Start year given as string or int or range in an iterable + endyear: End year given as string or int Returns: - Tuple[str,List[int]]: (HDX time period, the start and end year if supplied or sorted list of years) + (HDX time period, the start and end year if supplied or sorted list of years) """ retval = [] if isinstance(startyear, str): diff --git a/src/hdx/api/utilities/filestore_helper.py b/src/hdx/api/utilities/filestore_helper.py index dea52581..cc094bd3 100755 --- a/src/hdx/api/utilities/filestore_helper.py +++ b/src/hdx/api/utilities/filestore_helper.py @@ -1,7 +1,7 @@ """Helper to the Dataset class for handling resources with filestores.""" import logging -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from hdx.utilities.dateparse import now_utc_notz from hdx.utilities.file_hashing import get_size_and_hash @@ -20,7 +20,7 @@ def resource_check_required_fields(resource: "Resource", **kwargs: Any) -> None: """Helper method to get ignore_fields from kwargs if it exists and add package_id Args: - resource (Resource): Resource to check + resource: Resource to check **kwargs: Keyword arguments Returns: @@ -42,7 +42,7 @@ def resource_check_required_fields(resource: "Resource", **kwargs: Any) -> None: def check_filestore_resource( cls, resource_data_to_update: "Resource", - filestore_resources: Dict[int, str], + filestore_resources: dict[int, str], resource_index: int, **kwargs: Any, ) -> int: @@ -57,12 +57,12 @@ def check_filestore_resource( 4 = file not uploaded (hash, size unchanged), given last_modified ignored Args: - resource_data_to_update (Resource): Updated resource from dataset - filestore_resources (Dict[int, str]): List of (index of resource, file to upload) - resource_index (int): Index of resource + resource_data_to_update: Updated resource from dataset + filestore_resources: List of (index of resource, file to upload) + resource_index: Index of resource Returns: - int: Status code + Status code """ resource_data_to_update.set_types() resource_data_to_update.correct_format(resource_data_to_update.data) @@ -83,7 +83,7 @@ def dataset_update_filestore_resource( cls, original_resource_data: "Resource", resource_data_to_update: "Resource", - filestore_resources: Dict[int, str], + filestore_resources: dict[int, str], resource_index: int, **kwargs: Any, ) -> int: @@ -98,14 +98,15 @@ def 
dataset_update_filestore_resource( 4 = file not uploaded (hash, size unchanged), given last_modified ignored Args: - original_resource_data (Resource): Original resource from dataset - resource_data_to_update (Resource): Updated resource from dataset - filestore_resources (Dict[int, str]): List of (index of resources, file to upload) - resource_index (int): Index of resource - **kwargs: Any, + original_resource_data: Original resource from dataset + resource_data_to_update: Updated resource from dataset + filestore_resources: List of (index of resources, file to upload) + resource_index: Index of resource + **kwargs: See below + force_update (bool): Whether to force an update Returns: - int: Status code + Status code """ file_to_upload = resource_data_to_update.get_file_to_upload() if file_to_upload: diff --git a/src/hdx/api/utilities/hdx_error_handler.py b/src/hdx/api/utilities/hdx_error_handler.py index f6480a43..56aefdb7 100644 --- a/src/hdx/api/utilities/hdx_error_handler.py +++ b/src/hdx/api/utilities/hdx_error_handler.py @@ -1,12 +1,13 @@ import logging +from collections.abc import Sequence from os import getenv -from typing import Any, Tuple +from typing import Any -from hdx.data.dataset import Dataset -from hdx.data.hdxobject import HDXError from hdx.utilities.dictandlist import dict_of_sets_add from hdx.utilities.error_handler import ErrorHandler -from hdx.utilities.typehint import ListTuple + +from hdx.data.dataset import Dataset +from hdx.data.hdxobject import HDXError logger = logging.getLogger(__name__) @@ -19,8 +20,8 @@ class HDXErrorHandler(ErrorHandler): sorted. Args: - should_exit_on_error (bool): Whether to exit with a 1 code if there are errors. Default is False. - write_to_hdx (Any): Whether to write errors to HDX resources. Default is None (write errors). + should_exit_on_error: Whether to exit with a 1 code if there are errors. Default is False. + write_to_hdx: Whether to write errors to HDX resources. Default is None (write errors). """ @@ -46,10 +47,10 @@ def get_category(pipeline: str, identifier: str) -> str: Get category from pipeline and identifier Args: - pipeline (str): Name of the pipeline originating the error - identifier (str): Identifier e.g. dataset name + pipeline: Name of the pipeline originating the error + identifier: Identifier e.g. dataset name Returns: - str: Category + Category """ return f"{pipeline} - {identifier}" @@ -65,12 +66,12 @@ def errors_to_hdx( Add a new message to the hdx_error type Args: - pipeline (str): Name of the pipeline originating the error - identifier (str): Identifier e.g. dataset name - text (str): Text to use e.g. "sector CSS not found in table" - resource_name (str): The resource name that the message applies to. Only needed if writing errors to HDX - message_type (str): The type of message (error or warning). Default is "error" - err_to_hdx (bool): Flag indicating if the message should be added to HDX metadata. Default is False + pipeline: Name of the pipeline originating the error + identifier: Identifier e.g. dataset name + text: Text to use e.g. "sector CSS not found in table" + resource_name: The resource name that the message applies to. Only needed if writing errors to HDX + message_type: The type of message (error or warning). Default is "error" + err_to_hdx: Flag indicating if the message should be added to HDX metadata. Default is False Returns: None """ @@ -94,12 +95,12 @@ def add_message( identifier is usually a dataset name. 
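
A minimal sketch of how this method is typically driven (the pipeline and
dataset names are invented):

    from hdx.api.utilities.hdx_error_handler import HDXErrorHandler

    with HDXErrorHandler(should_exit_on_error=False) as error_handler:
        error_handler.add_message(
            "my_pipeline", "my_dataset", "sector CSS not found in table"
        )
    # on exit, the accumulated messages are output sorted and, where
    # err_to_hdx was set, written to the HDX resources concerned
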
Args: - pipeline (str): Name of the pipeline originating the error - identifier (str): Identifier e.g. dataset name - text (str): Text to use e.g. "sector CSS not found in table" - resource_name (str): The resource name that the message applies to. Only needed if writing errors to HDX - message_type (str): The type of message (error or warning). Default is "error" - err_to_hdx (bool): Flag indicating if the message should be added to HDX metadata. Default is False + pipeline: Name of the pipeline originating the error + identifier: Identifier e.g. dataset name + text: Text to use e.g. "sector CSS not found in table" + resource_name: The resource name that the message applies to. Only needed if writing errors to HDX + message_type: The type of message (error or warning). Default is "error" + err_to_hdx: Flag indicating if the message should be added to HDX metadata. Default is False Returns: None """ @@ -123,13 +124,13 @@ def add_missing_value_message( identifier is usually a dataset name. Args: - pipeline (str): Name of the scaper originating the error - identifier (str): Identifier e.g. dataset name - value_type (str): Type of value e.g. "sector" - value (Any): Missing value - resource_name (str): The resource name that the message applies to. Only needed if writing errors to HDX - message_type (str): The type of message (error or warning). Default is "error" - err_to_hdx (bool): Flag indicating if the message should be added to HDX metadata. Default is False + pipeline: Name of the scaper originating the error + identifier: Identifier e.g. dataset name + value_type: Type of value e.g. "sector" + value: Missing value + resource_name: The resource name that the message applies to. Only needed if writing errors to HDX + message_type: The type of message (error or warning). Default is "error" + err_to_hdx: Flag indicating if the message should be added to HDX metadata. Default is False Returns: None """ @@ -148,7 +149,7 @@ def add_multi_valued_message( pipeline: str, identifier: str, text: str, - values: ListTuple, + values: Sequence, resource_name: str = "", message_type: str = "error", err_to_hdx: bool = False, @@ -161,15 +162,15 @@ def add_multi_valued_message( a dataset name. Args: - pipeline (str): Name of the scaper originating the error - identifier (str): Identifier e.g. dataset name - text (str): Text to use e.g. "negative values removed" - values (ListTuple): The list of related values of concern - resource_name (str): The resource name that the message applies to. Only needed if writing errors to HDX - message_type (str): The type of message (error or warning). Default is "error" - err_to_hdx (bool): Flag indicating if the message should be added to HDX metadata. Default is False + pipeline: Name of the scaper originating the error + identifier: Identifier e.g. dataset name + text: Text to use e.g. "negative values removed" + values: The list of related values of concern + resource_name: The resource name that the message applies to. Only needed if writing errors to HDX + message_type: The type of message (error or warning). Default is "error" + err_to_hdx: Flag indicating if the message should be added to HDX metadata. 
Default is False Returns: - bool: True if a message was added, False if not + True if a message was added, False if not """ text = self.multi_valued_message(text, values) if text is None: @@ -216,17 +217,17 @@ def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: def write_errors_to_resource( - identifier: Tuple[str, str, str], errors: set[str] + identifier: tuple[str, str, str], errors: set[str] ) -> bool: """ Writes error messages to a resource on HDX. If the resource already has an error message, it is only overwritten if the two messages are different. Args: - identifier (Tuple[str, str, str]): Scraper, dataset, and resource names that the message applies to - errors (set[str]): Set of errors to use e.g. "negative values removed" + identifier: Scraper, dataset, and resource names that the message applies to + errors: Set of errors to use e.g. "negative values removed" Returns: - bool: True if a message was added, False if not + True if a message was added, False if not """ # We are using the names here because errors may be specified in the YAML by us _, dataset_name, resource_name = identifier diff --git a/src/hdx/api/utilities/hdx_state.py b/src/hdx/api/utilities/hdx_state.py index 889beb19..68d8d7c7 100644 --- a/src/hdx/api/utilities/hdx_state.py +++ b/src/hdx/api/utilities/hdx_state.py @@ -1,15 +1,17 @@ """Utility to save state to a dataset and read it back.""" import logging +from collections.abc import Callable from os.path import join -from typing import Any, Callable, Optional +from typing import Any -from hdx.api.configuration import Configuration -from hdx.data.dataset import Dataset from hdx.utilities.loader import load_text from hdx.utilities.saver import save_text from hdx.utilities.state import State +from hdx.api.configuration import Configuration +from hdx.data.dataset import Dataset + logger = logging.getLogger(__name__) @@ -20,11 +22,11 @@ class HDXState(State): while the output transformation outputs a string. Args: - dataset_name_or_id (str): Dataset name or ID - path (str): Path to temporary folder for state - read_fn (Callable[[str], Any]): Input state transformation. Defaults to lambda x: x. + dataset_name_or_id: Dataset name or ID + path: Path to temporary folder for state + read_fn: Input state transformation. Defaults to lambda x: x. write_fn: Callable[[Any], str]: Output state transformation. Defaults to lambda x: x. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + configuration: HDX configuration. Defaults to global configuration. 
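
A usage sketch, assuming the get/set accessors inherited from the base State
class (the dataset name and state folder are illustrative):

    from hdx.api.utilities.hdx_state import HDXState
    from hdx.utilities.dateparse import (
        iso_string_from_datetime,
        now_utc,
        parse_date,
    )

    with HDXState(
        "my-state-dataset", "state_folder", parse_date, iso_string_from_datetime
    ) as state:
        last_run_date = state.get()
        # ... do the pipeline's work ...
        state.set(now_utc())  # saved back to the dataset on exit
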
""" def __init__( @@ -33,7 +35,7 @@ def __init__( path: str, read_fn: Callable[[str], Any] = lambda x: x, write_fn: Callable[[Any], str] = lambda x: x, - configuration: Optional[Configuration] = None, + configuration: Configuration | None = None, ) -> None: self._dataset_name_or_id = dataset_name_or_id self._resource = None @@ -44,7 +46,7 @@ def read(self) -> Any: """Read state from HDX dataset Returns: - Any: State + State """ dataset = Dataset.read_from_hdx( self._dataset_name_or_id, configuration=self._configuration diff --git a/src/hdx/data/dataset.py b/src/hdx/data/dataset.py index c920d432..ed90ec45 100755 --- a/src/hdx/data/dataset.py +++ b/src/hdx/data/dataset.py @@ -4,37 +4,17 @@ import logging import sys import warnings +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence from copy import deepcopy from datetime import datetime from os.path import isfile, join from typing import ( TYPE_CHECKING, Any, - Callable, - Dict, - Iterable, - Iterator, - List, Optional, - Tuple, Union, ) -from hxl.input import HXLIOException, InputOptions, munge_url - -import hdx.data.organization as org_module -import hdx.data.resource as res_module -import hdx.data.resource_view as resource_view -import hdx.data.showcase as sc_module -import hdx.data.user as user -import hdx.data.vocabulary as vocabulary -from hdx.api.configuration import Configuration -from hdx.api.locations import Locations -from hdx.api.utilities.dataset_title_helper import DatasetTitleHelper -from hdx.api.utilities.date_helper import DateHelper -from hdx.api.utilities.filestore_helper import FilestoreHelper -from hdx.data.hdxobject import HDXError, HDXObject -from hdx.data.resource_matcher import ResourceMatcher from hdx.location.country import Country from hdx.utilities.base_downloader import BaseDownload from hdx.utilities.dateparse import ( @@ -50,8 +30,22 @@ from hdx.utilities.loader import load_json from hdx.utilities.path import script_dir_plus_file from hdx.utilities.saver import save_iterable, save_json -from hdx.utilities.typehint import ListTuple, ListTupleDict from hdx.utilities.uuid import is_valid_uuid +from hxl.input import HXLIOException, InputOptions, munge_url + +import hdx.data.organization as org_module +import hdx.data.resource as res_module +import hdx.data.resource_view as resource_view +import hdx.data.showcase as sc_module +import hdx.data.user as user +import hdx.data.vocabulary as vocabulary +from hdx.api.configuration import Configuration +from hdx.api.locations import Locations +from hdx.api.utilities.dataset_title_helper import DatasetTitleHelper +from hdx.api.utilities.date_helper import DateHelper +from hdx.api.utilities.filestore_helper import FilestoreHelper +from hdx.data.hdxobject import HDXError, HDXObject +from hdx.data.resource_matcher import ResourceMatcher if TYPE_CHECKING: from hdx.data.organization import Organization @@ -70,8 +64,8 @@ class Dataset(HDXObject): """Dataset class enabling operations on datasets and associated resources. Args: - initial_data (Optional[Dict]): Initial dataset metadata dictionary. Defaults to None. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + initial_data: Initial dataset metadata dictionary. Defaults to None. + configuration: HDX configuration. Defaults to global configuration. 
""" max_attempts = 5 @@ -130,8 +124,8 @@ class Dataset(HDXObject): def __init__( self, - initial_data: Optional[Dict] = None, - configuration: Optional[Configuration] = None, + initial_data: dict | None = None, + configuration: Configuration | None = None, ) -> None: if not initial_data: initial_data = {} @@ -140,11 +134,11 @@ def __init__( self._preview_resourceview = None @staticmethod - def actions() -> Dict[str, str]: + def actions() -> dict[str, str]: """Dictionary of actions that can be performed on object Returns: - Dict[str, str]: Dictionary of actions that can be performed on object + Dictionary of actions that can be performed on object """ return { "show": "package_show", @@ -163,8 +157,8 @@ def __setitem__(self, key: Any, value: Any) -> None: """Set dictionary items but do not allow setting of resources Args: - key (Any): Key in dictionary - value (Any): Value to put in dictionary + key: Key in dictionary + value: Value to put in dictionary Returns: None @@ -193,11 +187,11 @@ def unseparate_resources(self) -> None: if self._resources: self.data["resources"] = self._convert_hdxobjects(self._resources) - def get_dataset_dict(self) -> Dict: + def get_dataset_dict(self) -> dict: """Move self.resources into resources key in internal dictionary Returns: - Dict: Dataset dictionary + Dataset dictionary """ package = deepcopy(self.data) if self._resources: @@ -209,8 +203,8 @@ def save_to_json(self, path: str, follow_urls: bool = False): datasets, HXL proxy urls etc. are followed to retrieve final urls. Args: - path (str): Path to save dataset - follow_urls (bool): Whether to follow urls. Defaults to False. + path: Path to save dataset + follow_urls: Whether to follow urls. Defaults to False. Returns: None @@ -229,10 +223,10 @@ def load_from_json(path: str) -> Optional["Dataset"]: """Load dataset from JSON Args: - path (str): Path to load dataset + path: Path to load dataset Returns: - Optional[Dataset]: Dataset created from JSON or None + Dataset created from JSON or None """ jsonobj = load_json(path, loaderror_if_empty=False) if jsonobj is None: @@ -247,18 +241,18 @@ def init_resources(self) -> None: Returns: None """ - self._resources: List[res_module.Resource] = [] + self._resources: list[res_module.Resource] = [] def _get_resource_from_obj( - self, resource: Union["Resource", Dict, str] + self, resource: Union["Resource", dict, str] ) -> "Resource": """Add new or update existing resource in dataset with new metadata Args: - resource (Union[Resource,Dict,str]): Either resource id or resource metadata from a Resource object or a dictionary + resource: Either resource id or resource metadata from a Resource object or a dictionary Returns: - Resource: Resource object + Resource object """ if isinstance(resource, str): if is_valid_uuid(resource) is False: @@ -276,17 +270,17 @@ def _get_resource_from_obj( def add_update_resource( self, - resource: Union["Resource", Dict, str], + resource: Union["Resource", dict, str], ignore_datasetid: bool = False, ) -> "Resource": """Add new or update existing resource in dataset with new metadata Args: - resource (Union[Resource,Dict,str]): Either resource id or resource metadata from a Resource object or a dictionary - ignore_datasetid (bool): Whether to ignore dataset id in the resource + resource: Either resource id or resource metadata from a Resource object or a dictionary + ignore_datasetid: Whether to ignore dataset id in the resource Returns: - Resource: The resource that was added after matching with any existing resource + The resource that 
was added after matching with any existing resource """ resource = self._get_resource_from_obj(resource) if "package_id" in resource: @@ -310,14 +304,14 @@ def add_update_resource( def add_update_resources( self, - resources: ListTuple[Union["Resource", Dict, str]], + resources: Sequence[Union["Resource", dict, str]], ignore_datasetid: bool = False, ) -> None: """Add new to the dataset or update existing resources with new metadata Args: - resources (ListTuple[Union[Resource,Dict,str]]): A list of either resource ids or resources metadata from either Resource objects or dictionaries - ignore_datasetid (bool): Whether to ignore dataset id in the resource. Defaults to False. + resources: A list of either resource ids or resources metadata from either Resource objects or dictionaries + ignore_datasetid: Whether to ignore dataset id in the resource. Defaults to False. Returns: None @@ -353,28 +347,28 @@ def add_update_resources( def delete_resource( self, - resource: Union["Resource", Dict, str], + resource: Union["Resource", dict, str], delete: bool = True, ) -> bool: """Delete a resource from the dataset and also from HDX by default Args: - resource (Union[Resource,Dict,str]): Either resource id or resource metadata from a Resource object or a dictionary - delete (bool): Whetehr to delete the resource from HDX (not just the dataset). Defaults to True. + resource: Either resource id or resource metadata from a Resource object or a dictionary + delete: Whetehr to delete the resource from HDX (not just the dataset). Defaults to True. Returns: - bool: True if resource removed or False if not + True if resource removed or False if not """ if isinstance(resource, str): if is_valid_uuid(resource) is False: raise HDXError(f"{resource} is not a valid resource id!") return self._remove_hdxobject(self._resources, resource, delete=delete) - def get_resources(self) -> List["Resource"]: + def get_resources(self) -> list["Resource"]: """Get dataset's resources Returns: - List[Resource]: List of Resource objects + List of Resource objects """ return self._resources @@ -382,10 +376,10 @@ def get_resource(self, index: int = 0) -> "Resource": """Get one resource from dataset by index Args: - index (int): Index of resource in dataset. Defaults to 0. + index: Index of resource in dataset. Defaults to 0. Returns: - Resource: Resource object + Resource object """ return self._resources[index] @@ -393,12 +387,12 @@ def number_of_resources(self) -> int: """Get number of dataset's resources Returns: - int: Number of Resource objects + Number of Resource objects """ return len(self._resources) def reorder_resources( - self, resource_ids: ListTuple[str], hxl_update: bool = True + self, resource_ids: Sequence[str], hxl_update: bool = True ) -> None: """Reorder resources in dataset according to provided list. Resources are updated in the dataset object to match new order. However, the dataset is not @@ -407,8 +401,8 @@ def reorder_resources( original order. Args: - resource_ids (ListTuple[str]): List of resource ids - hxl_update (bool): Whether to call package_hxl_update. Defaults to True. + resource_ids: List of resource ids + hxl_update: Whether to call package_hxl_update. Defaults to True. Returns: None @@ -438,11 +432,11 @@ def move_resource( with the value of insert_before. 
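
For example, on an existing Dataset object and with hypothetical resource
names:

    # "admin1-data" ends up immediately before "admin2-data"
    resource = dataset.move_resource("admin1-data", "admin2-data")
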
Args: - resource_name (str): Name of resource to move - insert_before (str): Resource to insert before + resource_name: Name of resource to move + insert_before: Resource to insert before Returns: - Resource: The resource that was moved + The resource that was moved """ from_index = None to_index = None @@ -468,7 +462,7 @@ def update_from_yaml( """Update dataset metadata with static metadata from YAML file Args: - path (str): Path to YAML dataset metadata. Defaults to config/hdx_dataset_static.yaml. + path: Path to YAML dataset metadata. Defaults to config/hdx_dataset_static.yaml. Returns: None @@ -482,7 +476,7 @@ def update_from_json( """Update dataset metadata with static metadata from JSON file Args: - path (str): Path to JSON dataset metadata. Defaults to config/hdx_dataset_static.json. + path: Path to JSON dataset metadata. Defaults to config/hdx_dataset_static.json. Returns: None @@ -492,16 +486,16 @@ def update_from_json( @staticmethod def read_from_hdx( - identifier: str, configuration: Optional[Configuration] = None + identifier: str, configuration: Configuration | None = None ) -> Optional["Dataset"]: """Reads the dataset given by identifier from HDX and returns Dataset object Args: - identifier (str): Identifier of dataset - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + identifier: Identifier of dataset + configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[Dataset]: Dataset object if successful read, None if not + Dataset object if successful read, None if not """ dataset = Dataset(configuration=configuration) @@ -526,10 +520,10 @@ def _dataset_load_from_hdx(self, id_or_name: str) -> bool: """Loads the dataset given by either id or name from HDX Args: - id_or_name (str): Either id or name of dataset + id_or_name: Either id or name of dataset Returns: - bool: True if loaded, False if not + True if loaded, False if not """ if not self._load_from_hdx("dataset", id_or_name): @@ -546,13 +540,13 @@ def check_resources_url_filetoupload(self) -> None: for resource in self._resources: resource.check_both_url_filetoupload() - def check_resources_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None: + def check_resources_fields(self, ignore_fields: Sequence[str] = ()) -> None: """Check that metadata for resources is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). + ignore_fields: Fields to ignore. Default is (). Returns: None @@ -564,7 +558,7 @@ def check_resources_fields(self, ignore_fields: ListTuple[str] = tuple()) -> Non def check_required_fields( self, - ignore_fields: ListTuple[str] = tuple(), + ignore_fields: Sequence[str] = (), allow_no_resources: bool = False, **kwargs: Any, ) -> None: @@ -573,8 +567,8 @@ def check_required_fields( particular operation. Args: - ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). - allow_no_resources (bool): Whether to allow no resources. Defaults to False. + ignore_fields: Fields to ignore. Default is (). + allow_no_resources: Whether to allow no resources. Defaults to False. 
Returns: None @@ -590,25 +584,25 @@ def check_required_fields( @staticmethod def revise( - match: Dict[str, Any], - filter: ListTuple[str] = tuple(), - update: Dict[str, Any] = {}, - files_to_upload: Dict[str, str] = {}, - configuration: Optional[Configuration] = None, + match: dict[str, Any], + filter: Sequence[str] = (), + update: dict[str, Any] = {}, + files_to_upload: dict[str, str] = {}, + configuration: Configuration | None = None, **kwargs: Any, ) -> "Dataset": """Revises an HDX dataset in HDX Args: match (Dict[str,Any]): Metadata on which to match dataset - filter (ListTuple[str]): Filters to apply. Defaults to tuple(). - update (Dict[str,Any]): Metadata updates to apply. Defaults to {}. - files_to_upload (Dict[str,str]): Files to upload to HDX. Defaults to {}. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + filter: Filters to apply. Defaults to tuple(). + update: Metadata updates to apply. Defaults to {}. + files_to_upload: Files to upload to HDX. Defaults to {}. + configuration: HDX configuration. Defaults to global configuration. **kwargs: Additional arguments to pass to package_revise Returns: - Dataset: Dataset object + Dataset object """ separators = (",", ":") data = {"match": json.dumps(match, separators=separators)} @@ -630,7 +624,7 @@ def revise( dataset.separate_resources() return dataset - def _prepare_hdx_call(self, data: Dict, kwargs: Any) -> None: + def _prepare_hdx_call(self, data: dict, kwargs: Any) -> None: """Common method used in create and update calls. Cleans tags and processes keyword arguments populating updated_by_script (details about what script is doing the update and when), batch @@ -638,11 +632,11 @@ def _prepare_hdx_call(self, data: Dict, kwargs: Any) -> None: CKAN /datasets page are grouped). Args: - data (Dict): Dataset data to update if needed + data: Dataset data to update if needed **kwargs: See below updated_by_script (str): Script info. Defaults to user agent. - batch: Batch UUID for where multiple datasets are grouped into one batch - batch_mode: Whether to group by batch. Defaults to not grouping. + batch (str): Batch UUID for where multiple datasets are grouped into one batch + batch_mode (bool): Whether to group by batch. Defaults to not grouping. Returns: None @@ -668,9 +662,9 @@ def _prepare_hdx_call(self, data: Dict, kwargs: Any) -> None: def _revise_filter( self, - dataset_data_to_update: Dict, - keys_to_delete: ListTuple[str], - resources_to_delete: ListTuple[int], + dataset_data_to_update: dict, + keys_to_delete: Sequence[str], + resources_to_delete: Sequence[int], ): """Returns the revise filter parameter adding to it any keys in the dataset metadata and resources that are specified to be deleted. 
Also compare @@ -678,9 +672,9 @@ def _revise_filter( removed in which case these should be added to the filter Args: - dataset_data_to_update (Dict): Dataset data to be updated - keys_to_delete (ListTuple[str]): List of top level metadata keys to delete - resources_to_delete (ListTuple[int]): List of indexes of resources to delete + dataset_data_to_update: Dataset data to be updated + keys_to_delete: List of top level metadata keys to delete + resources_to_delete: List of indexes of resources to delete """ revise_filter = [] for key in keys_to_delete: @@ -732,17 +726,17 @@ def _revise_filter( def _revise_files_to_upload_resource_deletions( self, - resources_to_update: ListTuple["Resource"], - resources_to_delete: ListTuple[int], - filestore_resources: Dict[int, str], + resources_to_update: Sequence["Resource"], + resources_to_delete: Sequence[int], + filestore_resources: dict[int, str], ): """Returns the files to be uploaded and updates resources_to_update and filestore_resources to reflect any deletions. Args: - resources_to_update (ListTuple[Resource]): Resources to update - resources_to_delete (ListTuple[int]): List of indexes of resources to delete - filestore_resources (Dict[int, str]): List of (index of resources, file to upload) + resources_to_update: Resources to update + resources_to_delete: List of indexes of resources to delete + filestore_resources: List of (index of resources, file to upload) """ files_to_upload = {} if not self.is_requestable(): @@ -767,33 +761,33 @@ def _revise_files_to_upload_resource_deletions( def _revise_dataset( self, allow_no_resources: bool, - keys_to_delete: ListTuple[str], - resources_to_update: ListTuple["Resource"], - resources_to_delete: ListTuple[int], - filestore_resources: Dict[int, str], - new_resource_order: Optional[ListTuple[str]], + keys_to_delete: Sequence[str], + resources_to_update: Sequence["Resource"], + resources_to_delete: Sequence[int], + filestore_resources: dict[int, str], + new_resource_order: Sequence[str] | None, hxl_update: bool, create_default_views: bool = False, test: bool = False, **kwargs: Any, - ) -> Dict: + ) -> dict: """Helper method to save the modified dataset and add any filestore resources Args: - allow_no_resources (bool): Whether to allow no resources - keys_to_delete (ListTuple[str]): List of top level metadata keys to delete - resources_to_update (ListTuple[Resource]): Resources to update - resources_to_delete (ListTuple[int]): List of indexes of resources to delete - filestore_resources (Dict[int, str]): List of (index of resources, file to upload) - new_resource_order (Optional[ListTuple[str]]): New resource order to use or None - hxl_update (bool): Whether to call package_hxl_update. - create_default_views (bool): Whether to create default views. Defaults to False. - test (bool): Whether running in a test. Defaults to False. + allow_no_resources: Whether to allow no resources + keys_to_delete: List of top level metadata keys to delete + resources_to_update: Resources to update + resources_to_delete: List of indexes of resources to delete + filestore_resources: List of (index of resources, file to upload) + new_resource_order: New resource order to use or None + hxl_update: Whether to call package_hxl_update. + create_default_views: Whether to create default views. Defaults to False. + test: Whether running in a test. Defaults to False. **kwargs: See below ignore_field (str): Any field to ignore when checking dataset metadata. Defaults to None. 
Returns: - Dict: Dictionary of what gets passed to the revise call (for testing) + Dictionary of what gets passed to the revise call (for testing) """ results = {} dataset_data_to_update = self._old_data @@ -873,7 +867,7 @@ def _dataset_update_resources( remove_additional_resources: bool, match_resource_order: bool, **kwargs: Any, - ) -> Tuple[List, List, Dict, List, Dict]: + ) -> tuple[list, list, dict, list, dict]: """Helper method to compare new and existing dataset data returning resources to be updated, resources to be deleted, resources where files need to be uploaded to the filestore and if match_resource_order is @@ -884,13 +878,13 @@ def _dataset_update_resources( new_resource_order, resource status codes) Args: - update_resources (bool): Whether to update resources - match_resources_by_metadata (bool): Compare resource metadata rather than position in list - remove_additional_resources (bool): Remove additional resources found in dataset (if updating) - match_resource_order (bool): Match order of given resources by name + update_resources: Whether to update resources + match_resources_by_metadata: Compare resource metadata rather than position in list + remove_additional_resources: Remove additional resources found in dataset (if updating) + match_resource_order: Match order of given resources by name Returns: - Tuple[List, List, Dict, List, Dict]: Tuple of resources information + Tuple of resources information """ # When the user sets up a dataset, "data" contains the metadata. The # HDX dataset update process involves copying "data" to "old_data" and @@ -1013,13 +1007,13 @@ def _dataset_hdx_update( allow_no_resources: bool, update_resources: bool, match_resources_by_metadata: bool, - keys_to_delete: ListTuple[str], + keys_to_delete: Sequence[str], remove_additional_resources: bool, match_resource_order: bool, create_default_views: bool, hxl_update: bool, **kwargs: Any, - ) -> Tuple[Dict, Dict]: + ) -> tuple[dict, dict]: """Helper method to compare new and existing dataset data, update resources including those with files in the filestore, delete extra resources if needed and update dataset data and save the dataset and @@ -1029,17 +1023,17 @@ def _dataset_hdx_update( dictionary of what gets passed to the revise call (for testing)) Args: - allow_no_resources (bool): Whether to allow no resources - update_resources (bool): Whether to update resources - match_resources_by_metadata (bool): Compare resource metadata rather than position in list - keys_to_delete (ListTuple[str]): List of top level metadata keys to delete - remove_additional_resources (bool): Remove additional resources found in dataset (if updating) - match_resource_order (bool): Match order of given resources by name - create_default_views (bool): Whether to call package_create_default_resource_views. - hxl_update (bool): Whether to call package_hxl_update. + allow_no_resources: Whether to allow no resources + update_resources: Whether to update resources + match_resources_by_metadata: Compare resource metadata rather than position in list + keys_to_delete: List of top level metadata keys to delete + remove_additional_resources: Remove additional resources found in dataset (if updating) + match_resource_order: Match order of given resources by name + create_default_views: Whether to call package_create_default_resource_views. + hxl_update: Whether to call package_hxl_update. 
Returns: - Tuple[Dict, Dict]: Tuple of (resource status codes, revise call info) + Tuple of (resource status codes, revise call info) """ ( resources_to_update, @@ -1088,13 +1082,13 @@ def update_in_hdx( allow_no_resources: bool = False, update_resources: bool = True, match_resources_by_metadata: bool = True, - keys_to_delete: ListTuple[str] = tuple(), + keys_to_delete: Sequence[str] = (), remove_additional_resources: bool = False, match_resource_order: bool = False, create_default_views: bool = True, hxl_update: bool = True, **kwargs: Any, - ) -> Dict: + ) -> dict: """Check if dataset exists in HDX and if so, update it. match_resources_by_metadata uses ids if they are available, otherwise names only if names are unique or format in addition if not. @@ -1108,14 +1102,14 @@ def update_in_hdx( 4 = file not uploaded (hash, size unchanged), given last_modified ignored Args: - allow_no_resources (bool): Whether to allow no resources. Defaults to False. - update_resources (bool): Whether to update resources. Defaults to True. - match_resources_by_metadata (bool): Compare resource metadata rather than position in list. Defaults to True. - keys_to_delete (ListTuple[str]): List of top level metadata keys to delete. Defaults to tuple(). - remove_additional_resources (bool): Remove additional resources found in dataset. Defaults to False. - match_resource_order (bool): Match order of given resources by name. Defaults to False. - create_default_views (bool): Whether to call package_create_default_resource_views. Defaults to True. - hxl_update (bool): Whether to call package_hxl_update. Defaults to True. + allow_no_resources: Whether to allow no resources. Defaults to False. + update_resources: Whether to update resources. Defaults to True. + match_resources_by_metadata: Compare resource metadata rather than position in list. Defaults to True. + keys_to_delete: List of top level metadata keys to delete. Defaults to tuple(). + remove_additional_resources: Remove additional resources found in dataset. Defaults to False. + match_resource_order: Match order of given resources by name. Defaults to False. + create_default_views: Whether to call package_create_default_resource_views. Defaults to True. + hxl_update: Whether to call package_hxl_update. Defaults to True. **kwargs: See below keep_crisis_tags (bool): Whether to keep existing crisis tags. Defaults to True. updated_by_script (str): String to identify your script. Defaults to your user agent. @@ -1123,7 +1117,7 @@ def update_in_hdx( force_update (bool): Forces files to be updated even if they haven't changed Returns: - Dict: Status codes of resources + Status codes of resources """ self.check_resources_url_filetoupload() loaded = False @@ -1156,13 +1150,13 @@ def create_in_hdx( allow_no_resources: bool = False, update_resources: bool = True, match_resources_by_metadata: bool = True, - keys_to_delete: ListTuple[str] = tuple(), + keys_to_delete: Sequence[str] = (), remove_additional_resources: bool = False, match_resource_order: bool = False, create_default_views: bool = True, hxl_update: bool = True, **kwargs: Any, - ) -> Dict: + ) -> dict: """Check if dataset exists in HDX and if so, update it, otherwise create it. match_resources_by_metadata uses ids if they are available, otherwise names only if names are unique or format in addition if not. @@ -1176,14 +1170,14 @@ def create_in_hdx( 4 = file not uploaded (hash, size unchanged), given last_modified ignored Args: - allow_no_resources (bool): Whether to allow no resources. Defaults to False. 
- update_resources (bool): Whether to update resources (if updating). Defaults to True. - match_resources_by_metadata (bool): Compare resource metadata rather than position in list. Defaults to True. - keys_to_delete (ListTuple[str]): List of top level metadata keys to delete. Defaults to tuple(). - remove_additional_resources (bool): Remove additional resources found in dataset (if updating). Defaults to False. - match_resource_order (bool): Match order of given resources by name. Defaults to False. - create_default_views (bool): Whether to call package_create_default_resource_views (if updating). Defaults to True. - hxl_update (bool): Whether to call package_hxl_update. Defaults to True. + allow_no_resources: Whether to allow no resources. Defaults to False. + update_resources: Whether to update resources (if updating). Defaults to True. + match_resources_by_metadata: Compare resource metadata rather than position in list. Defaults to True. + keys_to_delete: List of top level metadata keys to delete. Defaults to tuple(). + remove_additional_resources: Remove additional resources found in dataset (if updating). Defaults to False. + match_resource_order: Match order of given resources by name. Defaults to False. + create_default_views: Whether to call package_create_default_resource_views (if updating). Defaults to True. + hxl_update: Whether to call package_hxl_update. Defaults to True. **kwargs: See below keep_crisis_tags (bool): Whether to keep existing crisis tags. Defaults to True. updated_by_script (str): String to identify your script. Defaults to your user agent. @@ -1191,7 +1185,7 @@ def create_in_hdx( force_update (bool): Forces files to be updated even if they haven't changed Returns: - Dict: Status codes of resources + Status codes of resources """ self.check_resources_url_filetoupload() loadedid = self.data.get("id") @@ -1273,17 +1267,17 @@ def hxl_update(self) -> None: @classmethod def search_in_hdx( cls, - query: Optional[str] = "*:*", - configuration: Optional[Configuration] = None, + query: str | None = "*:*", + configuration: Configuration | None = None, page_size: int = 1000, **kwargs: Any, - ) -> List["Dataset"]: + ) -> list["Dataset"]: """Searches for datasets in HDX Args: - query (Optional[str]): Query (in Solr format). Defaults to '*:*'. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. - page_size (int): Size of page to use internally to query HDX. Defaults to 1000. + query: Query (in Solr format). Defaults to '*:*'. + configuration: HDX configuration. Defaults to global configuration. + page_size: Size of page to use internally to query HDX. Defaults to 1000. **kwargs: See below fq (string): Any filter queries to apply rows (int): Number of matching rows to return. Defaults to all datasets (sys.maxsize). @@ -1292,11 +1286,11 @@ def search_in_hdx( facet (string): Whether to enable faceted results. Default to True. facet.mincount (int): Minimum counts for facet fields should be included in the results facet.limit (int): Maximum number of values the facet fields return (- = unlimited). Defaults to 50. - facet.field (List[str]): Fields to facet upon. Default is empty. + facet.field (list[str]): Fields to facet upon. Default is empty. use_default_schema (bool): Use default package schema instead of custom schema. Defaults to False. 
Returns: - List[Dataset]: list of datasets resulting from query + list of datasets resulting from query """ dataset = Dataset(configuration=configuration) @@ -1383,18 +1377,18 @@ def search_in_hdx( @staticmethod def get_all_dataset_names( - configuration: Optional[Configuration] = None, **kwargs: Any - ) -> List[str]: + configuration: Configuration | None = None, **kwargs: Any + ) -> list[str]: """Get all dataset names in HDX Args: - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + configuration: HDX configuration. Defaults to global configuration. **kwargs: See below rows (int): Number of rows to return. Defaults to all datasets (sys.maxsize) start (int): Offset in the complete result for where the set of returned dataset names should begin Returns: - List[str]: list of all dataset names in HDX + list of all dataset names in HDX """ total_rows = kwargs.get("rows") if total_rows: @@ -1410,15 +1404,15 @@ def get_all_dataset_names( @classmethod def get_all_datasets( cls, - configuration: Optional[Configuration] = None, + configuration: Configuration | None = None, page_size: int = 1000, **kwargs: Any, - ) -> List["Dataset"]: + ) -> list["Dataset"]: """Get all datasets from HDX (just calls search_in_hdx) Args: - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. - page_size (int): Size of page to use internally to query HDX. Defaults to 1000. + configuration: HDX configuration. Defaults to global configuration. + page_size: Size of page to use internally to query HDX. Defaults to 1000. **kwargs: See below fq (string): Any filter queries to apply rows (int): Number of matching rows to return. Defaults to all datasets (sys.maxsize). @@ -1427,11 +1421,11 @@ def get_all_datasets( facet (string): Whether to enable faceted results. Default to True. facet.mincount (int): Minimum counts for facet fields should be included in the results facet.limit (int): Maximum number of values the facet fields return (- = unlimited). Defaults to 50. - facet.field (List[str]): Fields to facet upon. Default is empty. + facet.field (list[str]): Fields to facet upon. Default is empty. use_default_schema (bool): Use default package schema instead of custom schema. Defaults to False. Returns: - List[Dataset]: list of datasets resulting from query + list of datasets resulting from query """ if "sort" not in kwargs: kwargs["sort"] = "metadata_created asc" @@ -1444,15 +1438,15 @@ def get_all_datasets( @staticmethod def get_all_resources( - datasets: ListTuple["Dataset"], - ) -> List["Resource"]: + datasets: Sequence["Dataset"], + ) -> list["Resource"]: """Get all resources from a list of datasets (such as returned by search) Args: - datasets (ListTuple[Dataset]): list of datasets + datasets: list of datasets Returns: - List[Resource]: list of resources within those datasets + list of resources within those datasets """ resources = [] for dataset in datasets: @@ -1465,36 +1459,36 @@ def autocomplete( cls, name: str, limit: int = 20, - configuration: Optional[Configuration] = None, - ) -> List: + configuration: Configuration | None = None, + ) -> list: """Autocomplete a dataset name and return matches Args: - name (str): Name to autocomplete - limit (int): Maximum number of matches to return - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + name: Name to autocomplete + limit: Maximum number of matches to return + configuration: HDX configuration. Defaults to global configuration. 
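(Aside, not part of the patch: a usage sketch of the search helpers retyped above; the query is illustrative and a configured HDX connection is assumed.)

    from hdx.data.dataset import Dataset

    # Solr-format query; rows caps how many matching datasets come back
    datasets = Dataset.search_in_hdx("title:conflict", rows=10)
    names = Dataset.get_all_dataset_names(rows=100)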
Returns: - List: Autocomplete matches + Autocomplete matches """ return cls._autocomplete(name, limit, configuration) def get_time_period( self, - date_format: Optional[str] = None, + date_format: str | None = None, today: datetime = now_utc(), - ) -> Dict: + ) -> dict: """Get dataset date as datetimes and strings in specified format. If no format is supplied, the ISO 8601 format is used. Returns a dictionary containing keys startdate (start date as datetime), enddate (end date as datetime), startdate_str (start date as string), enddate_str (end date as string) and ongoing (whether the end date rolls forward every day). Args: - date_format (Optional[str]): Date format. None is taken to be ISO 8601. Defaults to None. - today (datetime): Date to use for today. Defaults to now_utc(). + date_format: Date format. None is taken to be ISO 8601. Defaults to None. + today: Date to use for today. Defaults to now_utc(). Returns: - Dict: Dictionary of date information + Dictionary of date information """ return DateHelper.get_time_period_info( self.data.get("dataset_date"), date_format, today @@ -1502,9 +1496,9 @@ def get_time_period( def get_reference_period( self, - date_format: Optional[str] = None, + date_format: str | None = None, today: datetime = now_utc(), - ) -> Dict: + ) -> dict: warnings.warn( "get_reference_period() is deprecated, use get_time_period() instead", DeprecationWarning, @@ -1513,8 +1507,8 @@ def get_reference_period( def set_time_period( self, - startdate: Union[datetime, str], - enddate: Union[datetime, str, None] = None, + startdate: datetime | str, + enddate: datetime | str | None = None, ongoing: bool = False, ignore_timeinfo: bool = True, ) -> None: ignore_timeinfo to False. In this case, the time will be converted to UTC. Args: - startdate (Union[datetime, str]): Dataset start date - enddate (Union[datetime, str, None]): Dataset end date. Defaults to None. - ongoing (bool): True if ongoing, False if not. Defaults to False. - ignore_timeinfo (bool): Ignore time and time zone of date. Defaults to True. + startdate: Dataset start date + enddate: Dataset end date. Defaults to None. + ongoing: True if ongoing, False if not. Defaults to False. + ignore_timeinfo: Ignore time and time zone of date. Defaults to True. Returns: None @@ -1539,8 +1533,8 @@ def set_time_period( def set_reference_period( self, - startdate: Union[datetime, str], - enddate: Union[datetime, str, None] = None, + startdate: datetime | str, + enddate: datetime | str | None = None, ongoing: bool = False, ignore_timeinfo: bool = True, ) -> None: @@ -1552,17 +1546,17 @@ def set_reference_period( def set_time_period_year_range( self, - dataset_year: Union[str, int, Iterable], - dataset_end_year: Optional[Union[str, int]] = None, - ) -> List[int]: + dataset_year: str | int | Iterable, + dataset_end_year: str | int | None = None, + ) -> list[int]: """Set time period as a range from year or start and end year.
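(Aside, not part of the patch: a sketch of the time period getters and setters above; the dataset name is hypothetical.)

    from hdx.data.dataset import Dataset

    dataset = Dataset({"name": "hypothetical-dataset"})
    # Strings or datetimes are accepted; times are dropped unless
    # ignore_timeinfo=False is passed
    dataset.set_time_period("2023-01-01", "2023-12-31")
    info = dataset.get_time_period(date_format="%Y-%m-%d")
    # info holds startdate, enddate, startdate_str, enddate_str and ongoing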
Args: - dataset_year (Union[str, int, Iterable]): Dataset year given as string or int or range in an iterable - dataset_end_year (Optional[Union[str, int]]): Dataset end year given as string or int + dataset_year: Dataset year given as string or int or range in an iterable + dataset_end_year: Dataset end year given as string or int Returns: - List[int]: The start and end year if supplied or sorted list of years + The start and end year if supplied or sorted list of years """ ( self.data["dataset_date"], @@ -1572,9 +1566,9 @@ def set_time_period_year_range( def set_reference_period_year_range( self, - dataset_year: Union[str, int, Iterable], - dataset_end_year: Optional[Union[str, int]] = None, - ) -> List[int]: + dataset_year: str | int | Iterable, + dataset_end_year: str | int | None = None, + ) -> list[int]: warnings.warn( "set_reference_period_year_range() is deprecated, use set_time_period_year_range() instead", DeprecationWarning, @@ -1582,34 +1576,34 @@ def set_reference_period_year_range( return self.set_time_period_year_range(dataset_year, dataset_end_year) @classmethod - def list_valid_update_frequencies(cls) -> List[str]: + def list_valid_update_frequencies(cls) -> list[str]: """List of valid update frequency values Returns: - List[str]: Allowed update frequencies + Allowed update frequencies """ return list(cls.update_frequencies.keys()) @classmethod - def transform_update_frequency(cls, frequency: Union[str, int]) -> Optional[str]: + def transform_update_frequency(cls, frequency: str | int) -> str | None: """Get numeric update frequency (as string since that is required field format) from textual representation or vice versa (eg. 'Every month' = '30', '30' or 30 = 'Every month') Args: - frequency (Union[str, int]): Update frequency in one format + frequency: Update frequency in one format Returns: - Optional[str]: Update frequency in alternative format or None if not valid + Update frequency in alternative format or None if not valid """ if isinstance(frequency, int): frequency = str(frequency) return cls.update_frequencies.get(frequency.lower()) - def get_expected_update_frequency(self) -> Optional[str]: + def get_expected_update_frequency(self) -> str | None: """Get expected update frequency (in textual rather than numeric form) Returns: - Optional[str]: Update frequency in textual form or None if the update frequency doesn't exist or is blank. + Update frequency in textual form or None if the update frequency doesn't exist or is blank. """ days = self.data.get("data_update_frequency", None) if days: @@ -1617,12 +1611,12 @@ def get_expected_update_frequency(self) -> Optional[str]: else: return None - def set_expected_update_frequency(self, update_frequency: Union[str, int]) -> None: + def set_expected_update_frequency(self, update_frequency: str | int) -> None: """Set expected update frequency. You can pass frequencies like "Every week" or '7' or 7. Valid values for update frequency can be found from Dataset.list_valid_update_frequencies(). 
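(Aside, not part of the patch: transform_update_frequency() in both directions, using the values given in the docstring above; "bogus" is an illustrative invalid value.)

    from hdx.data.dataset import Dataset

    Dataset.transform_update_frequency("Every month")  # returns "30"
    Dataset.transform_update_frequency(30)             # returns "Every month"
    Dataset.transform_update_frequency("bogus")        # returns None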
Args: - update_frequency (Union[str, int]): Update frequency + update_frequency: Update frequency Returns: None @@ -1637,52 +1631,52 @@ def set_expected_update_frequency(self, update_frequency: Union[str, int]) -> No raise HDXError("Invalid update frequency supplied!") self.data["data_update_frequency"] = update_frequency - def get_tags(self) -> List[str]: + def get_tags(self) -> list[str]: """Return the dataset's list of tags Returns: - List[str]: list of tags or [] if there are none + list of tags or [] if there are none """ return self._get_tags() def add_tag( self, tag: str, log_deleted: bool = True - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: """Add a tag Args: - tag (str): Tag to add - log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. + tag: Tag to add + log_deleted: Whether to log informational messages about deleted tags. Defaults to True. Returns: - Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added + Tuple containing list of added tags and list of deleted tags and tags not added """ return vocabulary.Vocabulary.add_mapped_tag(self, tag, log_deleted=log_deleted) def add_tags( - self, tags: ListTuple[str], log_deleted: bool = True - ) -> Tuple[List[str], List[str]]: + self, tags: Sequence[str], log_deleted: bool = True + ) -> tuple[list[str], list[str]]: """Add a list of tags Args: - tags (ListTuple[str]): List of tags to add - log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. + tags: List of tags to add + log_deleted: Whether to log informational messages about deleted tags. Defaults to True. Returns: - Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added + Tuple containing list of added tags and list of deleted tags and tags not added """ return vocabulary.Vocabulary.add_mapped_tags( self, tags, log_deleted=log_deleted ) - def clean_tags(self, log_deleted: bool = True) -> Tuple[List[str], List[str]]: + def clean_tags(self, log_deleted: bool = True) -> tuple[list[str], list[str]]: """Clean tags in an HDX object according to tags cleanup spreadsheet, deleting invalid tags that cannot be mapped Args: - log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. + log_deleted: Whether to log informational messages about deleted tags. Defaults to True. 
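(Aside, not part of the patch: the tag helpers above in use; tag mappings are fetched from HDX's tags cleanup data, so a live Configuration is assumed and the tag names are illustrative.)

    from hdx.data.dataset import Dataset

    dataset = Dataset({"name": "hypothetical-dataset"})
    added, rejected = dataset.add_tags(["health", "education"])
    mapped, rejected = dataset.clean_tags()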
Returns: - Tuple[List[str], List[str]]: Tuple containing list of mapped tags and list of deleted tags and tags not added + Tuple containing list of mapped tags and list of deleted tags and tags not added """ return vocabulary.Vocabulary.clean_tags(self, log_deleted=log_deleted) @@ -1690,10 +1684,10 @@ def remove_tag(self, tag: str) -> bool: """Remove a tag Args: - tag (str): Tag to remove + tag: Tag to remove Returns: - bool: True if tag removed or False if not + True if tag removed or False if not """ return self._remove_hdxobject( self.data.get("tags"), tag.lower(), matchon="name" @@ -1703,7 +1697,7 @@ def is_subnational(self) -> bool: """Return if the dataset is subnational Returns: - bool: True if the dataset is subnational, False if not + True if the dataset is subnational, False if not """ return self.data["subnational"] == "1" @@ -1711,7 +1705,7 @@ def set_subnational(self, subnational: bool) -> None: """Set if dataset is subnational or national Args: - subnational (bool): True for subnational, False for national + subnational: True for subnational, False for national Returns: None @@ -1721,16 +1715,14 @@ def set_subnational(self, subnational: bool) -> None: else: self.data["subnational"] = "0" - def get_location_iso3s( - self, locations: Optional[ListTuple[str]] = None - ) -> List[str]: + def get_location_iso3s(self, locations: Sequence[str] | None = None) -> list[str]: """Return the dataset's location Args: - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations: Valid locations list. Defaults to list downloaded from HDX. Returns: - List[str]: list of location iso3s + list of location iso3s """ countries = self.data.get("groups") countryisos = [] @@ -1746,16 +1738,14 @@ def get_location_iso3s( countryisos.append(countryiso) return countryisos - def get_location_names( - self, locations: Optional[ListTuple[str]] = None - ) -> List[str]: + def get_location_names(self, locations: Sequence[str] | None = None) -> list[str]: """Return the dataset's location Args: - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. + locations: Valid locations list. Defaults to list downloaded from HDX. Returns: - List[str]: list of location names + list of location names """ countries = self.data.get("groups") countrynames = [] @@ -1775,20 +1765,20 @@ def add_country_location( self, country: str, exact: bool = True, - locations: Optional[ListTuple[str]] = None, + locations: Sequence[str] | None = None, use_live: bool = True, ) -> bool: """Add a country. If an iso 3 code is not provided, value is parsed and if it is a valid country name, converted to an iso 3 code. If the country is already added, it is ignored. Args: - country (str): Country to add - exact (bool): True for exact matching or False to allow fuzzy matching. Defaults to True. - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. - use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. + country: Country to add + exact: True for exact matching or False to allow fuzzy matching. Defaults to True. + locations: Valid locations list. Defaults to list downloaded from HDX. + use_live: Try to use the latest country data from the web rather than the file in the package. Defaults to True.
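(Aside, not part of the patch: the location helpers above in use; ISO3 codes and names are both accepted and the values are illustrative.)

    from hdx.data.dataset import Dataset

    dataset = Dataset({"name": "hypothetical-dataset"})
    dataset.add_country_location("KEN")                 # ISO3 code
    dataset.add_country_locations(["Kenya", "Uganda"])  # names are converted
    dataset.add_region_location("Africa")               # adds member countries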
Returns: - bool: True if country added or False if country already present + True if country added or False if country already present """ iso3, match = Country.get_iso3_country_code_fuzzy(country, use_live=use_live) if iso3 is None: @@ -1803,20 +1793,20 @@ def add_country_location( def add_country_locations( self, - countries: ListTuple[str], - locations: Optional[ListTuple[str]] = None, + countries: Sequence[str], + locations: Sequence[str] | None = None, use_live: bool = True, ) -> bool: """Add a list of countries. If iso 3 codes are not provided, values are parsed and where they are valid country names, converted to iso 3 codes. If any country is already added, it is ignored. Args: - countries (ListTuple[str]): List of countries to add - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. - use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. + countries: List of countries to add + locations: Valid locations list. Defaults to list downloaded from HDX. + use_live: Try to use the latest country data from the web rather than the file in the package. Defaults to True. Returns: - bool: True if all countries added or False if any already present. + True if all countries added or False if any already present. """ allcountriesadded = True for country in countries: @@ -1829,19 +1819,19 @@ def add_country_locations( def add_region_location( self, region: str, - locations: Optional[ListTuple[str]] = None, + locations: Sequence[str] | None = None, use_live: bool = True, ) -> bool: """Add all countries in a region. If a 3 digit UNStats M49 region code is not provided, value is parsed as a region name. If any country is already added, it is ignored. Args: - region (str): M49 region, intermediate region or subregion to add - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. - use_live (bool): Try to get use latest country data from web rather than file in package. Defaults to True. + region: M49 region, intermediate region or subregion to add + locations: Valid locations list. Defaults to list downloaded from HDX. + use_live: Try to use the latest country data from the web rather than the file in the package. Defaults to True. Returns: - bool: True if all countries in region added or False if any already present. + True if all countries in region added or False if any already present. """ return self.add_country_locations( Country.get_countries_in_region( @@ -1854,20 +1844,20 @@ def add_other_location( self, location: str, exact: bool = True, - alterror: Optional[str] = None, - locations: Optional[ListTuple[str]] = None, + alterror: str | None = None, + locations: Sequence[str] | None = None, ) -> bool: """Add a location which is not a country or region. Value is parsed and compared to existing locations in HDX. If the location is already added, it is ignored. Args: - location (str): Location to add - exact (bool): True for exact matching or False to allow fuzzy matching. Defaults to True. - alterror (Optional[str]): Alternative error message to builtin if location not found. Defaults to None. - locations (Optional[ListTuple[str]]): Valid locations list. Defaults to list downloaded from HDX. + location: Location to add + exact: True for exact matching or False to allow fuzzy matching. Defaults to True. + alterror: Alternative error message to builtin if location not found. Defaults to None. + locations: Valid locations list. Defaults to list downloaded from HDX.
Returns: - bool: True if location added or False if location already present + True if location added or False if location already present """ hdx_code, match = Locations.get_HDX_code_from_location_partial( location, locations=locations, configuration=self.configuration @@ -1892,10 +1882,10 @@ def remove_location(self, location: str) -> bool: """Remove a location. If the location is not present, it is ignored. Args: - location (str): Location to remove + location: Location to remove Returns: - bool: True if location removed or False if not + True if location removed or False if not """ res = self._remove_hdxobject(self.data.get("groups"), location, matchon="name") if not res: @@ -1912,17 +1902,17 @@ def get_maintainer(self) -> "User": """Get the dataset's maintainer. Returns: - User: Dataset's maintainer + Dataset's maintainer """ return user.User.read_from_hdx( self.data["maintainer"], configuration=self.configuration ) - def set_maintainer(self, maintainer: Union["User", Dict, str]) -> None: + def set_maintainer(self, maintainer: Union["User", dict, str]) -> None: """Set the dataset's maintainer. Args: - maintainer (Union[User,Dict,str]): Either a user id or User metadata from a User object or dictionary. + maintainer: Either a user id or User metadata from a User object or dictionary. Returns: None """ @@ -1944,7 +1934,7 @@ def get_organization(self) -> "Organization": """Get the dataset's organization. Returns: - Organization: Dataset's organization + Dataset's organization """ return org_module.Organization.read_from_hdx( self.data["owner_org"], configuration=self.configuration ) def set_organization( self, - organization: Union["Organization", Dict, str], + organization: Union["Organization", dict, str], ) -> None: """Set the dataset's organization. Args: - organization (Union[Organization,Dict,str]): Either an Organization id or Organization metadata from an Organization object or dictionary. + organization: Either an Organization id or Organization metadata from an Organization object or dictionary.
Returns: None """ @@ -1977,11 +1967,11 @@ def set_organization( raise HDXError(f"{organization} is not a valid organization id!") self.data["owner_org"] = organization - def get_showcases(self) -> List["Showcase"]: + def get_showcases(self) -> list["Showcase"]: """Get any showcases the dataset is in Returns: - List[Showcase]: List of showcases + List of showcases """ assoc_result, showcases_dicts = self._read_from_hdx( "showcase", @@ -1999,15 +1989,15 @@ def get_showcases( return showcases def _get_dataset_showcase_dict( - self, showcase: Union["Showcase", Dict, str] - ) -> Dict: + self, showcase: Union["Showcase", dict, str] - ) -> dict: + ) -> dict: """Get dataset showcase dict Args: - showcase (Union[Showcase,Dict,str]): Either a showcase id or Showcase metadata from a Showcase object or dictionary + showcase: Either a showcase id or Showcase metadata from a Showcase object or dictionary Returns: - dict: Dataset showcase dict + Dataset showcase dict """ if isinstance(showcase, sc_module.Showcase) or isinstance(showcase, dict): if "id" not in showcase: @@ -2023,17 +2013,17 @@ def _get_dataset_showcase_dict( def add_showcase( self, - showcase: Union["Showcase", Dict, str], - showcases_to_check: ListTuple["Showcase"] = None, + showcase: Union["Showcase", dict, str], + showcases_to_check: Sequence["Showcase"] | None = None, ) -> bool: """Add dataset to showcase Args: - showcase (Union[Showcase,Dict,str]): Either a showcase id or showcase metadata from a Showcase object or dictionary - showcases_to_check (ListTuple[Showcase]): List of showcases against which to check existence of showcase. Defaults to showcases containing dataset. + showcase: Either a showcase id or showcase metadata from a Showcase object or dictionary + showcases_to_check: List of showcases against which to check existence of showcase. Defaults to showcases containing dataset. Returns: - bool: True if the showcase was added, False if already present + True if the showcase was added, False if already present """ dataset_showcase = self._get_dataset_showcase_dict(showcase) if showcases_to_check is None: @@ -2050,17 +2040,17 @@ def add_showcase( def add_showcases( self, - showcases: ListTuple[Union["Showcase", Dict, str]], - showcases_to_check: ListTuple["Showcase"] = None, + showcases: Sequence[Union["Showcase", dict, str]], + showcases_to_check: Sequence["Showcase"] | None = None, ) -> bool: """Add dataset to multiple showcases Args: - showcases (ListTuple[Union[Showcase,Dict,str]]): A list of either showcase ids or showcase metadata from Showcase objects or dictionaries - showcases_to_check (ListTuple[Showcase]): list of showcases against which to check existence of showcase. Defaults to showcases containing dataset. + showcases: A list of either showcase ids or showcase metadata from Showcase objects or dictionaries + showcases_to_check: list of showcases against which to check existence of showcase. Defaults to showcases containing dataset.
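(Aside, not part of the patch: linking a dataset to a showcase; the ids are hypothetical.)

    from hdx.data.dataset import Dataset

    dataset = Dataset.read_from_hdx("hypothetical-dataset")
    # A bare showcase id, a Showcase object or a dict with an id all work
    dataset.add_showcase("hypothetical-showcase-id")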
Returns: - bool: True if all showcases added or False if any already present + True if all showcases added or False if any already present """ if showcases_to_check is None: showcases_to_check = self.get_showcases() @@ -2070,11 +2060,11 @@ def add_showcases( allshowcasesadded = False return allshowcasesadded - def remove_showcase(self, showcase: Union["Showcase", Dict, str]) -> None: + def remove_showcase(self, showcase: Union["Showcase", dict, str]) -> None: """Remove dataset from showcase Args: - showcase (Union[Showcase,Dict,str]): Either a showcase id string or showcase metadata from a Showcase object or dictionary + showcase: Either a showcase id string or showcase metadata from a Showcase object or dictionary Returns: None @@ -2090,7 +2080,7 @@ def remove_showcase(self, showcase: Union["Showcase", Dict, str]) -> None: def is_requestable(self) -> bool: """Return whether the dataset is requestable or not Returns: - bool: Whether the dataset is requestable or not + Whether the dataset is requestable or not """ return self.data.get("is_requestdata_type", False) @@ -2098,7 +2088,7 @@ def is_requestable(self) -> bool: def set_requestable(self, requestable: bool = True) -> None: """Set the dataset to be of type requestable or not Args: - requestable (bool): Set whether dataset is requestable. Defaults to True. + requestable: Set whether dataset is requestable. Defaults to True. Returns: None """ @@ -2107,11 +2097,11 @@ def set_requestable(self, requestable: bool = True) -> None: if requestable: self.data["private"] = False - def get_fieldnames(self) -> List[str]: + def get_fieldnames(self) -> list[str]: """Return list of fieldnames in your data. Only applicable to requestable datasets. Returns: - List[str]: List of field names + List of field names """ if not self.is_requestable(): raise NotRequestableError( @@ -2123,10 +2113,10 @@ def get_fieldnames(self) -> List[str]: def add_fieldname(self, fieldname: str) -> bool: """Add a fieldname to list of fieldnames in your data. Only applicable to requestable datasets. Args: - fieldname (str): Fieldname to add + fieldname: Fieldname to add Returns: - bool: True if fieldname added or False if tag already present + True if fieldname added or False if already present """ if not self.is_requestable(): raise NotRequestableError( @@ -2134,14 +2124,14 @@ def add_fieldname(self, fieldname: str) -> bool: ) return self._add_string_to_commastring("field_names", fieldname) - def add_fieldnames(self, fieldnames: ListTuple[str]) -> bool: + def add_fieldnames(self, fieldnames: Sequence[str]) -> bool: """Add a list of fieldnames to list of fieldnames in your data. Only applicable to requestable datasets. Args: - fieldnames (ListTuple[str]): List of fieldnames to add + fieldnames: List of fieldnames to add Returns: - bool: True if all fieldnames added or False if any already present + True if all fieldnames added or False if any already present """ if not self.is_requestable(): raise NotRequestableError( @@ -2153,10 +2143,10 @@ def remove_fieldname(self, fieldname: str) -> bool: """Remove a fieldname. Only applicable to requestable datasets.
Args: - fieldname (str): Fieldname to remove + fieldname: Fieldname to remove Returns: - bool: True if fieldname removed or False if not + True if fieldname removed or False if not """ if not self.is_requestable(): raise NotRequestableError( @@ -2164,11 +2154,11 @@ def remove_fieldname(self, fieldname: str) -> bool: ) return self._remove_string_from_commastring("field_names", fieldname) - def get_filetypes(self) -> List[str]: + def get_filetypes(self) -> list[str]: """Return list of filetypes in your data Returns: - List[str]: List of filetypes + List of filetypes """ if not self.is_requestable(): return [resource.get_format() for resource in self._resources] @@ -2178,10 +2168,10 @@ def get_filetypes(self) -> List[str]: def add_filetype(self, filetype: str) -> bool: """Add a filetype to list of filetypes in your data. Only applicable to requestable datasets. Args: - filetype (str): filetype to add + filetype: filetype to add Returns: - bool: True if filetype added or False if tag already present + True if filetype added or False if already present """ if not self.is_requestable(): raise NotRequestableError( @@ -2189,14 +2179,14 @@ def add_filetype(self, filetype: str) -> bool: ) return self._add_string_to_commastring("file_types", filetype) - def add_filetypes(self, filetypes: ListTuple[str]) -> bool: + def add_filetypes(self, filetypes: Sequence[str]) -> bool: """Add a list of filetypes to list of filetypes in your data. Only applicable to requestable datasets. Args: - filetypes (ListTuple[str]): list of filetypes to add + filetypes: list of filetypes to add Returns: - bool: True if all filetypes added or False if any already present + True if all filetypes added or False if any already present """ if not self.is_requestable(): raise NotRequestableError( @@ -2208,10 +2198,10 @@ def add_filetypes(self, filetypes: ListTuple[str]) -> bool: def remove_filetype(self, filetype: str) -> bool: """Remove a filetype Args: - filetype (str): Filetype to remove + filetype: Filetype to remove Returns: - bool: True if filetype removed or False if not + True if filetype removed or False if not """ if not self.is_requestable(): raise NotRequestableError( @@ -2223,18 +2213,18 @@ def remove_filetype(self, filetype: str) -> bool: def set_custom_viz(self, url: str) -> None: """Set custom visualization url for dataset Args: - url (str): Custom visualization url + url: Custom visualization url Returns: None """ self.data["customviz"] = [{"url": url}] - def get_custom_viz(self) -> Optional[str]: + def get_custom_viz(self) -> str | None: """Get custom visualization url for dataset Returns: - Optional[str]: Custom visualization url or None: + Custom visualization url or None """ viz = self.data.get("customviz") if not isinstance(viz, list): @@ -2260,15 +2250,15 @@ def preview_resource(self) -> None: self.data["dataset_preview"] = "resource_id" def set_quickchart_resource( - self, resource: Union["Resource", Dict, str, int] + self, resource: Union["Resource", dict, str, int] ) -> "Resource": """Set the resource that will be used for displaying QuickCharts in dataset preview Args: - resource (Union[Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position + resource: Either resource id or name, resource metadata from a Resource object or a dictionary or position Returns: - Resource: Resource that is used for preview or None if no preview set + Resource that is used for preview or None if no preview set """ if isinstance(resource, int) and not isinstance(resource, bool): resource = self._resources[resource] @@ -2300,7 +2290,7 @@ def quickcharts_resource_last(self) -> bool: """Move the
QuickCharts resource to be last. Assumes that it's name begins 'QuickCharts-'. Returns: - bool: True if QuickCharts resource found, False if not + True if QuickCharts resource found, False if not """ for i, resource in enumerate(self._resources): if resource["name"][:12] == "QuickCharts-": @@ -2312,7 +2302,7 @@ def create_default_views(self, create_datastore_views: bool = False) -> None: """Create default resource views for all resources in dataset Args: - create_datastore_views (bool): Whether to try to create resource views that point to the datastore + create_datastore_views: Whether to try to create resource views that point to the datastore Returns: None @@ -2340,12 +2330,12 @@ def _create_preview_resourceview(self) -> None: def _generate_resource_view( self, - resource: Union["Resource", Dict, str, int] = 0, - path: Optional[str] = None, - bites_disabled: Optional[ListTuple[bool]] = None, - indicators: Optional[ListTuple[Dict]] = None, - findreplace: Optional[Dict] = None, - ) -> Optional[resource_view.ResourceView]: + resource: Union["Resource", dict, str, int] = 0, + path: str | None = None, + bites_disabled: Sequence[bool] | None = None, + indicators: Sequence[dict] | None = None, + findreplace: dict | None = None, + ) -> resource_view.ResourceView | None: """Create QuickCharts for the given resource in a dataset. If you do not supply a path, then the internal indicators resource view template will be used. You can disable specific bites by providing @@ -2362,14 +2352,14 @@ def _generate_resource_view( dataset create or update if a resource id is not yet available. Args: - resource (Union[Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. - path (Optional[str]): Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yaml or internal template). - bites_disabled (Optional[ListTuple[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). - indicators (Optional[ListTuple[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). - findreplace (Optional[Dict]): Replacements for anything else in resource view. Defaults to None. + resource: Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. + path: Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yaml or internal template). + bites_disabled: Which QC bites should be disabled. Defaults to None (all bites enabled). + indicators: Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). + findreplace: Replacements for anything else in resource view. Defaults to None. 
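(Aside, not part of the patch: a sketch of the public wrapper generate_quickcharts(), which appears just below and delegates to _generate_resource_view() above; the dataset name is hypothetical and the indicator keys follow the docstring's description of codes, titles and units, so treat them as assumptions.)

    from hdx.data.dataset import Dataset

    dataset = Dataset.read_from_hdx("hypothetical-dataset")
    dataset.generate_quickcharts(
        resource=0,                           # first resource in the dataset
        bites_disabled=[False, False, True],  # switch off the third bite
        indicators=[{"code": "IND1", "title": "Indicator 1", "unit": "ratio"}],
    )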
Returns: - Optional[resource_view.ResourceView]: The resource view if QuickCharts created, None is not + The resource view if QuickCharts created, None is not """ if not bites_disabled: bites_disabled = [False, False, False] @@ -2498,11 +2488,11 @@ def replace_indicator(qc_config, index): def generate_quickcharts( self, - resource: Union["Resource", Dict, str, int] = 0, - path: Optional[str] = None, - bites_disabled: Optional[ListTuple[bool]] = None, - indicators: Optional[ListTuple[Dict]] = None, - findreplace: Optional[Dict] = None, + resource: Union["Resource", dict, str, int] = 0, + path: str | None = None, + bites_disabled: Sequence[bool] | None = None, + indicators: Sequence[dict] | None = None, + findreplace: dict | None = None, ) -> resource_view.ResourceView: """Create QuickCharts for the given resource in a dataset. If you do not supply a path, then the internal indicators resource view template @@ -2521,14 +2511,14 @@ def generate_quickcharts( be disabled if there are no valid charts to display. Args: - resource (Union[Resource,Dict,str,int]): Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. - path (Optional[str]): Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yaml or internal template). - bites_disabled (Optional[ListTuple[bool]]): Which QC bites should be disabled. Defaults to None (all bites enabled). - indicators (Optional[ListTuple[Dict]]): Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). - findreplace (Optional[Dict]): Replacements for anything else in resource view. Defaults to None. + resource: Either resource id or name, resource metadata from a Resource object or a dictionary or position. Defaults to 0. + path: Path to YAML resource view metadata. Defaults to None (config/hdx_resource_view_static.yaml or internal template). + bites_disabled: Which QC bites should be disabled. Defaults to None (all bites enabled). + indicators: Indicator codes, QC titles and units for resource view template. Defaults to None (don't use template). + findreplace: Replacements for anything else in resource view. Defaults to None. Returns: - resource_view.ResourceView: The resource view if QuickCharts created, None is not + The resource view if QuickCharts created, None is not """ resourceview = self._generate_resource_view( resource, path, bites_disabled, indicators, findreplace=findreplace @@ -2537,16 +2527,16 @@ def generate_quickcharts( self.preview_off() return resourceview - def get_name_or_id(self, prefer_name: bool = True) -> Optional[str]: + def get_name_or_id(self, prefer_name: bool = True) -> str | None: """Get dataset name or id eg. for use in urls. If prefer_name is True, name is preferred over id if available, otherwise id is preferred over name if available. Args: - prefer_name (bool): Whether name is preferred over id. Default to True. + prefer_name: Whether name is preferred over id. Default to True. Returns: - Optional[str]: HDX dataset id or name or None if not available + HDX dataset id or name or None if not available """ id = self.data.get("id") name = self.data.get("name") @@ -2559,30 +2549,30 @@ def get_name_or_id(self, prefer_name: bool = True) -> Optional[str]: return id return name - def get_hdx_url(self, prefer_name: bool = True) -> Optional[str]: + def get_hdx_url(self, prefer_name: bool = True) -> str | None: """Get the url of the dataset on HDX or None if the dataset name and id fields are missing. 
If prefer_name is True, name is preferred over id if available, otherwise id is preferred over name if available. Args: - prefer_name (bool): Whether name is preferred over id in url. Default to True. + prefer_name: Whether name is preferred over id in url. Default to True. Returns: - Optional[str]: Url of the dataset on HDX or None if the dataset is missing fields + Url of the dataset on HDX or None if the dataset is missing fields """ name_or_id = self.get_name_or_id(prefer_name) if not name_or_id: return None return f"{self.configuration.get_hdx_site_url()}/dataset/{name_or_id}" - def get_api_url(self, prefer_name: bool = True) -> Optional[str]: + def get_api_url(self, prefer_name: bool = True) -> str | None: """Get the API url of the dataset on HDX Args: - prefer_name (bool): Whether name is preferred over id in url. Default to True. + prefer_name: Whether name is preferred over id in url. Default to True. Returns: - Optional[str]: API url of the dataset on HDX or None if the dataset is missing fields + API url of the dataset on HDX or None if the dataset is missing fields """ name_or_id = self.get_name_or_id(prefer_name) if not name_or_id: @@ -2591,7 +2581,7 @@ def get_api_url(self, prefer_name: bool = True) -> Optional[str]: def remove_dates_from_title( self, change_title: bool = True, set_time_period: bool = False - ) -> List[Tuple[datetime, datetime]]: + ) -> list[tuple[datetime, datetime]]: """Remove dates from dataset title returning sorted the dates that were found in title. The title in the dataset metadata will be changed by default. The dataset's metadata field time period will not be changed by default, but if @@ -2599,11 +2589,11 @@ def remove_dates_from_title( to set the time period field. Args: - change_title (bool): Whether to change the dataset title. Defaults to True. - set_time_period (bool): Whether to set time period from date or range in title. Defaults to False. + change_title: Whether to change the dataset title. Defaults to True. + set_time_period: Whether to set time period from date or range in title. Defaults to False. Returns: - List[Tuple[datetime,datetime]]: Date ranges found in title + Date ranges found in title """ if "title" not in self.data: raise HDXError("Dataset has no title!") @@ -2620,16 +2610,16 @@ def generate_resource( self, folder: str, filename: str, - rows: Iterable[ListTupleDict], - resourcedata: Dict, - headers: Union[int, ListTuple[str], None] = None, - columns: Union[ListTuple[int], ListTuple[str], None] = None, + rows: Iterable[Sequence | Mapping], + resourcedata: dict, + headers: int | Sequence[str] | None = None, + columns: Sequence[int] | Sequence[str] | None = None, format: str = "csv", - encoding: Optional[str] = None, - datecol: Optional[Union[int, str]] = None, - yearcol: Optional[Union[int, str]] = None, - date_function: Optional[Callable[[Dict], Optional[Dict]]] = None, - ) -> Tuple[bool, Dict]: + encoding: str | None = None, + datecol: int | str | None = None, + yearcol: int | str | None = None, + date_function: Callable[[dict], dict | None] | None = None, + ) -> tuple[bool, dict]: """Write rows to file and create resource, adding it to the dataset. The headers argument is either a row number (rows start counting at 1), or the actual headers defined as a list of strings. If not set, all rows will be treated as @@ -2651,20 +2641,20 @@ def generate_resource( startdate and enddate. 
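(Aside, not part of the patch: a sketch of generate_resource() as documented above; folder, filename and values are illustrative.)

    from hdx.data.dataset import Dataset

    dataset = Dataset({"name": "hypothetical-dataset", "title": "Example"})
    rows = [{"year": 2023, "value": 1.0}, {"year": 2024, "value": 2.0}]
    success, results = dataset.generate_resource(
        folder="/tmp",
        filename="data.csv",
        rows=rows,
        resourcedata={"name": "data", "description": "Example data"},
        headers=["year", "value"],
        yearcol="year",  # sets the dataset time period from the data
    )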
Args: - folder (str): Folder to which to write file containing rows - filename (str): Filename of file to write rows - rows (Iterable[ListTupleDict]): List of rows in dict or list form - resourcedata (Dict): Resource data - headers (Union[int, ListTuple[str], None]): All headers. Defaults to None. - columns (Union[ListTuple[int], ListTuple[str], None]): Columns to write. Defaults to all. - format (str): Format to write. Defaults to csv. - encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). + folder: Folder to which to write file containing rows + filename: Filename of file to write rows + rows: List of rows in dict or list form + resourcedata: Resource data + headers: All headers. Defaults to None. + columns: Columns to write. Defaults to all. + format: Format to write. Defaults to csv. + encoding: Encoding to use. Defaults to None (infer encoding). datecol: Date column for setting time period. Defaults to None (don't set). yearcol: Year column for setting dataset year range. Defaults to None (don't set). date_function: Date function to call for each row. Defaults to None. Returns: - Tuple[bool, Dict]: (True if resource added, dictionary of results) + (True if resource added, dictionary of results) """ if [datecol, yearcol, date_function].count(None) < 2: raise HDXError("Supply one of datecol, yearcol or date_function!") @@ -2698,7 +2688,7 @@ def datecol_function(row): date_function = datecol_function - def process_row(row: ListTupleDict) -> Optional[ListTupleDict]: + def process_row(row: Sequence | Mapping) -> Sequence | Mapping | None: if date_function is None: return row result = date_function(row) @@ -2752,10 +2742,10 @@ def generate_resource_from_rows( self, folder: str, filename: str, - rows: Iterable[ListTupleDict], - resourcedata: Dict, - headers: Optional[ListTuple[str]] = None, - encoding: Optional[str] = None, + rows: Iterable[Sequence | Mapping], + resourcedata: dict, + headers: Sequence[str] | None = None, + encoding: str | None = None, ) -> Optional["Resource"]: """Write rows to csv and create resource, adding it to the dataset. The headers argument is either a row number (rows start counting at 1), or the actual headers defined as a list of strings. If not set, all rows will be treated as containing values. Args: - folder (str): Folder to which to write file containing rows - filename (str): Filename of file to write rows - rows (Iterable[ListTupleDict]): List of rows in dict or list form - resourcedata (Dict): Resource data - headers (Optional[ListTuple[str]]): List of headers. Defaults to None. - encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). + folder: Folder to which to write file containing rows + filename: Filename of file to write rows + rows: List of rows in dict or list form + resourcedata: Resource data + headers: List of headers. Defaults to None. + encoding: Encoding to use. Defaults to None (infer encoding).
Returns: - Optional[Resource]: The created resource or None if not created + The created resource or None if not created """ warnings.warn( "generate_resource_from_rows() is deprecated, use generate_resource() instead", @@ -2784,18 +2774,18 @@ def generate_resource_from_rows( def generate_resource_from_iterable( self, - headers: ListTuple[str], - iterable: Iterable[Union[ListTuple, Dict]], - hxltags: Dict[str, str], + headers: Sequence[str], + iterable: Iterable[Sequence | dict], + hxltags: dict[str, str], folder: str, filename: str, - resourcedata: Dict, - datecol: Optional[Union[int, str]] = None, - yearcol: Optional[Union[int, str]] = None, - date_function: Optional[Callable[[Dict], Optional[Dict]]] = None, - quickcharts: Optional[Dict] = None, - encoding: Optional[str] = None, - ) -> Tuple[bool, Dict]: + resourcedata: dict, + datecol: int | str | None = None, + yearcol: int | str | None = None, + date_function: Callable[[dict], dict | None] | None = None, + quickcharts: dict | None = None, + encoding: str | None = None, + ) -> tuple[bool, dict]: """Given headers and an iterable, write rows to csv and create resource, adding to it the dataset. The returned dictionary will contain the resource in the key resource, headers in the key headers @@ -2832,20 +2822,20 @@ def generate_resource_from_iterable( of HXL tags is used. Args: - headers (ListTuple[str]): Headers - iterable (Iterable[Union[ListTuple,Dict]]): Iterable returning rows - hxltags (Dict[str,str]): Header to HXL hashtag mapping - folder (str): Folder to which to write file containing rows - filename (str): Filename of file to write rows - resourcedata (Dict): Resource data - datecol (Optional[Union[int,str]]): Date column for setting time period. Defaults to None (don't set). - yearcol (Optional[Union[int,str]]): Year column for setting dataset year range. Defaults to None (don't set). - date_function (Optional[Callable[[Dict],Optional[Dict]]]): Date function to call for each row. Defaults to None. - quickcharts (Optional[Dict]): Dictionary containing optional keys: hashtag, values, cutdown and/or cutdownhashtags - encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). + headers: Headers + iterable: Iterable returning rows + hxltags: Header to HXL hashtag mapping + folder: Folder to which to write file containing rows + filename: Filename of file to write rows + resourcedata: Resource data + datecol: Date column for setting time period. Defaults to None (don't set). + yearcol: Year column for setting dataset year range. Defaults to None (don't set). + date_function: Date function to call for each row. Defaults to None. + quickcharts: Dictionary containing optional keys: hashtag, values, cutdown and/or cutdownhashtags + encoding: Encoding to use. Defaults to None (infer encoding). 
Returns: - Tuple[bool, Dict]: (True if resource added, dictionary of results) + (True if resource added, dictionary of results) """ warnings.warn( "generate_resource_from_iterable() is deprecated, use generate_resource() instead", @@ -2996,18 +2986,18 @@ def datecol_function(row): def generate_resource_from_iterator( self, - headers: ListTuple[str], - iterator: Iterator[Union[ListTuple, Dict]], - hxltags: Dict[str, str], + headers: Sequence[str], + iterator: Iterator[Sequence | dict], + hxltags: dict[str, str], folder: str, filename: str, - resourcedata: Dict, - datecol: Optional[Union[int, str]] = None, - yearcol: Optional[Union[int, str]] = None, - date_function: Optional[Callable[[Dict], Optional[Dict]]] = None, - quickcharts: Optional[Dict] = None, - encoding: Optional[str] = None, - ) -> Tuple[bool, Dict]: + resourcedata: dict, + datecol: int | str | None = None, + yearcol: int | str | None = None, + date_function: Callable[[dict], dict | None] | None = None, + quickcharts: dict | None = None, + encoding: str | None = None, + ) -> tuple[bool, dict]: warnings.warn( "generate_resource_from_iterator() is deprecated, use generate_resource() instead", DeprecationWarning, @@ -3032,17 +3022,17 @@ def download_generate_resource( url: str, folder: str, filename: str, - resourcedata: Dict, - header_insertions: Optional[ListTuple[Tuple[int, str]]] = None, - row_function: Optional[Callable[[List[str], Dict], Dict]] = None, - columns: Union[ListTuple[int], ListTuple[str], None] = None, + resourcedata: dict, + header_insertions: Sequence[tuple[int, str]] | None = None, + row_function: Callable[[list[str], dict], dict] | None = None, + columns: Sequence[int] | Sequence[str] | None = None, format: str = "csv", - encoding: Optional[str] = None, - datecol: Optional[Union[int, str]] = None, - yearcol: Optional[Union[int, str]] = None, - date_function: Optional[Callable[[Dict], Optional[Dict]]] = None, + encoding: str | None = None, + datecol: int | str | None = None, + yearcol: int | str | None = None, + date_function: Callable[[dict], dict | None] | None = None, **kwargs: Any, - ) -> Tuple[bool, Dict]: + ) -> tuple[bool, dict]: """Download url, write rows to csv and create resource, adding to it the dataset. The returned dictionary will contain the resource in the key resource, headers in the key headers and list of rows in the key @@ -3087,24 +3077,24 @@ def download_generate_resource( of HXL tags is used. Args: - downloader (BaseDownload): A Download or Retrieve object - url (str): URL to download - hxltags (Dict[str,str]): Header to HXL hashtag mapping - folder (str): Folder to which to write file containing rows - filename (str): Filename of file to write rows - resourcedata (Dict): Resource data - header_insertions (Optional[ListTuple[Tuple[int,str]]]): List of (position, header) to insert. Defaults to None. - row_function (Optional[Callable[[List[str],Dict],Dict]]): Function to call for each row. Defaults to None. - columns (Union[ListTuple[int], ListTuple[str], None]): Columns to write. Defaults to all. - format (str): Format to write. Defaults to csv. - encoding (Optional[str]): Encoding to use. Defaults to None (infer encoding). + downloader: A Download or Retrieve object + url: URL to download + hxltags: Header to HXL hashtag mapping + folder: Folder to which to write file containing rows + filename: Filename of file to write rows + resourcedata: Resource data + header_insertions: List of (position, header) to insert. Defaults to None. + row_function: Function to call for each row. 
+            columns: Columns to write. Defaults to all.
+            format: Format to write. Defaults to csv.
+            encoding: Encoding to use. Defaults to None (infer encoding).
+            datecol: Date column for setting time period. Defaults to None (don't set).
+            yearcol: Year column for setting dataset year range. Defaults to None (don't set).
+            date_function: Date function to call for each row. Defaults to None.
             **kwargs: Any additional args to pass to downloader.get_tabular_rows

         Returns:
-            Tuple[bool, Dict]: (True if resource added, dictionary of results)
+            (True if resource added, dictionary of results)
         """
         headers, iterator = downloader.get_tabular_rows(
             url,
@@ -3132,18 +3122,18 @@ def download_and_generate_resource(
         self,
         downloader: BaseDownload,
         url: str,
-        hxltags: Dict[str, str],
+        hxltags: dict[str, str],
         folder: str,
         filename: str,
-        resourcedata: Dict,
-        header_insertions: Optional[ListTuple[Tuple[int, str]]] = None,
-        row_function: Optional[Callable[[List[str], Dict], Dict]] = None,
-        datecol: Optional[str] = None,
-        yearcol: Optional[str] = None,
-        date_function: Optional[Callable[[Dict], Optional[Dict]]] = None,
-        quickcharts: Optional[Dict] = None,
+        resourcedata: dict,
+        header_insertions: Sequence[tuple[int, str]] | None = None,
+        row_function: Callable[[list[str], dict], dict] | None = None,
+        datecol: str | None = None,
+        yearcol: str | None = None,
+        date_function: Callable[[dict], dict | None] | None = None,
+        quickcharts: dict | None = None,
         **kwargs: Any,
-    ) -> Tuple[bool, Dict]:
+    ) -> tuple[bool, dict]:
-        """Download url, write rows to csv and create resource, adding to it
+        """Download url, write rows to csv and create resource, adding it to
         the dataset. The returned dictionary will contain the
         resource in the key resource, headers in the key headers and list of rows in the key
@@ -3188,22 +3178,22 @@ def download_and_generate_resource(
         of HXL tags is used.

         Args:
-            downloader (BaseDownload): A Download or Retrieve object
-            url (str): URL to download
-            hxltags (Dict[str,str]): Header to HXL hashtag mapping
-            folder (str): Folder to which to write file containing rows
-            filename (str): Filename of file to write rows
-            resourcedata (Dict): Resource data
-            header_insertions (Optional[ListTuple[Tuple[int,str]]]): List of (position, header) to insert. Defaults to None.
-            row_function (Optional[Callable[[List[str],Dict],Dict]]): Function to call for each row. Defaults to None.
-            datecol (Optional[str]): Date column for setting time period. Defaults to None (don't set).
-            yearcol (Optional[str]): Year column for setting dataset year range. Defaults to None (don't set).
-            date_function (Optional[Callable[[Dict],Optional[Dict]]]): Date function to call for each row. Defaults to None.
-            quickcharts (Optional[Dict]): Dictionary containing optional keys: hashtag, values, cutdown and/or cutdownhashtags
+            downloader: A Download or Retrieve object
+            url: URL to download
+            hxltags: Header to HXL hashtag mapping
+            folder: Folder to which to write file containing rows
+            filename: Filename of file to write rows
+            resourcedata: Resource data
+            header_insertions: List of (position, header) to insert. Defaults to None.
+            row_function: Function to call for each row. Defaults to None.
+            datecol: Date column for setting time period. Defaults to None (don't set).
+            yearcol: Year column for setting dataset year range. Defaults to None (don't set).
+            date_function: Date function to call for each row. Defaults to None.
+ quickcharts: Dictionary containing optional keys: hashtag, values, cutdown and/or cutdownhashtags **kwargs: Any additional args to pass to downloader.get_tabular_rows Returns: - Tuple[bool, Dict]: (True if resource added, dictionary of results) + (True if resource added, dictionary of results) """ warnings.warn( "download_and_generate_resource() is deprecated, use download_generate_resource() instead", @@ -3234,8 +3224,8 @@ def download_and_generate_resource( def add_hapi_error( self, error_message: str, - resource_name: Optional[str] = None, - resource_id: Optional[str] = None, + resource_name: str | None = None, + resource_id: str | None = None, ) -> bool: """Writes error messages that were uncovered while processing data for the HAPI database to a resource's metadata on HDX. If the resource @@ -3243,12 +3233,12 @@ def add_hapi_error( messages are different. Args: - error_message (str): Error(s) uncovered - resource_name (Optional[str]): Resource name. Defaults to None - resource_id (Optional[str]): Resource id. Defaults to None + error_message: Error(s) uncovered + resource_name: Resource name. Defaults to None + resource_id: Resource id. Defaults to None Returns: - bool: True if a message was added, False if not + True if a message was added, False if not """ if resource_name is None and resource_id is None: return False diff --git a/src/hdx/data/hdxobject.py b/src/hdx/data/hdxobject.py index a4dce245..81282073 100755 --- a/src/hdx/data/hdxobject.py +++ b/src/hdx/data/hdxobject.py @@ -6,18 +6,18 @@ import logging from abc import ABC, abstractmethod from collections import UserDict +from collections.abc import Sequence from os.path import isfile -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from ckanapi.errors import NotFound - -from hdx.api.configuration import Configuration from hdx.utilities.dictandlist import merge_two_dictionaries from hdx.utilities.loader import ( load_json_into_existing_dict, load_yaml_into_existing_dict, ) -from hdx.utilities.typehint import ListTuple + +from hdx.api.configuration import Configuration logger = logging.getLogger(__name__) @@ -31,21 +31,21 @@ class HDXObject(UserDict, ABC): New HDX objects should extend this in similar fashion to Resource for example. Args: - initial_data (Dict): Initial metadata dictionary - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + initial_data: Initial metadata dictionary + configuration: HDX configuration. Defaults to global configuration. 
""" @staticmethod @abstractmethod - def actions() -> Dict[str, str]: + def actions() -> dict[str, str]: """Dictionary of actions that can be performed on object Returns: - Dict[str, str]: Dictionary of actions that can be performed on object + Dictionary of actions that can be performed on object """ def __init__( - self, initial_data: Dict, configuration: Optional[Configuration] = None + self, initial_data: dict, configuration: Configuration | None = None ) -> None: self._old_data = None if configuration is None: @@ -54,11 +54,11 @@ def __init__( self.configuration: Configuration = configuration super().__init__(initial_data) - def get_old_data_dict(self) -> Dict: + def get_old_data_dict(self) -> dict: """Get previous internal dictionary Returns: - Dict: Previous internal dictionary + Previous internal dictionary """ return self._old_data @@ -66,7 +66,7 @@ def update_from_yaml(self, path: str) -> None: """Update metadata with static metadata from YAML file Args: - path (str): Path to YAML dataset metadata + path: Path to YAML dataset metadata Returns: None @@ -79,7 +79,7 @@ def update_from_json(self, path: str) -> None: """Update metadata with static metadata from JSON file Args: - path (str): Path to JSON dataset metadata + path: Path to JSON dataset metadata Returns: None @@ -91,20 +91,20 @@ def _read_from_hdx( object_type: str, value: str, fieldname: str = "id", - action: Optional[str] = None, + action: str | None = None, **kwargs: Any, - ) -> Tuple[bool, Union[Dict, str]]: + ) -> tuple[bool, dict | str]: """Makes a read call to HDX passing in given parameter. Args: - object_type (str): Description of HDX object type (for messages) - value (str): Value of HDX field - fieldname (str): HDX field name. Defaults to id. - action (Optional[str]): Replacement CKAN action url to use. Defaults to None. + object_type: Description of HDX object type (for messages) + value: Value of HDX field + fieldname: HDX field name. Defaults to id. + action: Replacement CKAN action url to use. Defaults to None. **kwargs: Other fields to pass to CKAN. Returns: - Tuple[bool, Union[Dict, str]]: (True/False, HDX object metadata/Error) + (True/False, HDX object metadata/Error) """ if not fieldname: raise HDXError(f"Empty {object_type} field name!") @@ -126,11 +126,11 @@ def _load_from_hdx(self, object_type: str, id_field: str) -> bool: """Helper method to load the HDX object given by identifier from HDX Args: - object_type (str): Description of HDX object type (for messages) - id_field (str): HDX object identifier + object_type: Description of HDX object type (for messages) + id_field: HDX object identifier Returns: - bool: True if loaded, False if not + True if loaded, False if not """ success, result = self._read_from_hdx(object_type, id_field) if success: @@ -143,16 +143,16 @@ def _load_from_hdx(self, object_type: str, id_field: str) -> bool: @staticmethod @abstractmethod def read_from_hdx( - id_field: str, configuration: Optional[Configuration] = None + id_field: str, configuration: Configuration | None = None ) -> Optional["HDXObject"]: """Abstract method to read the HDX object given by identifier from HDX and return it Args: - id_field (str): HDX object identifier - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + id_field: HDX object identifier + configuration: HDX configuration. Defaults to global configuration. 
Returns: - Optional[HDXObject]: HDX object if successful read, None if not + HDX object if successful read, None if not """ @classmethod @@ -160,17 +160,17 @@ def _read_from_hdx_class( cls, object_type: str, identifier: str, - configuration: Optional[Configuration] = None, + configuration: Configuration | None = None, ) -> Optional["HDXObject"]: """Reads the HDX object given by identifier from HDX and returns it Args: - object_type (str): Description of HDX object type (for messages) - identifier (str): Identifier - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + object_type: Description of HDX object type (for messages) + identifier: Identifier + configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[HDXObject]: HDX object if successful read, None if not + HDX object if successful read, None if not """ hdxobject = cls(configuration=configuration) result = hdxobject._load_from_hdx(object_type, identifier) @@ -190,9 +190,9 @@ def _check_load_existing_object( """Check metadata exists and contains HDX object identifier, and if so load HDX object Args: - object_type (str): Description of HDX object type (for messages) - id_field_name (str): Name of field containing HDX object identifier - operation (str): Operation to report if error. Defaults to update. + object_type: Description of HDX object type (for messages) + id_field_name: Name of field containing HDX object identifier + operation: Operation to report if error. Defaults to update. Returns: None @@ -202,24 +202,24 @@ def _check_load_existing_object( raise HDXError(f"No existing {object_type} to {operation}!") @abstractmethod - def check_required_fields(self, ignore_fields: ListTuple[str] = []) -> None: + def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None: """Abstract method to check that metadata for HDX object is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (ListTuple[str]): Fields to ignore. Default is []. + ignore_fields: Fields to ignore. Default is (). Returns: None """ def _check_required_fields( - self, object_type: str, ignore_fields: ListTuple[str] + self, object_type: str, ignore_fields: Sequence[str] ) -> None: """Helper method to check that metadata for HDX object is complete Args: - ignore_fields (ListTuple[str]): Any fields to ignore in the check + ignore_fields: Any fields to ignore in the check Returns: None @@ -235,7 +235,7 @@ def _check_fields(self, object_type: str, **kwargs: Any) -> None: """Helper method to check metadata fields unless it is specified not to do so. Args: - object_type (str): Description of HDX object type (for messages) + object_type: Description of HDX object type (for messages) **kwargs: See below ignore_field (str): Any field to ignore when checking dataset metadata. Defaults to None. @@ -277,17 +277,17 @@ def _hdx_update( self, object_type: str, id_field_name: str, - files_to_upload: Dict = {}, + files_to_upload: dict | None = None, force_active: bool = False, **kwargs: Any, ) -> None: """Helper method to update HDX object Args: - object_type (str): Description of HDX object type (for messages) - id_field_name (str): Name of field containing HDX object identifier - files_to_upload (Dict): Files to upload to HDX - force_active (bool): Make object state active. Defaults to False. 
+ object_type: Description of HDX object type (for messages) + id_field_name: Name of field containing HDX object identifier + files_to_upload: Files to upload to HDX + force_active: Make object state active. Defaults to False. **kwargs: See below operation (str): Operation to perform eg. patch. Defaults to update. ignore_field (str): Any field to ignore when checking metadata. Defaults to None. @@ -305,17 +305,17 @@ def _merge_hdx_update( self, object_type: str, id_field_name: str, - files_to_upload: Dict = {}, + files_to_upload: dict | None = None, force_active: bool = False, **kwargs: Any, ) -> None: """Helper method to check if HDX object exists and update it Args: - object_type (str): Description of HDX object type (for messages) - id_field_name (str): Name of field containing HDX object identifier - files_to_upload (Dict): Files to upload to HDX - force_active (bool): Make object state active. Defaults to False. + object_type: Description of HDX object type (for messages) + id_field_name: Name of field containing HDX object identifier + files_to_upload: Files to upload to HDX + force_active: Make object state active. Defaults to False. **kwargs: See below operation (str): Operation to perform eg. patch. Defaults to update. ignore_field (str): Any field to ignore when checking metadata. Defaults to None. @@ -344,17 +344,17 @@ def _update_in_hdx( self, object_type: str, id_field_name: str, - files_to_upload: Dict = {}, + files_to_upload: dict | None = None, force_active: bool = True, **kwargs: Any, ) -> None: """Helper method to check if HDX object exists in HDX and if so, update it Args: - object_type (str): Description of HDX object type (for messages) - id_field_name (str): Name of field containing HDX object identifier - files_to_upload (Dict): Files to upload to HDX - force_active (bool): Make object state active. Defaults to True. + object_type: Description of HDX object type (for messages) + id_field_name: Name of field containing HDX object identifier + files_to_upload: Files to upload to HDX + force_active: Make object state active. Defaults to True. **kwargs: See below operation (str): Operation to perform eg. patch. Defaults to update. ignore_field (str): Any field to ignore when checking dataset metadata. Defaults to None. @@ -378,26 +378,28 @@ def _update_in_hdx( def _write_to_hdx( self, action: str, - data: Dict, - id_field_name: Optional[str] = None, - files_to_upload: Dict = {}, - ) -> Union[Dict, List]: + data: dict, + id_field_name: str | None = None, + files_to_upload: dict | None = None, + ) -> dict | list: """Creates or updates an HDX object in HDX and return HDX object metadata dict Args: - action (str): Action to perform eg. 'create', 'update' - data (Dict): Data to write to HDX - id_field_name (Optional[str]): Name of field containing HDX object identifier. Defaults to None. - files_to_upload (Dict): Files to upload to HDX + action: Action to perform eg. 'create', 'update' + data: Data to write to HDX + id_field_name: Name of field containing HDX object identifier. Defaults to None. 
+            files_to_upload: Files to upload to HDX

         Returns:
-            Union[Dict,List]: HDX object metadata
+            HDX object metadata
         """
+        open_files_to_upload = {}
         try:
-            for key, value in files_to_upload.items():
-                files_to_upload[key] = open(value, "rb")
+            if files_to_upload:
+                for key, value in files_to_upload.items():
+                    open_files_to_upload[key] = open(value, "rb")
             return self.configuration.call_remoteckan(
-                self.actions()[action], data, files=files_to_upload
+                self.actions()[action], data, files=open_files_to_upload
             )
         except Exception as e:
             if id_field_name:
@@ -406,26 +408,24 @@ def _write_to_hdx(
                 idstr = ""
             raise HDXError(f"Failed when trying to {action}{idstr}! (POST)") from e
         finally:
-            for file in files_to_upload.values():
-                if isinstance(file, str):
-                    continue
+            for file in open_files_to_upload.values():
                 file.close()

     def _save_to_hdx(
         self,
         action: str,
         id_field_name: str,
-        files_to_upload: Dict = {},
+        files_to_upload: dict | None = None,
         force_active: bool = False,
     ) -> None:
         """Creates or updates an HDX object in HDX, saving current data and
         replacing with returned HDX object data from HDX

         Args:
-            action (str): Action to perform: 'create' or 'update'
-            id_field_name (str): Name of field containing HDX object identifier
-            files_to_upload (Dict): Files to upload to HDX
-            force_active (bool): Make object state active. Defaults to False.
+            action: Action to perform: 'create' or 'update'
+            id_field_name: Name of field containing HDX object identifier
+            files_to_upload: Files to upload to HDX
+            force_active: Make object state active. Defaults to False.

         Returns:
             None
@@ -449,7 +449,7 @@ def _create_in_hdx(
         object_type: str,
         id_field_name: str,
         name_field_name: str,
-        files_to_upload: Dict = {},
+        files_to_upload: dict | None = None,
         force_active: bool = True,
         **kwargs: Any,
     ) -> None:
@@ -457,11 +457,11 @@

         Args:
-            object_type (str): Description of HDX object type (for messages)
-            id_field_name (str): Name of field containing HDX object identifier
-            name_field_name (str): Name of field containing HDX object name
-            files_to_upload (Dict): Files to upload to HDX
-            force_active (bool): Make object state active. Defaults to True.
+            object_type: Description of HDX object type (for messages)
+            id_field_name: Name of field containing HDX object identifier
+            name_field_name: Name of field containing HDX object name
+            files_to_upload: Files to upload to HDX
+            force_active: Make object state active. Defaults to True.

         Returns:
             None
@@ -494,8 +494,8 @@ def _delete_from_hdx(self, object_type: str, id_field_name: str) -> None:
-        """Helper method to deletes a resource from HDX
+        """Helper method to delete an HDX object from HDX

         Args:
-            object_type (str): Description of HDX object type (for messages)
-            id_field_name (str): Name of field containing HDX object identifier
+            object_type: Description of HDX object type (for messages)
+            id_field_name: Name of field containing HDX object identifier

         Returns:
             None
@@ -509,20 +509,20 @@ def _autocomplete(
         cls,
         name: str,
         limit: int = 20,
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
         **kwargs: Any,
-    ) -> List:
+    ) -> list:
         """Helper method to autocomplete a name and return matches

         Args:
-            name (str): Name to autocomplete
-            limit (int): Maximum number of matches to return
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            name: Name to autocomplete
+            limit: Maximum number of matches to return
+            configuration: HDX configuration. Defaults to global configuration.
**kwargs: - offset (int): The offset to start returning tags from. + offset: The offset to start returning tags from. Returns: - List: Autocomplete matches + Autocomplete matches """ hdxobject = cls(configuration=configuration) data = {"q": name, "limit": limit} @@ -531,7 +531,7 @@ def _autocomplete( def _addupdate_hdxobject( self, - hdxobjects: ListTuple["HDXObject"], + hdxobjects: Sequence["HDXObject"], id_field: str, new_hdxobject: "HDXObject", ) -> "HDXObject": @@ -539,12 +539,12 @@ def _addupdate_hdxobject( already exists in the list Args: - hdxobjects (ListTuple[HDXObject]): list of HDX objects to which to add new objects or update existing ones - id_field (str): Field on which to match to determine if object already exists in list - new_hdxobject (HDXObject): The HDX object to be added/updated + hdxobjects: list of HDX objects to which to add new objects or update existing ones + id_field: Field on which to match to determine if object already exists in list + new_hdxobject: The HDX object to be added/updated Returns: - HDXObject: The HDX object which was added or updated + The HDX object which was added or updated """ for hdxobject in hdxobjects: if hdxobject[id_field] == new_hdxobject[id_field]: @@ -555,21 +555,21 @@ def _addupdate_hdxobject( def _remove_hdxobject( self, - objlist: ListTuple[Union["HDXObject", Dict]], - obj: Union["HDXObject", Dict, str], + objlist: Sequence[Union["HDXObject", dict]], + obj: Union["HDXObject", dict, str], matchon: str = "id", delete: bool = False, ) -> bool: """Remove an HDX object from a list within the parent HDX object Args: - objlist (ListTuple[Union[HDXObject,Dict]]): list of HDX objects - obj (Union[HDXObject,Dict,str]): Either an id or hdx object metadata either from an HDX object or a dictionary - matchon (str): Field to match on. Defaults to id. - delete (bool): Whether to delete HDX object. Defaults to False. + objlist: list of HDX objects + obj: Either an id or hdx object metadata either from an HDX object or a dictionary + matchon: Field to match on. Defaults to id. + delete: Whether to delete HDX object. Defaults to False. Returns: - bool: True if object removed, False if not + True if object removed, False if not """ if objlist is None: return False @@ -590,14 +590,15 @@ def _remove_hdxobject( return True return False - def _convert_hdxobjects(self, hdxobjects: ListTuple["HDXObject"]) -> List[Dict]: + @staticmethod + def _convert_hdxobjects(hdxobjects: Sequence["HDXObject"]) -> list[dict]: """Helper function to convert supplied list of HDX objects to a list of dict Args: - hdxobjects (ListTuple[HDXObject]): List of HDX objects to convert + hdxobjects: List of HDX objects to convert Returns: - List[Dict]: List of HDX objects converted to simple dictionaries + List of HDX objects converted to simple dictionaries """ newhdxobjects = [] for hdxobject in hdxobjects: @@ -606,19 +607,19 @@ def _convert_hdxobjects(self, hdxobjects: ListTuple["HDXObject"]) -> List[Dict]: def _copy_hdxobjects( self, - hdxobjects: ListTuple["HDXObject"], + hdxobjects: Sequence["HDXObject"], hdxobjectclass: type, - attributes_to_copy: ListTuple[str] = (), - ) -> List["HDXObject"]: + attributes_to_copy: Sequence[str] = (), + ) -> list["HDXObject"]: """Helper function to make a deep copy of a supplied list of HDX objects Args: - hdxobjects (ListTuple[HDXObject]): list of HDX objects to copy - hdxobjectclass (type): Type of the HDX Objects to be copied - attributes_to_copy (ListTuple[str]): Attributes to copy over from the HDX object. Defaults to (). 
+            hdxobjects: list of HDX objects to copy
+            hdxobjectclass: Type of the HDX Objects to be copied
+            attributes_to_copy: Attributes to copy over from the HDX object. Defaults to ().

         Returns:
-            List[HDXObject]: Deep copy of list of HDX objects
+            Deep copy of list of HDX objects
         """
         newhdxobjects = []
         for hdxobject in hdxobjects:
@@ -634,7 +635,7 @@ def _copy_hdxobjects(

     def _separate_hdxobjects(
         self,
-        hdxobjects: ListTuple["HDXObject"],
+        hdxobjects: list["HDXObject"],
         hdxobjects_name: str,
         id_field: str,
         hdxobjectclass: type,
@@ -644,10 +645,10 @@
         the internal dictionary is then deleted.

         Args:
-            hdxobjects (ListTuple[HDXObject]): list of HDX objects to which to add new objects or update existing ones
-            hdxobjects_name (str): Name of key in internal dictionary from which to obtain list of HDX objects
-            id_field (str): Field on which to match to determine if object already exists in list
-            hdxobjectclass (type): Type of the HDX Object to be added/updated
+            hdxobjects: list of HDX objects to which to add new objects or update existing ones
+            hdxobjects_name: Name of key in internal dictionary from which to obtain list of HDX objects
+            id_field: Field on which to match to determine if object already exists in list
+            hdxobjectclass: Type of the HDX Object to be added/updated

         Returns:
             None
@@ -670,26 +671,26 @@ def _separate_hdxobjects(
             )
         del self.data[hdxobjects_name]

-    def _get_tags(self) -> List[str]:
+    def _get_tags(self) -> list[str]:
         """Return the dataset's list of tags

         Returns:
-            List[str]: list of tags or [] if there are none
+            list of tags or [] if there are none
         """
         tags = self.data.get("tags", None)
         if not tags:
             return []
         return [x["name"] for x in tags]

-    def _add_tag(self, tag: str, vocabulary_id: Optional[str] = None) -> bool:
+    def _add_tag(self, tag: str, vocabulary_id: str | None = None) -> bool:
         """Add a tag

         Args:
-            tag (str): Tag to add
-            vocabulary_id (Optional[str]): Vocabulary tag is in. Defaults to None.
+            tag: Tag to add
+            vocabulary_id: Vocabulary tag is in. Defaults to None.

         Returns:
-            bool: True if tag added or False if tag already present
+            True if tag added or False if tag already present
         """
         tag = tag.lower()
         tags = self.data.get("tags", None)
@@ -706,16 +707,16 @@ def _add_tag(self, tag: str, vocabulary_id: Optional[str] = None) -> bool:
             return True

     def _add_tags(
-        self, tags: ListTuple[str], vocabulary_id: Optional[str] = None
-    ) -> List[str]:
+        self, tags: Sequence[str], vocabulary_id: str | None = None
+    ) -> list[str]:
-        """Add a list of tag
+        """Add a list of tags

         Args:
-            tags (ListTuple[str]): list of tags to add
-            vocabulary_id (Optional[str]): Vocabulary tag is in. Defaults to None.
+            tags: list of tags to add
+            vocabulary_id: Vocabulary tag is in. Defaults to None.
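For orientation, a hedged sketch of the tag state these helpers manage (assuming, as the code above suggests, that tags live in the internal dictionary as a list of dicts keyed on name; the vocabulary id is hypothetical):

    obj._add_tags(["idps", "displacement"], vocabulary_id="abc123")
    obj.data["tags"]  # e.g. [{"name": "idps", "vocabulary_id": "abc123"}, ...]
    obj._get_tags()   # ["idps", "displacement"]
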
Returns: - List[str]: Tags that were successfully added + Tags that were successfully added """ added_tags = [] for tag in tags: @@ -723,14 +724,14 @@ def _add_tags( added_tags.append(tag) return added_tags - def _get_stringlist_from_commastring(self, field: str) -> List[str]: + def _get_stringlist_from_commastring(self, field: str) -> list[str]: """Return list of strings from comma separated list Args: - field (str): Field containing comma separated list + field: Field containing comma separated list Returns: - List[str]: List of strings + List of strings """ strings = self.data.get(field) if strings: @@ -742,11 +743,11 @@ def _add_string_to_commastring(self, field: str, string: str) -> bool: """Add a string to a comma separated list of strings Args: - field (str): Field containing comma separated list - string (str): String to add + field: Field containing comma separated list + string: String to add Returns: - bool: True if string added or False if string already present + True if string added or False if string already present """ if string in self._get_stringlist_from_commastring(field): return False @@ -756,15 +757,15 @@ def _add_string_to_commastring(self, field: str, string: str) -> bool: self.data[field] = strings return True - def _add_strings_to_commastring(self, field: str, strings: ListTuple[str]) -> bool: + def _add_strings_to_commastring(self, field: str, strings: Sequence[str]) -> bool: """Add a list of strings to a comma separated list of strings Args: - field (str): Field containing comma separated list - strings (ListTuple[str]): list of strings to add + field: Field containing comma separated list + strings: list of strings to add Returns: - bool: True if all strings added or False if any already present. + True if all strings added or False if any already present. """ allstringsadded = True for string in strings: @@ -776,11 +777,11 @@ def _remove_string_from_commastring(self, field: str, string: str) -> bool: """Remove a string from a comma separated list of strings Args: - field (str): Field containing comma separated list - string (str): String to remove + field: Field containing comma separated list + string: String to remove Returns: - bool: True if string removed or False if not + True if string removed or False if not """ commastring = self.data.get(field, "") if string in commastring: diff --git a/src/hdx/data/organization.py b/src/hdx/data/organization.py index 65dc8bf0..3609d6e9 100755 --- a/src/hdx/data/organization.py +++ b/src/hdx/data/organization.py @@ -1,14 +1,14 @@ """Organization class containing all logic for creating, checking, and updating organizations.""" import logging +from collections.abc import Sequence from os.path import join -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union import hdx.data.dataset import hdx.data.user as user_module from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError, HDXObject -from hdx.utilities.typehint import ListTuple if TYPE_CHECKING: from hdx.data.user import User @@ -20,25 +20,25 @@ class Organization(HDXObject): """Organization class containing all logic for creating, checking, and updating organizations. Args: - initial_data (Optional[Dict]): Initial organization metadata dictionary. Defaults to None. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + initial_data: Initial organization metadata dictionary. Defaults to None. + configuration: HDX configuration. 
Defaults to global configuration. """ def __init__( self, - initial_data: Optional[Dict] = None, - configuration: Optional[Configuration] = None, + initial_data: dict | None = None, + configuration: Configuration | None = None, ) -> None: if not initial_data: initial_data = {} super().__init__(initial_data, configuration=configuration) @staticmethod - def actions() -> Dict[str, str]: + def actions() -> dict[str, str]: """Dictionary of actions that can be performed on object Returns: - Dict[str, str]: Dictionary of actions that can be performed on object + Dictionary of actions that can be performed on object """ return { "show": "organization_show", @@ -55,7 +55,7 @@ def update_from_yaml( """Update organization metadata with static metadata from YAML file Args: - path (str): Path to YAML dataset metadata. Defaults to config/hdx_organization_static.yaml. + path: Path to YAML dataset metadata. Defaults to config/hdx_organization_static.yaml. Returns: None @@ -68,7 +68,7 @@ def update_from_json( """Update organization metadata with static metadata from JSON file Args: - path (str): Path to JSON dataset metadata. Defaults to config/hdx_organization_static.json. + path: Path to JSON dataset metadata. Defaults to config/hdx_organization_static.json. Returns: None @@ -77,25 +77,25 @@ def update_from_json( @classmethod def read_from_hdx( - cls, identifier: str, configuration: Optional[Configuration] = None + cls, identifier: str, configuration: Configuration | None = None ) -> Optional["Organization"]: """Reads the organization given by identifier from HDX and returns Organization object Args: - identifier (str): Identifier of organization - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + identifier: Identifier of organization + configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[Organization]: Organization object if successful read, None if not + Organization object if successful read, None if not """ return cls._read_from_hdx_class("organization", identifier, configuration) - def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None: + def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None: """Check that metadata for organization is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). + ignore_fields: Fields to ignore. Default is (). Returns: None @@ -126,13 +126,13 @@ def delete_from_hdx(self) -> None: """ self._delete_from_hdx("organization", "id") - def get_users(self, capacity: Optional[str] = None) -> List["User"]: + def get_users(self, capacity: str | None = None) -> list["User"]: """Returns the organization's users. Args: - capacity (Optional[str]): Filter by capacity eg. member, admin. Defaults to None. + capacity: Filter by capacity eg. member, admin. Defaults to None. Returns: - List[User]: Organization's users. + Organization's users. """ users = [] usersdicts = self.data.get("users") @@ -152,16 +152,16 @@ def get_users(self, capacity: Optional[str] = None) -> List["User"]: def add_update_user( self, - user: Union["User", Dict, str], - capacity: Optional[str] = None, + user: Union["User", dict, str], + capacity: str | None = None, ) -> None: """Add new or update existing user in organization with new metadata. Capacity eg. 
member, admin must be supplied either within the User object or dictionary or using the capacity argument (which takes precedence). Args: - user (Union[User,Dict,str]): Either a user id or user metadata either from a User object or a dictionary - capacity (Optional[str]): Capacity of user eg. member, admin. Defaults to None. + user: Either a user id or user metadata either from a User object or a dictionary + capacity: Capacity of user eg. member, admin. Defaults to None. Returns: None @@ -186,16 +186,16 @@ def add_update_user( def add_update_users( self, - users: ListTuple[Union["User", Dict, str]], - capacity: Optional[str] = None, + users: Sequence[Union["User", dict, str]], + capacity: str | None = None, ) -> None: """Add new or update existing users in organization with new metadata. Capacity eg. member, admin must be supplied either within the User object or dictionary or using the capacity argument (which takes precedence). Args: - users (ListTuple[Union[User,Dict,str]]): A list of either user ids or users metadata from User objects or dictionaries - capacity (Optional[str]): Capacity of users eg. member, admin. Defaults to None. + users: A list of either user ids or users metadata from User objects or dictionaries + capacity: Capacity of users eg. member, admin. Defaults to None. Returns: None @@ -203,22 +203,22 @@ def add_update_users( for user in users: self.add_update_user(user, capacity) - def remove_user(self, user: Union["User", Dict, str]) -> bool: + def remove_user(self, user: Union["User", dict, str]) -> bool: """Remove a user from the organization Args: - user (Union[User,Dict,str]): Either a user id or user metadata either from a User object or a dictionary + user: Either a user id or user metadata either from a User object or a dictionary Returns: - bool: True if user removed or False if not + True if user removed or False if not """ return self._remove_hdxobject(self.data.get("users"), user) - def get_datasets(self, query: str = "*:*", **kwargs: Any) -> List["Dataset"]: # noqa: F821 + def get_datasets(self, query: str = "*:*", **kwargs: Any) -> list["Dataset"]: # noqa: F821 """Get list of datasets in organization Args: - query (str): Restrict datasets returned to this query (in Solr format). Defaults to '*:*'. + query: Restrict datasets returned to this query (in Solr format). Defaults to '*:*'. **kwargs: See below sort (string): Sorting of the search results. Defaults to 'relevance asc, metadata_modified desc'. rows (int): Number of matching rows to return. Defaults to all datasets (sys.maxsize). @@ -226,11 +226,11 @@ def get_datasets(self, query: str = "*:*", **kwargs: Any) -> List["Dataset"]: # facet (string): Whether to enable faceted results. Default to True. facet.mincount (int): Minimum counts for facet fields should be included in the results facet.limit (int): Maximum number of values the facet fields return (- = unlimited). Defaults to 50. - facet.field (List[str]): Fields to facet upon. Default is empty. + facet.field (list[str]): Fields to facet upon. Default is empty. use_default_schema (bool): Use default package schema instead of custom schema. Defaults to False. 
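A short usage sketch of the organization calls above (the organization name and filter values are hypothetical):

    org = Organization.read_from_hdx("hdx")
    admins = org.get_users(capacity="admin")
    datasets = org.get_datasets(rows=10, sort="metadata_modified desc")
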
Returns: - List[Dataset]: List of datasets in organization + List of datasets in organization """ return hdx.data.dataset.Dataset.search_in_hdx( query=query, @@ -241,23 +241,23 @@ def get_datasets(self, query: str = "*:*", **kwargs: Any) -> List["Dataset"]: # @staticmethod def get_all_organization_names( - configuration: Optional[Configuration] = None, **kwargs: Any - ) -> List[str]: + configuration: Configuration | None = None, **kwargs: Any + ) -> list[str]: """Get all organization names in HDX Args: - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + configuration: HDX configuration. Defaults to global configuration. **kwargs: See below sort (str): Sort the search results according to field name and sort-order. Allowed fields are ‘name’, ‘package_count’ and ‘title’. Defaults to 'name asc'. - organizations (List[str]): List of names of the groups to return. + organizations (list[str]): List of names of the groups to return. all_fields (bool): Return group dictionaries instead of just names. Only core fields are returned - get some more using the include_* options. Defaults to False. include_extras (bool): If all_fields, include the group extra fields. Defaults to False. include_tags (bool): If all_fields, include the group tags. Defaults to False. - include_groups: If all_fields, include the groups the groups are in. Defaults to False. + include_groups (bool): If all_fields, include the groups the groups are in. Defaults to False. include_users (bool): If all_fields, include the organization users. Defaults to False. Returns: - List[str]: List of all organization names in HDX + List of all organization names in HDX """ organization = Organization(configuration=configuration) return organization._write_to_hdx("list", kwargs) @@ -267,16 +267,16 @@ def autocomplete( cls, name: str, limit: int = 20, - configuration: Optional[Configuration] = None, - ) -> List: + configuration: Configuration | None = None, + ) -> list: """Autocomplete an organization name and return matches Args: - name (str): Name to autocomplete - limit (int): Maximum number of matches to return - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + name: Name to autocomplete + limit: Maximum number of matches to return + configuration: HDX configuration. Defaults to global configuration. 
Returns: - List: Autocomplete matches + Autocomplete matches """ return cls._autocomplete(name, limit, configuration) diff --git a/src/hdx/data/resource.py b/src/hdx/data/resource.py index 9212b6cf..a579092b 100755 --- a/src/hdx/data/resource.py +++ b/src/hdx/data/resource.py @@ -2,10 +2,17 @@ import logging import warnings +from collections.abc import Sequence from datetime import datetime from os.path import join from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional + +from hdx.utilities.dateparse import now_utc, now_utc_notz, parse_date +from hdx.utilities.downloader import Download +from hdx.utilities.file_hashing import get_size_and_hash +from hdx.utilities.retriever import Retrieve +from hdx.utilities.uuid import is_valid_uuid import hdx.data.dataset import hdx.data.resource_matcher @@ -13,12 +20,6 @@ from hdx.api.utilities.date_helper import DateHelper from hdx.data.hdxobject import HDXError, HDXObject from hdx.data.resource_view import ResourceView -from hdx.utilities.dateparse import now_utc, now_utc_notz, parse_date -from hdx.utilities.downloader import Download -from hdx.utilities.file_hashing import get_size_and_hash -from hdx.utilities.retriever import Retrieve -from hdx.utilities.typehint import ListTuple -from hdx.utilities.uuid import is_valid_uuid logger = logging.getLogger(__name__) @@ -28,16 +29,16 @@ class Resource(HDXObject): resources. Args: - initial_data (Optional[Dict]): Initial resource metadata dictionary. Defaults to None. - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + initial_data: Initial resource metadata dictionary. Defaults to None. + configuration: HDX configuration. Defaults to global configuration. """ _formats_dict = None def __init__( self, - initial_data: Optional[Dict] = None, - configuration: Optional[Configuration] = None, + initial_data: dict | None = None, + configuration: Configuration | None = None, ) -> None: if not initial_data: initial_data = {} @@ -47,11 +48,11 @@ def __init__( self._url_backup = None @staticmethod - def actions() -> Dict[str, str]: + def actions() -> dict[str, str]: """Dictionary of actions that can be performed on object Returns: - Dict[str, str]: Dictionary of actions that can be performed on object + Dictionary of actions that can be performed on object """ return { "show": "resource_show", @@ -71,7 +72,7 @@ def update_from_yaml( """Update resource metadata with static metadata from YAML file Args: - path (Optional[str]): Path to YAML dataset metadata. Defaults to config/hdx_resource_static.yaml. + path: Path to YAML dataset metadata. Defaults to config/hdx_resource_static.yaml. Returns: None @@ -84,7 +85,7 @@ def update_from_json( """Update resource metadata with static metadata from JSON file Args: - path (Optional[str]): Path to JSON dataset metadata. Defaults to config/hdx_resource_static.json. + path: Path to JSON dataset metadata. Defaults to config/hdx_resource_static.json. Returns: None @@ -93,16 +94,16 @@ def update_from_json( @classmethod def read_from_hdx( - cls, identifier: str, configuration: Optional[Configuration] = None + cls, identifier: str, configuration: Configuration | None = None ) -> Optional["Resource"]: """Reads the resource given by identifier from HDX and returns Resource object Args: - identifier (str): Identifier of resource - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. 
+            identifier: Identifier of resource
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Optional[Resource]: Resource object if successful read, None if not
+            Resource object if successful read, None if not
         """

         if is_valid_uuid(identifier) is False:
@@ -111,9 +112,9 @@ def read_from_hdx(

     def get_date_of_resource(
         self,
-        date_format: Optional[str] = None,
+        date_format: str | None = None,
         today: datetime = now_utc(),
-    ) -> Dict:
+    ) -> dict:
         """Get resource date as datetimes and strings in specified format. If no format
         is supplied, the ISO 8601 format is used. Returns a dictionary containing keys
         startdate (start date as datetime), enddate (end date as datetime),
-        ongoing (whether the end date is a rolls forward every day).
+        ongoing (whether the end date rolls forward every day).

         Args:
-            date_format (Optional[str]): Date format. None is taken to be ISO 8601. Defaults to None.
-            today (datetime): Date to use for today. Defaults to now_utc().
+            date_format: Date format. None is taken to be ISO 8601. Defaults to None.
+            today: Date to use for today. Defaults to now_utc().

         Returns:
-            Dict: Dictionary of date information
+            Dictionary of date information
         """
         return DateHelper.get_time_period_info(
             self.data.get("daterange_for_data"), date_format, today
@@ -133,8 +134,8 @@

     def set_date_of_resource(
         self,
-        startdate: Union[datetime, str],
-        enddate: Union[datetime, str],
+        startdate: datetime | str,
+        enddate: datetime | str,
         ignore_timeinfo: bool = True,
     ) -> None:
         """Set resource date from either datetime objects or strings. Any time and time
@@ -144,9 +145,9 @@
         ignore_timeinfo to False. In this case, the time will be converted to UTC.

         Args:
-            startdate (Union[datetime, str]): Resource start date
-            enddate (Union[datetime, str]): Resource end date
-            ignore_timeinfo (bool): Ignore time and time zone of date. Defaults to True.
+            startdate: Resource start date
+            enddate: Resource end date
+            ignore_timeinfo: Ignore time and time zone of date. Defaults to True.

         Returns:
             None
@@ -158,18 +159,18 @@ def read_formats_mappings(
         cls,
-        configuration: Optional[Configuration] = None,
-        url: Optional[str] = None,
-    ) -> Dict:
+        configuration: Configuration | None = None,
+        url: str | None = None,
+    ) -> dict:
         """
         Read HDX formats list

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
-            url (Optional[str]): Url of tags cleanup spreadsheet. Defaults to None (internal configuration parameter).
+            configuration: HDX configuration. Defaults to global configuration.
+            url: Url of formats spreadsheet. Defaults to None (internal configuration parameter).

         Returns:
-            Dict: Returns formats dictionary
+            Formats dictionary
         """
         if not cls._formats_dict:
             if configuration is None:
@@ -192,12 +193,12 @@
         return cls._formats_dict

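A hedged sketch of the time-period helpers above (resource and dates are hypothetical):

    resource.set_date_of_resource("2023-01-01", "2023-12-31")
    info = resource.get_date_of_resource(date_format="%Y-%m-%d")
    # info holds startdate/enddate datetimes plus their string forms
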
    @classmethod
-    def set_formatsdict(cls, formats_dict: Dict) -> None:
+    def set_formatsdict(cls, formats_dict: dict) -> None:
         """
         Set formats dictionary

         Args:
-            formats_dict (Dict): Formats dictionary
+            formats_dict: Formats dictionary

         Returns:
             None
@@ -206,16 +207,16 @@

     @classmethod
     def get_mapped_format(
-        cls, format: str, configuration: Optional[Configuration] = None
-    ) -> Optional[str]:
+        cls, format: str, configuration: Configuration | None = None
+    ) -> str | None:
         """Given a file format, return an HDX format to which it maps

         Args:
-            format (str): File type to map
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            format: File type to map
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Optional[str]: Mapped format or None if no mapping found
+            Mapped format or None if no mapping found
         """
         if not format:
             return None
@@ -232,18 +233,18 @@
         hdx_format = mappings.get(file_format)
         return hdx_format

-    def get_format(self) -> Optional[str]:
+    def get_format(self) -> str | None:
         """Get the resource's format

         Returns:
-            Optional[str]: Resource's format or None if it has not been set
+            Resource's format or None if it has not been set
         """
         file_format = self.data.get("format")
         if file_format:
             file_format = file_format.lower()
         return file_format

-    def get_file_type(self) -> Optional[str]:
+    def get_file_type(self) -> str | None:
         warnings.warn(
             "get_file_type() is deprecated, use get_format() instead",
             DeprecationWarning,
@@ -254,10 +255,10 @@

     def set_format(self, format: str) -> str:
-        """Set the resource's file type
+        """Set the resource's format

         Args:
-            format (str): Format to set on resource
+            format: Format to set on resource

         Returns:
-            str: Format that was set
+            Format that was set
         """
         file_format = self.get_mapped_format(format, configuration=self.configuration)
         if not file_format:
@@ -279,7 +280,7 @@ def clean_format(self) -> str:
         cannot be mapped

         Returns:
-            str: Format that was set
+            Format that was set
         """
         return self.set_format(self.data.get("format"))

@@ -290,11 +291,11 @@ def clean_file_type(self) -> str:
         )
         return self.set_format(self.data.get("format"))

-    def get_file_to_upload(self) -> Optional[str]:
+    def get_file_to_upload(self) -> str | None:
         """Get the file uploaded

         Returns:
-            Optional[str]: The file that will be or has been uploaded or None if there isn't one
+            The file that will be or has been uploaded or None if there isn't one
         """
         return self._file_to_upload

     def set_file_to_upload(
         """Delete any existing url and set the file uploaded to the local path provided

         Args:
-            file_to_upload (str): Local path to file to upload
-            guess_format_from_suffix (bool): Set format from file suffix. Defaults to False.
+            file_to_upload: Local path to file to upload
+            guess_format_from_suffix: Set format from file suffix. Defaults to False.

         Returns:
-            Optional[str]: The format that was guessed or None if no format was set
+            The format that was guessed or None if no format was set
         """
         if "url" in self.data:
             self._url_backup = self.data["url"]
@@ -334,11 +335,11 @@ def check_neither_url_filetoupload(self) -> None:
         if self._file_to_upload is None and "url" not in self.data:
             raise HDXError("Either a url or a file to upload must be supplied!")

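A sketch of the upload flow above (path and metadata hypothetical): set_file_to_upload() drops any existing url and can guess the format from the file suffix:

    resource = Resource({"name": "admin1-data", "description": "Admin 1 data"})
    guessed = resource.set_file_to_upload("/tmp/admin1.csv", guess_format_from_suffix=True)
    # guessed is "csv", assuming the suffix maps to the HDX csv format
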
-    def correct_format(self, data: Dict = None) -> None:
+    def correct_format(self, data: dict | None = None) -> None:
         """Correct the format of the file

         Args:
-            data (Dict): Resource data.
+            data: Resource data. Defaults to None.

         Returns:
             None
@@ -372,13 +373,13 @@
         if "tracking_summary" in self.data:
             del self.data["tracking_summary"]

-    def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None:
+    def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None:
         """Check that metadata for resource is complete. The parameter
         ignore_fields should be set if required to any fields that should be
         ignored for the particular operation.

         Args:
-            ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple().
+            ignore_fields: Fields to ignore. Default is ().

         Returns:
             None
@@ -407,7 +408,7 @@
             force_update (bool): Force file to be updated even if it hasn't changed. Defaults to False.

         Returns:
-            int: Status code
+            Status code
         """
         data_updated = kwargs.pop("data_updated", self._data_updated)
         files = {}
@@ -457,7 +458,7 @@
         self._merge_hdx_update("resource", "id", files, True, **kwargs)
         return status

-    def _get_resource_id(self, **kwargs: Any) -> Optional[str]:
+    def _get_resource_id(self, **kwargs: Any) -> str | None:
         """Helper function to get resource id if available from given resource or
-        by comparing ot a given dataset's resources.
+        by comparing to a given dataset's resources.

         Args:
             dataset (Dataset): Existing dataset if available to obtain resource id

         Returns:
-            Optional[str]: Resource id or None
+            Resource id or None
         """
         loadedid = self.data.get("id")
         if loadedid is None:
@@ -533,7 +534,7 @@
             dataset (Dataset): Existing dataset if available to obtain resource id

         Returns:
-            int: Status code
+            Status code
         """
         self.check_both_url_filetoupload()
         _ = self._get_resource_id(**kwargs)
@@ -566,7 +567,7 @@
             dataset (Dataset): Existing dataset if available to obtain resource id

         Returns:
-            int: Status code
+            Status code
         """
         self.check_both_url_filetoupload()
         loadedid = self._get_resource_id(**kwargs)
@@ -604,7 +605,7 @@

     def get_dataset(self) -> "Dataset":  # noqa: F821
         """Return dataset containing this resource

         Returns:
-            Dataset: Dataset containing this resource
+            Dataset containing this resource
         """
         package_id = self.data.get("package_id")
         if package_id is None:
@@ -614,20 +615,20 @@

     @staticmethod
     def search_in_hdx(
         query: str,
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
         **kwargs: Any,
-    ) -> List["Resource"]:
+    ) -> list["Resource"]:
         """Searches for resources in HDX. NOTE: Does not search dataset metadata!

         Args:
-            query (str): Query
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            query: Query
+            configuration: HDX configuration. Defaults to global configuration.
             **kwargs: See below
             order_by (str): A field on the Resource model that orders the results
             offset (int): Apply an offset to the query
             limit (int): Apply a limit to the query

         Returns:
-            List[Resource]: List of resources resulting from query
+            List of resources resulting from query
         """

         resources = []
@@ -646,17 +647,17 @@
         return resources

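A hedged sketch of the search just above (the query string is hypothetical and dataset metadata is not searched):

    matches = Resource.search_in_hdx("name:admin1", limit=5)
    for match in matches:
        print(match["name"], match.get_format())
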
    def download(
-        self, folder: Optional[str] = None, retriever: Optional[Retrieve] = None
-    ) -> Tuple[str, str]:
+        self, folder: str | None = None, retriever: Retrieve | None = None
+    ) -> tuple[str, str]:
-        """Download resource, storing to provided folder or temporary folder if no
-        folder supplied
+        """Download resource, storing it in the provided folder or a temporary
+        folder if none is supplied

         Args:
-            folder (Optional[str]): Folder to download resource to. Defaults to None.
-            retriever (Optional[Retrieve]): Retrieve object to use. Defaults to None.
+            folder: Folder to download resource to. Defaults to None.
+            retriever: Retrieve object to use. Defaults to None.

         Returns:
-            Tuple[str, str]: (URL downloaded, Path to downloaded file)
+            (URL downloaded, Path to downloaded file)
         """

         # Download the resource
@@ -685,15 +686,15 @@

     @staticmethod
     def get_all_resource_ids_in_datastore(
-        configuration: Optional[Configuration] = None,
-    ) -> List[str]:
+        configuration: Configuration | None = None,
+    ) -> list[str]:
         """Get list of resources that have a datastore returning their ids.

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[str]: List of resource ids that are in the datastore
+            List of resource ids that are in the datastore
         """
         resource = Resource(configuration=configuration)
         success, result = resource._read_from_hdx(
@@ -715,7 +716,7 @@

     def has_datastore(self) -> bool:
         """Check if the resource has a datastore.

         Returns:
-            bool: Whether the resource has a datastore or not
+            Whether the resource has a datastore or not
         """
         success, result = self._read_from_hdx(
             "datastore",
@@ -746,24 +747,22 @@
         if not success:
             logger.debug(result)

-    def get_resource_views(self) -> List[ResourceView]:
+    def get_resource_views(self) -> list[ResourceView]:
         """Get any resource views in the resource

         Returns:
-            List[ResourceView]: List of resource views
+            List of resource views
         """
         return ResourceView.get_all_for_resource(self.data["id"])

-    def _get_resource_view(
-        self, resource_view: Union[ResourceView, Dict]
-    ) -> ResourceView:
+    def _get_resource_view(self, resource_view: ResourceView | dict) -> ResourceView:
-        """Get resource view id
+        """Get ResourceView object from ResourceView or dictionary

         Args:
-            resource_view (Union[ResourceView,Dict]): ResourceView metadata from a ResourceView object or dictionary
+            resource_view: ResourceView metadata from a ResourceView object or dictionary

         Returns:
-            ResourceView: ResourceView object
+            ResourceView object
         """
         if isinstance(resource_view, dict):
             resource_view = ResourceView(
@@ -775,13 +774,11 @@
             f"Type {type(resource_view).__name__} is not a valid resource view!"
) - def add_update_resource_view( - self, resource_view: Union[ResourceView, Dict] - ) -> None: + def add_update_resource_view(self, resource_view: ResourceView | dict) -> None: """Add new resource view in resource with new metadata Args: - resource_view (Union[ResourceView,Dict]): Resource view metadata either from a ResourceView object or a dictionary + resource_view: Resource view metadata either from a ResourceView object or a dictionary Returns: None @@ -791,12 +788,12 @@ def add_update_resource_view( resource_view.create_in_hdx() def add_update_resource_views( - self, resource_views: ListTuple[Union[ResourceView, Dict]] + self, resource_views: Sequence[ResourceView | dict] ) -> None: """Add new or update existing resource views in resource with new metadata. Args: - resource_views (ListTuple[Union[ResourceView,Dict]]): A list of resource views metadata from ResourceView objects or dictionaries + resource_views: A list of resource views metadata from ResourceView objects or dictionaries Returns: None @@ -807,12 +804,12 @@ def add_update_resource_views( self.add_update_resource_view(resource_view) def reorder_resource_views( - self, resource_views: ListTuple[Union[ResourceView, Dict, str]] + self, resource_views: Sequence[ResourceView | dict | str] ) -> None: """Order resource views in resource. Args: - resource_views (ListTuple[Union[ResourceView,Dict,str]]): A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries + resource_views: A list of either resource view ids or resource views metadata from ResourceView objects or dictionaries Returns: None @@ -836,13 +833,11 @@ def reorder_resource_views( order=ids, ) - def delete_resource_view( - self, resource_view: Union[ResourceView, Dict, str] - ) -> None: + def delete_resource_view(self, resource_view: ResourceView | dict | str) -> None: """Delete a resource view from the resource and HDX Args: - resource_view (Union[ResourceView,Dict,str]): Either a resource view id or resource view metadata either from a ResourceView object or a dictionary + resource_view: Either a resource view id or resource view metadata either from a ResourceView object or a dictionary Returns: None @@ -889,7 +884,7 @@ def is_broken(self) -> bool: """Return if resource is broken Returns: - bool: Whether resource is broken + Whether resource is broken """ return self.data.get("broken_link", False) @@ -915,7 +910,7 @@ def is_marked_data_updated(self) -> bool: """Return if the resource's data is marked to be updated Returns: - bool: Whether resource's data is marked to be updated + Whether resource's data is marked to be updated """ return self._data_updated @@ -931,18 +926,18 @@ def get_date_data_updated(self) -> datetime: """Get date resource data was updated Returns: - datetime: Date resource data was updated + Date resource data was updated """ return parse_date(self.data["last_modified"], include_microseconds=True) def set_date_data_updated( - self, date: Union[datetime, str], ignore_timeinfo: bool = False + self, date: datetime | str, ignore_timeinfo: bool = False ) -> None: """Set date resource data was updated Args: - date (Union[datetime, str]): Date resource data was updated - ignore_timeinfo (bool): Ignore time and time zone of date. Defaults to False. + date: Date resource data was updated + ignore_timeinfo: Ignore time and time zone of date. Defaults to False. 
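A small sketch of the data-updated accessors above (the date is hypothetical):

    resource.set_date_data_updated("2024-06-01", ignore_timeinfo=False)
    when = resource.get_date_data_updated()  # parsed from the last_modified field
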

         Returns:
             None
@@ -951,11 +946,11 @@ def set_date_data_updated(
             date, ignore_timeinfo=ignore_timeinfo, include_microseconds=True
         )

-    def get_hdx_url(self) -> Optional[str]:
+    def get_hdx_url(self) -> str | None:
         """Get the url of the resource on HDX

         Returns:
-            Optional[str]: Url of the resource on HDX or None if the resource is missing the id field
+            Url of the resource on HDX or None if the resource is missing the id field
         """
         id = self.data.get("id")
         if not id:
@@ -965,11 +960,11 @@ def get_hdx_url(self) -> Optional[str]:
             return None
         return f"{self.configuration.get_hdx_site_url()}/dataset/{dataset_id}/resource/{id}"

-    def get_api_url(self) -> Optional[str]:
+    def get_api_url(self) -> str | None:
         """Get the API url of the resource on HDX

         Returns:
-            Optional[str]: API url of the resource on HDX or None if the resource is missing the id field
+            API url of the resource on HDX or None if the resource is missing the id field
         """
         id = self.data.get("id")
         if not id:
diff --git a/src/hdx/data/resource_matcher.py b/src/hdx/data/resource_matcher.py
index a53125aa..c170bbd3 100755
--- a/src/hdx/data/resource_matcher.py
+++ b/src/hdx/data/resource_matcher.py
@@ -1,9 +1,8 @@
 """Helper to the Dataset class for handling matching resources."""

 import collections
-from typing import TYPE_CHECKING, List, Optional, Tuple
-
-from hdx.utilities.typehint import ListTuple
+from collections.abc import Sequence
+from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from hdx.data.resource import Resource
@@ -12,17 +11,17 @@ class ResourceMatcher:
     @staticmethod
     def match_resource_list(
-        resources1: ListTuple["Resource"],
+        resources1: Sequence["Resource"],
         resource2: "Resource",
-    ) -> Optional[int]:
+    ) -> int | None:
         """Helper method to find the index of a resource that matches a given resource

         Args:
-            resources1 (ListTuple[Resource]): List of resources
-            resource2 (Resource): Resource to match with list
+            resources1: List of resources
+            resource2: Resource to match with list

         Returns:
-            Optional[int]: Index of resource that matches in list or None
+            Index of resource that matches in list or None
         """
         id2 = resource2.get("id")
         grouping2 = resource2.get("grouping")
@@ -55,18 +54,18 @@ def match_resource_list(
     @staticmethod
     def match_resource_lists(
-        resources1: ListTuple["Resource"],
-        resources2: ListTuple["Resource"],
-    ) -> Tuple[List, List, List, List]:
+        resources1: Sequence["Resource"],
+        resources2: Sequence["Resource"],
+    ) -> tuple[list, list, list, list]:
         """Helper method to match two lists of resources returning the indices that match in two lists and that don't match in two more lists

         Args:
-            resources1 (ListTuple[Resource]): List of resources
-            resources2 (ListTuple[Resource]): List of resources to match with first list
+            resources1: List of resources
+            resources2: List of resources to match with first list

         Returns:
-            Tuple[List, List, List, List]: Returns indices that match (2 lists) and that don't match (2 lists)
+            Returns indices that match (2 lists) and that don't match (2 lists)
         """
         ids1 = [x.get("id") for x in resources1]
         groups1 = [x.get("grouping") for x in resources1]
diff --git a/src/hdx/data/resource_view.py b/src/hdx/data/resource_view.py
index 53ecec64..414812c1 100755
--- a/src/hdx/data/resource_view.py
+++ b/src/hdx/data/resource_view.py
@@ -1,13 +1,14 @@
 """Resource view class containing all logic for creating, checking, and updating resource views."""

 import logging
+from collections.abc import Sequence
 from os.path import join
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union
+
+from hdx.utilities.uuid import is_valid_uuid

 from hdx.api.configuration import Configuration
 from hdx.data.hdxobject import HDXError, HDXObject
-from hdx.utilities.typehint import ListTuple
-from hdx.utilities.uuid import is_valid_uuid

 logger = logging.getLogger(__name__)
@@ -16,25 +17,25 @@ class ResourceView(HDXObject):
     """ResourceView class containing all logic for creating, checking, and updating resource views.

     Args:
-        initial_data (Optional[Dict]): Initial resource view metadata dictionary. Defaults to None.
-        configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+        initial_data: Initial resource view metadata dictionary. Defaults to None.
+        configuration: HDX configuration. Defaults to global configuration.
     """

     def __init__(
         self,
-        initial_data: Optional[Dict] = None,
-        configuration: Optional[Configuration] = None,
+        initial_data: dict | None = None,
+        configuration: Configuration | None = None,
     ) -> None:
         if not initial_data:
             initial_data = {}
         super().__init__(initial_data, configuration=configuration)

     @staticmethod
-    def actions() -> Dict[str, str]:
+    def actions() -> dict[str, str]:
         """Dictionary of actions that can be performed on object

         Returns:
-            Dict[str, str]: Dictionary of actions that can be performed on object
+            Dictionary of actions that can be performed on object
         """
         return {
             "show": "resource_view_show",
@@ -51,7 +52,7 @@ def update_from_yaml(
         """Update resource view metadata with static metadata from YAML file

         Args:
-            path (Optional[str]): Path to YAML resource view metadata. Defaults to config/hdx_resource_view_static.yaml.
+            path: Path to YAML resource view metadata. Defaults to config/hdx_resource_view_static.yaml.

         Returns:
             None
@@ -64,7 +65,7 @@ def update_from_json(
         """Update resource view metadata with static metadata from JSON file

         Args:
-            path (Optional[str]): Path to JSON dataset metadata. Defaults to config/hdx_resource_view_static.json.
+            path: Path to JSON resource view metadata. Defaults to config/hdx_resource_view_static.json.

         Returns:
             None
@@ -73,31 +74,31 @@ def update_from_json(

     @classmethod
     def read_from_hdx(
-        cls, identifier: str, configuration: Optional[Configuration] = None
+        cls, identifier: str, configuration: Configuration | None = None
     ) -> Optional["ResourceView"]:
         """Reads the resource view given by identifier from HDX and returns ResourceView object

         Args:
-            identifier (str): Identifier of resource view
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            identifier: Identifier of resource view
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Optional[ResourceView]: ResourceView object if successful read, None if not
+            ResourceView object if successful read, None if not
         """
         return cls._read_from_hdx_class("resource view", identifier, configuration)

     @staticmethod
     def get_all_for_resource(
-        identifier: str, configuration: Optional[Configuration] = None
-    ) -> List["ResourceView"]:
+        identifier: str, configuration: Configuration | None = None
+    ) -> list["ResourceView"]:
         """Read all resource views for a resource given by identifier from HDX and returns list of ResourceView objects

         Args:
-            identifier (str): Identifier of resource
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            identifier: Identifier of resource
+            configuration: HDX configuration. Defaults to global configuration.
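+
+        Example:
+            A minimal sketch; the resource id is hypothetical and each
+            returned ResourceView behaves like a dictionary:
+
+            >>> from hdx.data.resource_view import ResourceView
+            >>> views = ResourceView.get_all_for_resource("hypothetical-resource-id")
+            >>> titles = [view["title"] for view in views]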

         Returns:
-            List[ResourceView]: List of ResourceView objects
+            List of ResourceView objects
         """
         resourceview = ResourceView(configuration=configuration)
@@ -113,12 +114,12 @@ def get_all_for_resource(
             resourceviews.append(resourceview)
         return resourceviews

-    def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None:
+    def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None:
         """Check that metadata for resource view is complete. The parameter ignore_fields
         should be set if required to any fields that should be ignored for the particular operation.

         Args:
-            ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple().
+            ignore_fields: Fields to ignore. Default is ().

         Returns:
             None
@@ -129,7 +130,7 @@ def _update_resource_view(self, log: bool = False, **kwargs: Any) -> bool:
         """Check if resource view exists in HDX and if so, update resource view

         Returns:
-            bool: True if updated and False if not
+            True if updated and False if not
         """
         updated = False
         if "id" in self.data and self._load_from_hdx("resource view", self.data["id"]):
@@ -179,11 +180,11 @@ def delete_from_hdx(self) -> None:
         """
         self._delete_from_hdx("resource view", "id")

-    def copy(self, resource_view: Union["ResourceView", Dict, str]) -> None:
+    def copy(self, resource_view: Union["ResourceView", dict, str]) -> None:
         """Copies all fields except id, resource_id and package_id from another resource view.

         Args:
-            resource_view (Union[ResourceView,Dict,str]): Either a resource view id or resource view metadata either from a ResourceView object or a dictionary
+            resource_view: Either a resource view id or resource view metadata either from a ResourceView object or a dictionary

         Returns:
             None
diff --git a/src/hdx/data/showcase.py b/src/hdx/data/showcase.py
index 6fbedfc6..fc0c6c4f 100755
--- a/src/hdx/data/showcase.py
+++ b/src/hdx/data/showcase.py
@@ -2,16 +2,17 @@

 import logging
 import sys
+from collections.abc import Sequence
 from os.path import join
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import Any, Optional, Union
+
+from hdx.utilities.dictandlist import merge_two_dictionaries
+from hdx.utilities.uuid import is_valid_uuid

 import hdx.data.dataset
 import hdx.data.vocabulary
 from hdx.api.configuration import Configuration
 from hdx.data.hdxobject import HDXObject
-from hdx.utilities.dictandlist import merge_two_dictionaries
-from hdx.utilities.typehint import ListTuple
-from hdx.utilities.uuid import is_valid_uuid

 logger = logging.getLogger(__name__)
@@ -20,8 +21,8 @@ class Showcase(HDXObject):
     """Showcase class containing all logic for creating, checking, and updating showcases.

     Args:
-        initial_data (Optional[Dict]): Initial showcase metadata dictionary. Defaults to None.
-        configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+        initial_data: Initial showcase metadata dictionary. Defaults to None.
+        configuration: HDX configuration. Defaults to global configuration.
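+
+    Example:
+        A minimal sketch of constructing a showcase; the field values are
+        hypothetical:
+
+        >>> from hdx.data.showcase import Showcase
+        >>> showcase = Showcase({"name": "my-showcase", "title": "My Showcase"})
+        >>> showcase["notes"] = "Longer description of the showcase"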
""" max_int = sys.maxsize @@ -29,19 +30,19 @@ class Showcase(HDXObject): def __init__( self, - initial_data: Optional[Dict] = None, - configuration: Optional[Configuration] = None, + initial_data: dict | None = None, + configuration: Configuration | None = None, ) -> None: if not initial_data: initial_data = {} super().__init__(initial_data, configuration=configuration) @staticmethod - def actions() -> Dict[str, str]: + def actions() -> dict[str, str]: """Dictionary of actions that can be performed on object Returns: - Dict[str, str]: Dictionary of actions that can be performed on object + Dictionary of actions that can be performed on object """ return { "show": "ckanext_showcase_show", @@ -61,7 +62,7 @@ def update_from_yaml( """Update showcase metadata with static metadata from YAML file Args: - path (Optional[str]): Path to YAML dataset metadata. Defaults to config/hdx_showcase_static.yaml. + path: Path to YAML dataset metadata. Defaults to config/hdx_showcase_static.yaml. Returns: None @@ -74,7 +75,7 @@ def update_from_json( """Update showcase metadata with static metadata from JSON file Args: - path (Optional[str]): Path to JSON dataset metadata. Defaults to config/hdx_showcase_static.json. + path: Path to JSON dataset metadata. Defaults to config/hdx_showcase_static.json. Returns: None @@ -83,25 +84,25 @@ def update_from_json( @classmethod def read_from_hdx( - cls, identifier: str, configuration: Optional[Configuration] = None + cls, identifier: str, configuration: Configuration | None = None ) -> Optional["Showcase"]: """Reads the showcase given by identifier from HDX and returns Showcase object Args: - identifier (str): Identifier of showcase - configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. + identifier: Identifier of showcase + configuration: HDX configuration. Defaults to global configuration. Returns: - Optional[Showcase]: Showcase object if successful read, None if not + Showcase object if successful read, None if not """ return cls._read_from_hdx_class("showcase", identifier, configuration) - def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None: + def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None: """Check that metadata for showcase is complete. The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation. Args: - ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple(). + ignore_fields: Fields to ignore. Default is (). Returns: None @@ -148,54 +149,54 @@ def delete_from_hdx(self) -> None: """ self._delete_from_hdx("showcase", "id") - def get_tags(self) -> List[str]: + def get_tags(self) -> list[str]: """Return the dataset's list of tags Returns: - List[str]: List of tags or [] if there are none + List of tags or [] if there are none """ return self._get_tags() def add_tag( self, tag: str, log_deleted: bool = True - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: """Add a tag Args: - tag (str): Tag to add - log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True. + tag: Tag to add + log_deleted: Whether to log informational messages about deleted tags. Defaults to True. 

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added
+            Tuple containing list of added tags and list of deleted tags and tags not added
         """
         return hdx.data.vocabulary.Vocabulary.add_mapped_tag(
             self, tag, log_deleted=log_deleted
         )

     def add_tags(
-        self, tags: ListTuple[str], log_deleted: bool = True
-    ) -> Tuple[List[str], List[str]]:
+        self, tags: Sequence[str], log_deleted: bool = True
+    ) -> tuple[list[str], list[str]]:
         """Add a list of tags

         Args:
-            tags (ListTuple[str]): List of tags to add
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
+            tags: List of tags to add
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added
+            Tuple containing list of added tags and list of deleted tags and tags not added
         """
         return hdx.data.vocabulary.Vocabulary.add_mapped_tags(
             self, tags, log_deleted=log_deleted
         )

-    def clean_tags(self, log_deleted: bool = True) -> Tuple[List[str], List[str]]:
+    def clean_tags(self, log_deleted: bool = True) -> tuple[list[str], list[str]]:
         """Clean tags in an HDX object according to tags cleanup spreadsheet

         Args:
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of mapped tags and list of deleted tags and tags not added
+            Tuple containing list of mapped tags and list of deleted tags and tags not added
         """
         return hdx.data.vocabulary.Vocabulary.clean_tags(self, log_deleted=log_deleted)
@@ -203,20 +204,20 @@ def remove_tag(self, tag: str) -> bool:
         """Remove a tag

         Args:
-            tag (str): Tag to remove
+            tag: Tag to remove

         Returns:
-            bool: True if tag removed or False if not
+            True if tag removed or False if not
         """
         return self._remove_hdxobject(
             self.data.get("tags"), tag.lower(), matchon="name"
         )

-    def get_datasets(self) -> List["Dataset"]:  # noqa: F821
+    def get_datasets(self) -> list["Dataset"]:  # noqa: F821
         """Get any datasets in the showcase

         Returns:
-            List[Dataset]: List of datasets
+            List of datasets
         """
         assoc_result, datasets_dicts = self._read_from_hdx(
             "showcase",
@@ -235,15 +236,15 @@ def get_datasets(self) -> List["Dataset"]:  # noqa: F821

     def _get_showcase_dataset_dict(
         self,
-        dataset: Union["Dataset", Dict, str],  # noqa: F821
-    ) -> Dict:
+        dataset: Union["Dataset", dict, str],  # noqa: F821
+    ) -> dict:
         """Get showcase dataset dict

         Args:
-            showcase (Union[Showcase,Dict,str]): Either a showcase id or Showcase metadata from a Showcase object or dictionary
+            dataset: Either a dataset id or dataset metadata from a Dataset object or dictionary

         Returns:
-            Dict: showcase dataset dict
+            showcase dataset dict
         """
         if isinstance(dataset, hdx.data.dataset.Dataset) or isinstance(dataset, dict):
             if "id" not in dataset:
@@ -259,17 +260,17 @@ def _get_showcase_dataset_dict(

     def add_dataset(
         self,
-        dataset: Union["Dataset", Dict, str],  # noqa: F821
-        datasets_to_check: ListTuple["Dataset"] = None,  # noqa: F821
+        dataset: Union["Dataset", dict, str],  # noqa: F821
+        datasets_to_check: Sequence["Dataset"] | None = None,  # noqa: F821
     ) -> bool:
         """Add a dataset

         Args:
-            dataset (Union[Dataset,Dict,str]): Either a dataset id or dataset metadata either from a Dataset object or a dictionary
-            datasets_to_check (ListTuple[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase.
+            dataset: Either a dataset id or dataset metadata either from a Dataset object or a dictionary
+            datasets_to_check: List of datasets against which to check existence of dataset. Defaults to datasets in showcase.

         Returns:
-            bool: True if the dataset was added, False if already present
+            True if the dataset was added, False if already present
         """
         showcase_dataset = self._get_showcase_dataset_dict(dataset)
         if datasets_to_check is None:
@@ -282,17 +283,17 @@ def add_dataset(

     def add_datasets(
         self,
-        datasets: ListTuple[Union["Dataset", Dict, str]],  # noqa: F821
-        datasets_to_check: ListTuple["Dataset"] = None,  # noqa: F821
+        datasets: Sequence[Union["Dataset", dict, str]],  # noqa: F821
+        datasets_to_check: Sequence["Dataset"] | None = None,  # noqa: F821
     ) -> bool:
         """Add multiple datasets

         Args:
-            datasets (ListTuple[Union[Dataset,Dict,str]]): A list of either dataset ids or dataset metadata from Dataset objects or dictionaries
-            datasets_to_check (ListTuple[Dataset]): List of datasets against which to check existence of dataset. Defaults to datasets in showcase.
+            datasets: A list of either dataset ids or dataset metadata from Dataset objects or dictionaries
+            datasets_to_check: List of datasets against which to check existence of dataset. Defaults to datasets in showcase.

         Returns:
-            bool: True if all datasets added or False if any already present
+            True if all datasets added or False if any already present
         """
         if datasets_to_check is None:
             datasets_to_check = self.get_datasets()
@@ -304,12 +305,12 @@ def add_datasets(

     def remove_dataset(
         self,
-        dataset: Union["Dataset", Dict, str],  # noqa: F821
+        dataset: Union["Dataset", dict, str],  # noqa: F821
     ) -> None:
         """Remove a dataset

         Args:
-            dataset (Union[Dataset,Dict,str]): Either a dataset id or dataset metadata either from a Dataset object or a dictionary
+            dataset: Either a dataset id or dataset metadata either from a Dataset object or a dictionary

         Returns:
             None
@@ -323,17 +324,17 @@ def remove_dataset(

     @classmethod
     def search_in_hdx(
         cls,
-        query: Optional[str] = "*:*",
-        configuration: Optional[Configuration] = None,
+        query: str | None = "*:*",
+        configuration: Configuration | None = None,
         page_size: int = 1000,
         **kwargs: Any,
-    ) -> List["Showcase"]:
+    ) -> list["Showcase"]:
         """Searches for datasets in HDX

         Args:
-            query (Optional[str]): Query (in Solr format). Defaults to '*:*'.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
-            page_size (int): Size of page to return. Defaults to 1000.
+            query: Query (in Solr format). Defaults to '*:*'.
+            configuration: HDX configuration. Defaults to global configuration.
+            page_size: Size of page to return. Defaults to 1000.
             **kwargs: See below
             fq (string): Any filter queries to apply
             sort (string): Sorting of the search results. Defaults to 'relevance asc, metadata_modified desc'.
             rows (int): Number of matching rows to return. Defaults to all datasets (sys.maxsize).
             start (int): Offset in the complete result for where the set of returned datasets should begin
             facet (string): Whether to enable faceted results. Default to True.
             facet.mincount (int): Minimum counts for facet fields should be included in the results
             facet.limit (int): Maximum number of values the facet fields return (- = unlimited). Defaults to 50.
-            facet.field (List[str]): Fields to facet upon. Default is empty.
+            facet.field (list[str]): Fields to facet upon. Default is empty.
             use_default_schema (bool): Use default package schema instead of custom schema. Defaults to False.
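+
+        Example:
+            A minimal sketch; the query and row limit are illustrative:
+
+            >>> from hdx.data.showcase import Showcase
+            >>> showcases = Showcase.search_in_hdx("health", rows=10)
+            >>> titles = [showcase["title"] for showcase in showcases]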

         Returns:
-            List[Dataset]: list of datasets resulting from query
+            List of showcases resulting from query
         """
         curfq = kwargs.get("fq")
         kwargs["fq"] = "dataset_type:showcase"
@@ -369,21 +370,21 @@ def search_in_hdx(

     @classmethod
     def get_all_showcases(
         cls,
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
         page_size: int = 1000,
         **kwargs: Any,
-    ) -> List["Showcase"]:
+    ) -> list["Showcase"]:
         """Get all showcases in HDX

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
-            page_size (int): Size of page to return. Defaults to 1000.
+            configuration: HDX configuration. Defaults to global configuration.
+            page_size: Size of page to return. Defaults to 1000.
             **kwargs: See below
             rows (int): Number of rows to return. Defaults to all showcases (sys.maxsize)
             start (int): Offset in the complete result for where the set of returned showcases should begin

         Returns:
-            List[Showcase]: list of all showcases in HDX
+            list of all showcases in HDX
         """
         return cls.search_in_hdx(
             configuration=configuration, page_size=page_size, **kwargs
diff --git a/src/hdx/data/user.py b/src/hdx/data/user.py
index e54c6fa8..9596c98b 100755
--- a/src/hdx/data/user.py
+++ b/src/hdx/data/user.py
@@ -1,13 +1,13 @@
 """User class containing all logic for creating, checking, and updating users."""

 import logging
+from collections.abc import Sequence
 from os.path import join
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional

 import hdx.data.organization
 from hdx.api.configuration import Configuration
 from hdx.data.hdxobject import HDXError, HDXObject
-from hdx.utilities.typehint import ListTuple

 logger = logging.getLogger(__name__)
@@ -16,25 +16,25 @@ class User(HDXObject):
     """User class containing all logic for creating, checking, and updating users.

     Args:
-        initial_data (Optional[Dict]): Initial user metadata dictionary. Defaults to None.
-        configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+        initial_data: Initial user metadata dictionary. Defaults to None.
+        configuration: HDX configuration. Defaults to global configuration.
     """

     def __init__(
         self,
-        initial_data: Optional[Dict] = None,
-        configuration: Optional[Configuration] = None,
+        initial_data: dict | None = None,
+        configuration: Configuration | None = None,
     ) -> None:
         if not initial_data:
             initial_data = {}
         super().__init__(initial_data, configuration=configuration)

     @staticmethod
-    def actions() -> Dict[str, str]:
+    def actions() -> dict[str, str]:
         """Dictionary of actions that can be performed on object

         Returns:
-            Dict[str, str]: Dictionary of actions that can be performed on object
+            Dictionary of actions that can be performed on object
         """
         return {
             "show": "user_show",
@@ -53,7 +53,7 @@ def update_from_yaml(
         """Update user metadata with static metadata from YAML file

         Args:
-            path (Optional[str]): Path to YAML dataset metadata. Defaults to config/hdx_user_static.yaml.
+            path: Path to YAML user metadata. Defaults to config/hdx_user_static.yaml.

         Returns:
             None
@@ -66,7 +66,7 @@ def update_from_json(
         """Update user metadata with static metadata from JSON file

         Args:
-            path (Optional[str]): Path to JSON dataset metadata. Defaults to config/hdx_user_static.json.
+            path: Path to JSON user metadata. Defaults to config/hdx_user_static.json.
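+
+        Example:
+            A minimal sketch, assuming ``user`` is an existing User and the
+            path points at a JSON file of static metadata:
+
+            >>> from os.path import join
+            >>> user.update_from_json(join("config", "hdx_user_static.json"))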

         Returns:
             None
@@ -75,25 +75,25 @@

     @classmethod
     def read_from_hdx(
-        cls, identifier: str, configuration: Optional[Configuration] = None
+        cls, identifier: str, configuration: Configuration | None = None
     ) -> Optional["User"]:
         """Reads the user given by identifier from HDX and returns User object

         Args:
-            identifier (str): Identifier of user
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            identifier: Identifier of user
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Optional[User]: User object if successful read, None if not
+            User object if successful read, None if not
         """
         return cls._read_from_hdx_class("user", identifier, configuration)

-    def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None:
+    def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None:
         """Check that metadata for user is complete. The parameter ignore_fields
         should be set if required to any fields that should be ignored for the particular operation.

         Args:
-            ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple().
+            ignore_fields: Fields to ignore. Default is ().

         Returns:
             None
@@ -140,17 +140,17 @@ def email(
         self,
         subject: str,
         text_body: str,
-        html_body: Optional[str] = None,
-        sender: Optional[str] = None,
+        html_body: str | None = None,
+        sender: str | None = None,
         **kwargs: Any,
     ) -> None:
         """Emails a user.

         Args:
-            subject (str): Email subject
-            text_body (str): Plain text email body
-            html_body (str): HTML email body
-            sender (Optional[str]): Email sender. Defaults to SMTP username.
+            subject: Email subject
+            text_body: Plain text email body
+            html_body: HTML email body
+            sender: Email sender. Defaults to SMTP username.
             **kwargs: See below
             mail_options (List): Mail options (see smtplib documentation)
             rcpt_options (List): Recipient options (see smtplib documentation)
@@ -168,14 +168,14 @@ def email(
         )

     @staticmethod
-    def get_current_user(configuration: Optional[Configuration] = None) -> "User":
+    def get_current_user(configuration: Configuration | None = None) -> "User":
         """Get current user (based on authorisation from API token)

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            User: Current user
+            Current user
         """
         user = User(configuration=configuration)
         user._save_to_hdx("show", {})
@@ -183,18 +183,18 @@ def get_current_user(configuration: Optional[Configuration] = None) -> "User":

     @staticmethod
     def get_all_users(
-        configuration: Optional[Configuration] = None, **kwargs: Any
-    ) -> List["User"]:
+        configuration: Configuration | None = None, **kwargs: Any
+    ) -> list["User"]:
         """Get all users in HDX

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.
             **kwargs: See below
             q (str): Restrict to names containing a string. Defaults to all users.
             order_by (str): Field by which to sort - any user field or edits (number_of_edits). Defaults to 'name'.
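+
+        Example:
+            A minimal sketch; the search string is illustrative:
+
+            >>> from hdx.data.user import User
+            >>> users = User.get_all_users(q="jdoe", order_by="name")
+            >>> names = [user["name"] for user in users]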

         Returns:
-            List[User]: List of all users in HDX
+            List of all users in HDX
         """
         user = User(configuration=configuration)
         result = user._write_to_hdx("list", kwargs)
@@ -209,27 +209,27 @@

     @staticmethod
     def email_users(
-        users: ListTuple["User"],
+        users: Sequence["User"],
         subject: str,
         text_body: str,
-        html_body: Optional[str] = None,
-        sender: Optional[str] = None,
-        cc: Optional[ListTuple["User"]] = None,
-        bcc: Optional[ListTuple["User"]] = None,
-        configuration: Optional[Configuration] = None,
+        html_body: str | None = None,
+        sender: str | None = None,
+        cc: Sequence["User"] | None = None,
+        bcc: Sequence["User"] | None = None,
+        configuration: Configuration | None = None,
         **kwargs: Any,
     ) -> None:
         """Email a list of users

         Args:
-            users (ListTuple[User]): List of users in To address
-            subject (str): Email subject
-            text_body (str): Plain text email body
-            html_body (str): HTML email body
-            sender (Optional[str]): Email sender. Defaults to SMTP username.
-            cc (Optional[ListTuple[User]]: List of users to cc. Defaults to None.
-            bcc (Optional[ListTuple[User]]: List of users to bcc. Defaults to None.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to configuration of first user in list.
+            users: List of users in To address
+            subject: Email subject
+            text_body: Plain text email body
+            html_body: HTML email body
+            sender: Email sender. Defaults to SMTP username.
+            cc: List of users to cc. Defaults to None.
+            bcc: List of users to bcc. Defaults to None.
+            configuration: HDX configuration. Defaults to configuration of first user in list.
             **kwargs: See below
             mail_options (List): Mail options (see smtplib documentation)
             rcpt_options (List): Recipient options (see smtplib documentation)
@@ -261,14 +261,14 @@ def email_users(
             **kwargs,
         )

-    def get_organization_dicts(self, permission: str = "read") -> List[Dict]:  # noqa: F821
+    def get_organization_dicts(self, permission: str = "read") -> list[dict]:  # noqa: F821
         """Get organization dictionaries (not organization objects) in HDX that this user is a member of.

         Args:
-            permission (str): Permission to check for. Defaults to 'read'.
+            permission: Permission to check for. Defaults to 'read'.

         Returns:
-            List[Dict]: List of organization dicts in HDX that this user is a member of
+            List of organization dicts in HDX that this user is a member of
         """
         success, result = self._read_from_hdx(
             "user",
@@ -281,14 +281,14 @@ def get_organization_dicts(self, permission: str = "read") -> List[Dict]:  # noqa: F821
             return result
         return []

-    def get_organizations(self, permission: str = "read") -> List["Organization"]:  # noqa: F821
+    def get_organizations(self, permission: str = "read") -> list["Organization"]:  # noqa: F821
         """Get organizations in HDX that this user is a member of.

         Args:
-            permission (str): Permission to check for. Defaults to 'read'.
+            permission: Permission to check for. Defaults to 'read'.

         Returns:
-            List[Organization]: List of organizations in HDX that this user is a member of
+            List of organizations in HDX that this user is a member of
         """
         result = self.get_organization_dicts(permission)
         organizations = []
@@ -305,11 +305,11 @@ def check_organization_access(
         """Check user is a member of a given organization.

         Args:
-            organization (str): Organization id or name.
-            permission (str): Permission to check for. Defaults to 'read'.
+            organization: Organization id or name.
+            permission: Permission to check for. Defaults to 'read'.
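+
+        Example:
+            A minimal sketch, assuming ``user`` is an existing User; the
+            organization name is hypothetical:
+
+            >>> has_access = user.check_organization_access(
+            ...     "hypothetical-org", permission="read"
+            ... )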

         Returns:
-            bool: True if the logged in user is a member of the organization.
+            True if the logged in user is a member of the organization.
         """
         for organization_dict in self.get_organization_dicts(permission):
             if organization_dict["id"] == organization:
@@ -322,16 +322,16 @@ def check_organization_access(
     def get_current_user_organization_dicts(
         cls,
         permission: str = "read",
-        configuration: Optional[Configuration] = None,
-    ) -> List["Organization"]:  # noqa: F821
+        configuration: Configuration | None = None,
+    ) -> list[dict]:
         """Get organization dictionaries (not Organization objects) in HDX that the logged in user is a member of.

         Args:
-            permission (str): Permission to check for. Defaults to 'read'.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            permission: Permission to check for. Defaults to 'read'.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[Dict]: List of organization dicts in HDX that logged in user is a member of
+            List of organization dicts in HDX that logged in user is a member of
         """
         user = User(configuration=configuration)
         try:
@@ -345,16 +345,16 @@ def get_current_user_organization_dicts(
     def get_current_user_organizations(
         cls,
         permission: str = "read",
-        configuration: Optional[Configuration] = None,
-    ) -> List["Organization"]:  # noqa: F821
+        configuration: Configuration | None = None,
+    ) -> list["Organization"]:  # noqa: F821
         """Get organizations in HDX that the logged in user is a member of.

         Args:
-            permission (str): Permission to check for. Defaults to 'read'.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            permission: Permission to check for. Defaults to 'read'.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[Organization]: List of organizations in HDX that logged in user is a member of
+            List of organizations in HDX that logged in user is a member of
         """
         result = cls.get_current_user_organization_dicts(permission, configuration)
         organizations = []
@@ -370,17 +370,17 @@ def check_current_user_organization_access(
         cls,
         organization: str,
         permission: str = "read",
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
     ) -> bool:
         """Check logged in user is a member of a given organization.

         Args:
-            organization (str): Organization id or name.
-            permission (str): Permission to check for. Defaults to 'read'.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            organization: Organization id or name.
+            permission: Permission to check for. Defaults to 'read'.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            bool: True if the logged in user is a member of the organization.
+            True if the logged in user is a member of the organization.
         """
         for organization_dict in cls.get_current_user_organization_dicts(
             permission, configuration
@@ -396,19 +396,19 @@ def check_current_user_write_access(
         cls,
         organization: str,
         permission: str = "create_dataset",
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
     ) -> "User":
         """Check logged in user has write access to a given organization. Raises
         PermissionError if the user does not have access otherwise logs and
         returns the current username.

         Args:
-            organization (str): Organization id or name.
-            permission (str): Permission to check for. Defaults to 'create_dataset'.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            organization: Organization id or name.
+            permission: Permission to check for. Defaults to 'create_dataset'.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            str: Username of current user
+            Username of current user
         """
         if configuration is None:
             configuration = Configuration.read()
@@ -441,7 +441,7 @@ def get_token_list(self):
         """Get API tokens for user.

         Returns:
-            List[Dict]: List of API token details
+            List of API token details
         """
         success, result = self._read_from_hdx(
             "user",
@@ -458,16 +458,16 @@ def autocomplete(
         cls,
         name: str,
         limit: int = 20,
-        configuration: Optional[Configuration] = None,
-    ) -> List:
+        configuration: Configuration | None = None,
+    ) -> list:
         """Autocomplete a user name and return matches

         Args:
-            name (str): Name to autocomplete
-            limit (int): Maximum number of matches to return
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            name: Name to autocomplete
+            limit: Maximum number of matches to return
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List: Autocomplete matches
+            Autocomplete matches
         """
         return cls._autocomplete(name, limit, configuration)
diff --git a/src/hdx/data/vocabulary.py b/src/hdx/data/vocabulary.py
index c7af4a59..9ea27b5e 100755
--- a/src/hdx/data/vocabulary.py
+++ b/src/hdx/data/vocabulary.py
@@ -2,13 +2,14 @@

 import logging
 from collections import OrderedDict
+from collections.abc import Sequence
 from os.path import join
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Optional
+
+from hdx.utilities.downloader import Download

 from hdx.api.configuration import Configuration
 from hdx.data.hdxobject import HDXObject
-from hdx.utilities.downloader import Download
-from hdx.utilities.typehint import ListTuple

 logger = logging.getLogger(__name__)
@@ -21,10 +22,10 @@ class Vocabulary(HDXObject):
     """Vocabulary class containing all logic for creating, checking, and updating vocabularies.

     Args:
-        initial_data (Optional[Dict]): Initial dataset metadata dictionary. Defaults to None.
-        name (str): Name of vocabulary
-        tags (Optional[List]): Initial list of tags. Defaults to None.
-        configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+        initial_data: Initial dataset metadata dictionary. Defaults to None.
+        name: Name of vocabulary
+        tags: Initial list of tags. Defaults to None.
+        configuration: HDX configuration. Defaults to global configuration.
     """

     _approved_vocabulary = None
@@ -32,10 +33,10 @@ class Vocabulary(HDXObject):

     def __init__(
         self,
-        initial_data: Optional[Dict] = None,
+        initial_data: dict | None = None,
         name: str = None,
-        tags: Optional[List] = None,
-        configuration: Optional[Configuration] = None,
+        tags: list | None = None,
+        configuration: Configuration | None = None,
     ) -> None:
         if not initial_data:
             initial_data = {}
@@ -46,11 +47,11 @@ def __init__(
             self.add_tags(tags)

     @staticmethod
-    def actions() -> Dict[str, str]:
+    def actions() -> dict[str, str]:
         """Dictionary of actions that can be performed on object

         Returns:
-            Dict[str, str]: Dictionary of actions that can be performed on object
+            Dictionary of actions that can be performed on object
         """
         return {
             "show": "vocabulary_show",
@@ -67,7 +68,7 @@ def update_from_yaml(
         """Update vocabulary metadata with static metadata from YAML file

         Args:
-            path (Optional[str]): Path to YAML dataset metadata. Defaults to config/hdx_vocabulary_static.yaml.
+            path: Path to YAML vocabulary metadata. Defaults to config/hdx_vocabulary_static.yaml.

         Returns:
             None
@@ -80,7 +81,7 @@ def update_from_json(
         """Update vocabulary metadata with static metadata from JSON file

         Args:
-            path (Optional[str]): Path to JSON dataset metadata. Defaults to config/hdx_vocabulary_static.json.
+            path: Path to JSON vocabulary metadata. Defaults to config/hdx_vocabulary_static.json.

         Returns:
             None
@@ -89,31 +90,31 @@ def update_from_json(

     @classmethod
     def read_from_hdx(
-        cls, identifier: str, configuration: Optional[Configuration] = None
+        cls, identifier: str, configuration: Configuration | None = None
     ) -> Optional["Vocabulary"]:
         """Reads the vocabulary given by identifier from HDX and returns Vocabulary object

         Args:
-            identifier (str): Identifier of vocabulary
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            identifier: Identifier of vocabulary
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Optional[Vocabulary]: Vocabulary object if successful read, None if not
+            Vocabulary object if successful read, None if not
         """
         return cls._read_from_hdx_class("vocabulary", identifier, configuration)

     @staticmethod
     def get_all_vocabularies(
-        configuration: Optional[Configuration] = None,
-    ) -> List["Vocabulary"]:
+        configuration: Configuration | None = None,
+    ) -> list["Vocabulary"]:
         """Get all vocabulary names in HDX

         Args:
-            identifier (str): Identifier of resource
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[Vocabulary]: List of Vocabulary objects
+            List of Vocabulary objects
         """
         vocabulary = Vocabulary(configuration=configuration)
@@ -123,12 +124,12 @@ def get_all_vocabularies(
             vocabularies.append(Vocabulary(vocabularydict, configuration=configuration))
         return vocabularies

-    def check_required_fields(self, ignore_fields: ListTuple[str] = tuple()) -> None:
+    def check_required_fields(self, ignore_fields: Sequence[str] = ()) -> None:
         """Check that metadata for vocabulary is complete. The parameter ignore_fields
         should be set if required to any fields that should be ignored for the particular operation.

         Args:
-            ignore_fields (ListTuple[str]): Fields to ignore. Default is tuple().
+            ignore_fields: Fields to ignore. Default is ().

         Returns:
             None
@@ -155,7 +156,7 @@ def delete_from_hdx(self, empty: bool = True) -> None:
         """Deletes a vocabulary from HDX. First tags are removed then vocabulary is deleted.

         Args:
-            empty (bool): Remove all tags and update before deleting. Defaults to True.
+            empty: Remove all tags and update before deleting. Defaults to True.
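+
+        Example:
+            A minimal sketch, assuming ``vocabulary`` is an existing
+            Vocabulary read from HDX:
+
+            >>> # Empty the vocabulary of its tags, then delete it
+            >>> vocabulary.delete_from_hdx(empty=True)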

         Returns:
             None
@@ -167,11 +168,11 @@ def delete_from_hdx(self, empty: bool = True) -> None:
             )
         self._delete_from_hdx("vocabulary", "id")

-    def get_tags(self) -> List[str]:
+    def get_tags(self) -> list[str]:
         """Lists tags in vocabulary

         Returns:
-            List[str]: List of tags in vocabulary
+            List of tags in vocabulary
         """
         return self._get_tags()
@@ -179,21 +180,21 @@ def add_tag(self, tag: str) -> bool:
         """Add a tag

         Args:
-            tag (str): Tag to add
+            tag: Tag to add

         Returns:
-            bool: True if tag added or False if tag already present
+            True if tag added or False if tag already present
         """
         return self._add_tag(tag)

-    def add_tags(self, tags: ListTuple[str]) -> List[str]:
+    def add_tags(self, tags: Sequence[str]) -> list[str]:
         """Add a list of tags

         Args:
-            tags (ListTuple[str]): list of tags to add
+            tags: list of tags to add

         Returns:
-            List[str]: Tags that were successfully added
+            Tags that were successfully added
         """
         return self._add_tags(tags)
@@ -201,10 +202,10 @@ def remove_tag(self, tag: str) -> bool:
         """Remove a tag

         Args:
-            tag (str): Tag to remove
+            tag: Tag to remove

         Returns:
-            bool: True if tag removed or False if not
+            True if tag removed or False if not
         """
         return self._remove_hdxobject(
             self.data.get("tags"), tag.lower(), matchon="name"
         )
@@ -212,16 +213,16 @@

     @classmethod
     def get_approved_vocabulary(
-        cls, configuration: Optional[Configuration] = None
+        cls, configuration: Configuration | None = None
     ) -> "Vocabulary":
         """
         Get the HDX approved vocabulary

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Vocabulary: HDX Vocabulary object
+            HDX Vocabulary object
         """
         if cls._approved_vocabulary is None:
             if configuration is None:
@@ -235,18 +236,18 @@ def get_approved_vocabulary(
     @classmethod
     def _read_approved_tags(
         cls,
-        url: Optional[str] = None,
-        configuration: Optional[Configuration] = None,
-    ) -> List[str]:
+        url: str | None = None,
+        configuration: Configuration | None = None,
+    ) -> list[str]:
         """
         Read approved tags

         Args:
-            url (Optional[str]): Url to read approved tags from. Defaults to None (url in internal configuration).
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            url: Url to read approved tags from. Defaults to None (url in internal configuration).
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[str]: List of approved tags
+            List of approved tags
         """
         with Download(
             full_agent=configuration.get_user_agent(), use_env=False
@@ -262,18 +263,18 @@ def _read_approved_tags(
     @classmethod
     def create_approved_vocabulary(
         cls,
-        url: Optional[str] = None,
-        configuration: Optional[Configuration] = None,
+        url: str | None = None,
+        configuration: Configuration | None = None,
     ) -> "Vocabulary":
         """
         Create the HDX approved vocabulary

         Args:
-            url (Optional[str]): Tag to check. Defaults to None (url in internal configuration).
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            url: Url to read approved tags from. Defaults to None (url in internal configuration).
+            configuration: HDX configuration. Defaults to global configuration.
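+
+        Example:
+            A minimal sketch; this writes to HDX, so it needs a
+            configuration with suitable permissions:
+
+            >>> from hdx.data.vocabulary import Vocabulary
+            >>> vocabulary = Vocabulary.create_approved_vocabulary()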

         Returns:
-            Vocabulary: HDX Vocabulary object
+            HDX Vocabulary object
         """
         if configuration is None:
             configuration = Configuration.read()
@@ -288,20 +289,20 @@

     @classmethod
     def update_approved_vocabulary(
         cls,
-        url: Optional[str] = None,
-        configuration: Optional[Configuration] = None,
+        url: str | None = None,
+        configuration: Configuration | None = None,
         replace: bool = True,
     ) -> "Vocabulary":
         """
         Update the HDX approved vocabulary

         Args:
-            url (Optional[str]): Tag to check. Defaults to None (url in internal configuration).
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
-            replace (bool): True to replace existing tags, False to append. Defaults to True.
+            url: Url to read approved tags from. Defaults to None (url in internal configuration).
+            configuration: HDX configuration. Defaults to global configuration.
+            replace: True to replace existing tags, False to append. Defaults to True.

         Returns:
-            Vocabulary: HDX Vocabulary object
+            HDX Vocabulary object
         """
         if configuration is None:
             configuration = Configuration.read()
@@ -316,13 +317,13 @@

     @classmethod
     def delete_approved_vocabulary(
-        cls, configuration: Optional[Configuration] = None
+        cls, configuration: Configuration | None = None
     ) -> None:
         """
         Delete the approved vocabulary

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
             None
@@ -333,15 +334,15 @@ def delete_approved_vocabulary(
             vocabulary.delete_from_hdx()

     @classmethod
-    def approved_tags(cls, configuration: Optional[Configuration] = None) -> List[str]:
+    def approved_tags(cls, configuration: Configuration | None = None) -> list[str]:
         """
         Return list of approved tags

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            List[str]: List of approved tags
+            List of approved tags
         """
         return [
             x["name"]
@@ -349,18 +350,16 @@ def approved_tags(cls, configuration: Optional[Configuration] = None) -> List[st
         ]

     @classmethod
-    def is_approved(
-        cls, tag: str, configuration: Optional[Configuration] = None
-    ) -> bool:
+    def is_approved(cls, tag: str, configuration: Configuration | None = None) -> bool:
         """
         Return if tag is an approved one or not

         Args:
-            tag (str): Tag to check
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            tag: Tag to check
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            bool: True if tag is approved, False if not
+            True if tag is approved, False if not
         """
         if tag.lower() in cls.approved_tags(configuration):
             return True
@@ -369,22 +368,22 @@ def is_approved(
     @classmethod
     def read_tags_mappings(
         cls,
-        configuration: Optional[Configuration] = None,
-        url: Optional[str] = None,
+        configuration: Configuration | None = None,
+        url: str | None = None,
         keycolumn: int = 1,
         failchained: bool = True,
-    ) -> Dict:
+    ) -> dict:
         """
         Read tag mappings and setup tags cleanup dictionaries

         Args:
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
-            url (Optional[str]): Url of tags cleanup spreadsheet. Defaults to None (internal configuration parameter).
-            keycolumn (int): Column number of tag column in spreadsheet. Defaults to 1.
-            failchained (bool): Fail if chained rules found. Defaults to True.
+            configuration: HDX configuration. Defaults to global configuration.
+            url: Url of tags cleanup spreadsheet. Defaults to None (internal configuration parameter).
+            keycolumn: Column number of tag column in spreadsheet. Defaults to 1.
+            failchained: Fail if chained rules found. Defaults to True.

         Returns:
-            Dict: Returns Tags dictionary
+            Returns Tags dictionary
         """
         if not cls._tags_dict:
             if configuration is None:
@@ -434,12 +433,12 @@ def read_tags_mappings(
         return cls._tags_dict

     @classmethod
-    def set_tagsdict(cls, tags_dict: Dict) -> None:
+    def set_tagsdict(cls, tags_dict: dict) -> None:
         """
         Set tags dictionary

         Args:
-            tags_dict (Dict): Tags dictionary
+            tags_dict: Tags dictionary

         Returns:
             None
@@ -451,17 +450,17 @@ def set_tagsdict(cls, tags_dict: Dict) -> None:
     def get_mapped_tag(
         cls,
         tag: str,
         log_deleted: bool = True,
-        configuration: Optional[Configuration] = None,
-    ) -> Tuple[List[str], List[str]]:
+        configuration: Configuration | None = None,
+    ) -> tuple[list[str], list[str]]:
         """Given a tag, return a list of tag(s) to which it maps and any deleted tags

         Args:
-            tag (str): Tag to map
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            tag: Tag to map
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of mapped tag(s) and list of deleted tags
+            Tuple containing list of mapped tag(s) and list of deleted tags
         """
         if configuration is None:
             configuration = Configuration.read()
@@ -509,19 +508,19 @@ def get_mapped_tag(
     @classmethod
     def get_mapped_tags(
         cls,
-        tags: ListTuple[str],
+        tags: Sequence[str],
         log_deleted: bool = True,
-        configuration: Optional[Configuration] = None,
-    ) -> Tuple[List[str], List[str]]:
+        configuration: Configuration | None = None,
+    ) -> tuple[list[str], list[str]]:
         """Given a list of tags, return a list of tags to which they map and any deleted tags

         Args:
-            tags (ListTuple[str]): List of tags to map
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            tags: List of tags to map
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.
+            configuration: HDX configuration. Defaults to global configuration.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of mapped tags and list of deleted tags
+            Tuple containing list of mapped tags and list of deleted tags
         """
         new_tags = []
         deleted_tags = []
@@ -538,16 +537,16 @@ def get_mapped_tags(
     @classmethod
     def add_mapped_tag(
         cls, hdxobject: HDXObject, tag: str, log_deleted: bool = True
-    ) -> Tuple[List[str], List[str]]:
+    ) -> tuple[list[str], list[str]]:
         """Add a tag to an HDX object that has tags

         Args:
-            hdxobject (HDXObject): HDX object such as dataset
-            tag (str): Tag to add
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
+            hdxobject: HDX object such as dataset
+            tag: Tag to add
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.
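+
+        Example:
+            A minimal sketch, assuming ``dataset`` is an existing Dataset:
+
+            >>> from hdx.data.vocabulary import Vocabulary
+            >>> added, rejected = Vocabulary.add_mapped_tag(dataset, "health")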

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added
+            Tuple containing list of added tags and list of deleted tags and tags not added
         """
         return cls.add_mapped_tags(hdxobject, [tag], log_deleted=log_deleted)
@@ -555,18 +554,18 @@ def add_mapped_tag(
     def add_mapped_tags(
         cls,
         hdxobject: HDXObject,
-        tags: ListTuple[str],
+        tags: Sequence[str],
         log_deleted: bool = True,
-    ) -> Tuple[List[str], List[str]]:
+    ) -> tuple[list[str], list[str]]:
         """Add a list of tag to an HDX object that has tags

         Args:
-            hdxobject (HDXObject): HDX object such as dataset
-            tags (ListTuple[str]): List of tags to add
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
+            hdxobject: HDX object such as dataset
+            tags: List of tags to add
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of added tags and list of deleted tags and tags not added
+            Tuple containing list of added tags and list of deleted tags and tags not added
         """
         new_tags, deleted_tags = cls.get_mapped_tags(
             tags,
@@ -584,15 +583,15 @@ def add_mapped_tags(
     @classmethod
     def clean_tags(
         cls, hdxobject: HDXObject, log_deleted: bool = True
-    ) -> Tuple[List[str], List[str]]:
+    ) -> tuple[list[str], list[str]]:
         """Clean tags in an HDX object according to tags cleanup spreadsheet

         Args:
-            hdxobject (HDXObject): HDX object such as dataset
-            log_deleted (bool): Whether to log informational messages about deleted tags. Defaults to True.
+            hdxobject: HDX object such as dataset
+            log_deleted: Whether to log informational messages about deleted tags. Defaults to True.

         Returns:
-            Tuple[List[str], List[str]]: Tuple containing list of mapped tags and list of deleted tags and tags not added
+            Tuple containing list of mapped tags and list of deleted tags and tags not added
         """
         tags = hdxobject._get_tags()
         hdxobject["tags"] = []
@@ -603,19 +602,19 @@ def clean_tags(
     def autocomplete(
         cls,
         name: str,
         limit: int = 20,
-        configuration: Optional[Configuration] = None,
+        configuration: Configuration | None = None,
         **kwargs: Any,
-    ) -> List:
+    ) -> list:
         """Autocomplete a tag name and return matches

         Args:
-            name (str): Name to autocomplete
-            limit (int): Maximum number of matches to return
-            configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration.
+            name: Name to autocomplete
+            limit: Maximum number of matches to return
+            configuration: HDX configuration. Defaults to global configuration.
             **kwargs:
-            offset (int): The offset to start returning tags from.
+            offset: The offset to start returning tags from.
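+
+        Example:
+            A minimal sketch; the prefix and limit are illustrative:
+
+            >>> from hdx.data.vocabulary import Vocabulary
+            >>> matches = Vocabulary.autocomplete("hea", limit=5)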

         Returns:
-            List: Autocomplete matches
+            Autocomplete matches
         """
         return cls._autocomplete(name, limit, configuration, **kwargs)
diff --git a/src/hdx/facades/infer_arguments.py b/src/hdx/facades/infer_arguments.py
index a3165e9e..5b808b2e 100755
--- a/src/hdx/facades/infer_arguments.py
+++ b/src/hdx/facades/infer_arguments.py
@@ -2,16 +2,17 @@

 import logging
 import sys
+from collections.abc import Callable  # noqa: F401
 from inspect import getdoc
-from typing import Any, Callable, Optional  # noqa: F401
+from typing import Any

 import defopt
+from hdx.utilities.easy_logging import setup_logging
+from hdx.utilities.useragent import UserAgent
 from makefun import with_signature

 from hdx.api import __version__
 from hdx.api.configuration import Configuration
-from hdx.utilities.easy_logging import setup_logging
-from hdx.utilities.useragent import UserAgent

 logger = logging.getLogger(__name__)
 setup_logging(log_file="errors.log")
@@ -20,12 +21,12 @@ def facade(projectmainfn: Callable[[Any], None], **kwargs: Any):
     """Facade to simplify project setup that calls project main function. It infers
     command line arguments from the passed in function using defopt. The function passed
-    in should have type hints and a docstring from which to infer the command line
+    in should have type hints from which to infer the command line
     arguments. Any **kwargs given will be merged with command line arguments, with the
     command line arguments taking precedence.

     Args:
-        projectmainfn ((Any) -> None): main function of project
+        projectmainfn: main function of project
         **kwargs: Configuration parameters to pass to HDX Configuration & other parameters to pass to main function

     Returns:
@@ -66,7 +67,7 @@ def facade(projectmainfn: Callable[[Any], None], **kwargs: Any):
                 count += 1
             else:
                 if param_type == "str":
-                    param_type = "Optional[str]"
+                    param_type = "str | None"
                     default = None
                 elif param_type == "bool":
                     default = False
@@ -75,7 +76,7 @@ def facade(projectmainfn: Callable[[Any], None], **kwargs: Any):
                         "Configuration.create has new parameter with unknown type!"
                    )
             param_names.append(f"{param_name}: {param_type} = {default}")
-            main_doc.append(f"\n    {param_name} ({param_type}): {param_info.doc}")
+            main_doc.append(f"\n    {param_name}: {param_info.doc}")
     main_doc = "".join(main_doc)

     projectmainname = projectmainfn.__name__
diff --git a/src/hdx/facades/keyword_arguments.py b/src/hdx/facades/keyword_arguments.py
index 425591cb..09af9cc3 100755
--- a/src/hdx/facades/keyword_arguments.py
+++ b/src/hdx/facades/keyword_arguments.py
@@ -1,13 +1,15 @@
 """Facade to simplify project setup that calls project main function with kwargs"""

 import logging
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any

-from hdx.api import __version__
-from hdx.api.configuration import Configuration
 from hdx.utilities.easy_logging import setup_logging
 from hdx.utilities.useragent import UserAgent

+from hdx.api import __version__
+from hdx.api.configuration import Configuration
+
 logger = logging.getLogger(__name__)
 setup_logging(log_file="errors.log")
@@ -16,7 +18,7 @@ def facade(projectmainfn: Callable[[Any], None], **kwargs: Any):
     """Facade to simplify project setup that calls project main function

     Args:
-        projectmainfn ((Any) -> None): main function of project
+        projectmainfn: main function of project
         **kwargs: configuration parameters to pass to HDX Configuration & other parameters to pass to main function

     Returns:
diff --git a/src/hdx/facades/simple.py b/src/hdx/facades/simple.py
index b424d0cd..f0d7808f 100755
--- a/src/hdx/facades/simple.py
+++ b/src/hdx/facades/simple.py
@@ -1,13 +1,15 @@
 """Facade to simplify project setup that calls project main function"""

 import logging
-from typing import Any, Callable
+from collections.abc import Callable
+from typing import Any

-from hdx.api import __version__
-from hdx.api.configuration import Configuration
 from hdx.utilities.easy_logging import setup_logging
 from hdx.utilities.useragent import UserAgent

+from hdx.api import __version__
+from hdx.api.configuration import Configuration
+
 logger = logging.getLogger(__name__)
 setup_logging(log_file="errors.log")
@@ -16,7 +18,7 @@ def facade(projectmainfn: Callable[[], None], **kwargs: Any):
     """Facade to simplify project setup that calls project main function

     Args:
-        projectmainfn (() -> None): main function of project
+        projectmainfn: main function of project
         **kwargs: configuration parameters to pass to HDX Configuration class

     Returns:
diff --git a/tests/hdx/api/test_ckan.py b/tests/hdx/api/test_ckan.py
index c653679a..92a449d0 100644
--- a/tests/hdx/api/test_ckan.py
+++ b/tests/hdx/api/test_ckan.py
@@ -14,6 +14,8 @@
 import gspread
 import pytest
 from gspread.urls import DRIVE_FILES_API_V3_URL
+from hdx.location.country import Country
+from hdx.utilities.dateparse import now_utc

 from hdx.api.configuration import Configuration
 from hdx.api.locations import Locations
@@ -21,8 +23,6 @@
 from hdx.data.resource import Resource
 from hdx.data.user import User
 from hdx.data.vocabulary import Vocabulary
-from hdx.location.country import Country
-from hdx.utilities.dateparse import now_utc

 logger = logging.getLogger(__name__)
diff --git a/tests/hdx/api/test_configuration.py b/tests/hdx/api/test_configuration.py
index 2ccc2f91..7a7d6929 100755
--- a/tests/hdx/api/test_configuration.py
+++ b/tests/hdx/api/test_configuration.py
@@ -3,12 +3,12 @@
 from os.path import join

 import pytest
+from hdx.utilities.loader import LoadError
+from hdx.utilities.useragent import UserAgentError

 from hdx.api import __version__
Configuration, ConfigurationError from hdx.api.remotehdx import RemoteHDX -from hdx.utilities.loader import LoadError -from hdx.utilities.useragent import UserAgentError class TestConfiguration: diff --git a/tests/hdx/api/test_locations.py b/tests/hdx/api/test_locations.py index 75bb6901..acff9cd2 100755 --- a/tests/hdx/api/test_locations.py +++ b/tests/hdx/api/test_locations.py @@ -1,8 +1,9 @@ """HDX Location Tests""" +from hdx.location.country import Country + from hdx.api.configuration import Configuration from hdx.api.locations import Locations -from hdx.location.country import Country class MyConfiguration: diff --git a/tests/hdx/api/utilities/test_hdx_error_handler.py b/tests/hdx/api/utilities/test_hdx_error_handler.py index 03fe9ce9..19df5a93 100644 --- a/tests/hdx/api/utilities/test_hdx_error_handler.py +++ b/tests/hdx/api/utilities/test_hdx_error_handler.py @@ -3,9 +3,9 @@ import logging import pytest +from hdx.utilities.easy_logging import setup_logging from hdx.api.utilities.hdx_error_handler import HDXErrorHandler -from hdx.utilities.easy_logging import setup_logging setup_logging() diff --git a/tests/hdx/api/utilities/test_hdx_state.py b/tests/hdx/api/utilities/test_hdx_state.py index f362618d..f0cf565a 100644 --- a/tests/hdx/api/utilities/test_hdx_state.py +++ b/tests/hdx/api/utilities/test_hdx_state.py @@ -9,12 +9,13 @@ from tempfile import gettempdir import pytest +from hdx.utilities.dateparse import iso_string_from_datetime, parse_date -from ... import MockResponse, dataset_resultdict -from ...data.test_resource import resultdict from hdx.api.configuration import Configuration from hdx.api.utilities.hdx_state import HDXState -from hdx.utilities.dateparse import iso_string_from_datetime, parse_date + +from ... import MockResponse, dataset_resultdict +from ...data.test_resource import resultdict class TestState: diff --git a/tests/hdx/data/test_dataset_add_hapi_error.py b/tests/hdx/data/test_dataset_add_hapi_error.py index ac6cf0ef..5812bace 100644 --- a/tests/hdx/data/test_dataset_add_hapi_error.py +++ b/tests/hdx/data/test_dataset_add_hapi_error.py @@ -3,10 +3,11 @@ import pytest -from .. import MockResponse, dataset_data, dataset_resultdict, resources_data from hdx.api.configuration import Configuration from hdx.data.dataset import Dataset +from .. import MockResponse, dataset_data, dataset_resultdict, resources_data + class TestDatasetAddHAPIError: @pytest.fixture(scope="function") diff --git a/tests/hdx/data/test_dataset_core.py b/tests/hdx/data/test_dataset_core.py index 9e8081c4..671e2bdb 100755 --- a/tests/hdx/data/test_dataset_core.py +++ b/tests/hdx/data/test_dataset_core.py @@ -9,8 +9,18 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries +from hdx.utilities.loader import load_yaml +from hdx.utilities.path import temp_dir from pytest_check import check +from hdx.api import __version__ +from hdx.api.configuration import Configuration +from hdx.data.dataset import Dataset, NotRequestableError +from hdx.data.hdxobject import HDXError +from hdx.data.resource import Resource +from hdx.data.resource_view import ResourceView + from .. 
import ( MockResponse, dataset_data, @@ -25,15 +35,6 @@ resource_view_mockshow, ) from .test_vocabulary import vocabulary_mockshow -from hdx.api import __version__ -from hdx.api.configuration import Configuration -from hdx.data.dataset import Dataset, NotRequestableError -from hdx.data.hdxobject import HDXError -from hdx.data.resource import Resource -from hdx.data.resource_view import ResourceView -from hdx.utilities.dictandlist import merge_two_dictionaries -from hdx.utilities.loader import load_yaml -from hdx.utilities.path import temp_dir searchdict = load_yaml(join("tests", "fixtures", "dataset_search_results.yaml")) dataset_list = [ diff --git a/tests/hdx/data/test_dataset_noncore.py b/tests/hdx/data/test_dataset_noncore.py index 88fbf50f..dab619c8 100755 --- a/tests/hdx/data/test_dataset_noncore.py +++ b/tests/hdx/data/test_dataset_noncore.py @@ -6,6 +6,16 @@ from os.path import join import pytest +from hdx.location.country import Country +from hdx.utilities.path import temp_dir +from hdx.utilities.saver import save_text + +from hdx.api.configuration import Configuration +from hdx.data.dataset import Dataset +from hdx.data.hdxobject import HDXError +from hdx.data.organization import Organization +from hdx.data.user import User +from hdx.data.vocabulary import Vocabulary from .. import ( MockResponse, @@ -28,15 +38,6 @@ from .test_showcase import showcase_resultdict from .test_user import user_mockshow from .test_vocabulary import vocabulary_mockshow -from hdx.api.configuration import Configuration -from hdx.data.dataset import Dataset -from hdx.data.hdxobject import HDXError -from hdx.data.organization import Organization -from hdx.data.user import User -from hdx.data.vocabulary import Vocabulary -from hdx.location.country import Country -from hdx.utilities.path import temp_dir -from hdx.utilities.saver import save_text class TestDatasetNoncore: diff --git a/tests/hdx/data/test_dataset_resource_generation.py b/tests/hdx/data/test_dataset_resource_generation.py index 5dca7faf..f5c4d996 100644 --- a/tests/hdx/data/test_dataset_resource_generation.py +++ b/tests/hdx/data/test_dataset_resource_generation.py @@ -4,14 +4,14 @@ from os.path import join import pytest - -from hdx.data.dataset import Dataset -from hdx.data.hdxobject import HDXError from hdx.utilities.compare import assert_files_same from hdx.utilities.dateparse import parse_date_range from hdx.utilities.downloader import Download from hdx.utilities.path import temp_dir +from hdx.data.dataset import Dataset +from hdx.data.hdxobject import HDXError + class TestDatasetResourceGeneration: url = "https://raw.githubusercontent.com/OCHA-DAP/hdx-python-api/main/tests/fixtures/test_data.csv" diff --git a/tests/hdx/data/test_organization.py b/tests/hdx/data/test_organization.py index 623d96a5..e848b904 100755 --- a/tests/hdx/data/test_organization.py +++ b/tests/hdx/data/test_organization.py @@ -5,15 +5,16 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries +from hdx.utilities.loader import load_yaml -from .. import MockResponse, organization_data, user_data -from .test_user import user_mockshow from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError from hdx.data.organization import Organization from hdx.data.user import User -from hdx.utilities.dictandlist import merge_two_dictionaries -from hdx.utilities.loader import load_yaml + +from .. 
import MockResponse, organization_data, user_data +from .test_user import user_mockshow resultdict = load_yaml(join("tests", "fixtures", "organization_show_results.yaml")) diff --git a/tests/hdx/data/test_resource.py b/tests/hdx/data/test_resource.py index 96b67b58..9326c302 100755 --- a/tests/hdx/data/test_resource.py +++ b/tests/hdx/data/test_resource.py @@ -8,19 +8,20 @@ from os.path import basename, join import pytest - -from .. import MockResponse, dataset_resultdict, resource_data -from .test_resource_view import resource_view_list, resource_view_mocklist -from hdx.api.configuration import Configuration -from hdx.data.dataset import Dataset -from hdx.data.hdxobject import HDXError -from hdx.data.resource import Resource from hdx.utilities.dateparse import parse_date from hdx.utilities.dictandlist import merge_two_dictionaries from hdx.utilities.downloader import Download, DownloadError from hdx.utilities.path import get_temp_dir from hdx.utilities.retriever import Retrieve +from hdx.api.configuration import Configuration +from hdx.data.dataset import Dataset +from hdx.data.hdxobject import HDXError +from hdx.data.resource import Resource + +from .. import MockResponse, dataset_resultdict, resource_data +from .test_resource_view import resource_view_list, resource_view_mocklist + resultdict = { "cache_last_updated": None, "package_id": "6f36a41c-f126-4b18-aaaf-6c2ddfbc5d4d", diff --git a/tests/hdx/data/test_resource_view.py b/tests/hdx/data/test_resource_view.py index ef376a25..db86ca3e 100755 --- a/tests/hdx/data/test_resource_view.py +++ b/tests/hdx/data/test_resource_view.py @@ -5,12 +5,13 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries -from .. import MockResponse from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError from hdx.data.resource_view import ResourceView -from hdx.utilities.dictandlist import merge_two_dictionaries + +from .. import MockResponse hxl_preview_config = '{"configVersion":2,"bites":[{"init":true,"type":"key figure","filteredValues":[],"errorMsg":null,"ingredient":{"aggregateColumn":null,"valueColumn":"#affected+killed","aggregateFunction":"sum"},"dataTitle":"#affected+killed","displayCategory":"Key Figures","unit":null,"hashCode":-1955043658,"title":"Sum of fatalities","value":null},{"init":true,"type":"chart","filteredValues":[],"errorMsg":null,"swapAxis":true,"showGrid":true,"pieChart":false,"ingredient":{"aggregateColumn":"#adm1+name","valueColumn":"#affected+killed","aggregateFunction":"sum"},"dataTitle":"#affected+killed","displayCategory":"Charts","hashCode":738289179,"title":"Sum of fatalities grouped by admin1","values":null,"categories":null},{"init":true,"type":"chart","filteredValues":[],"errorMsg":null,"swapAxis":true,"showGrid":true,"pieChart":false,"ingredient":{"aggregateColumn":"#adm2+name","valueColumn":"#affected+killed","aggregateFunction":"sum"},"dataTitle":"#affected+killed","displayCategory":"Charts","hashCode":766918330,"title":"Sum of fatalities grouped by admin2","values":null,"categories":null}]}' diff --git a/tests/hdx/data/test_showcase.py b/tests/hdx/data/test_showcase.py index 21d16d9b..51c42615 100755 --- a/tests/hdx/data/test_showcase.py +++ b/tests/hdx/data/test_showcase.py @@ -5,15 +5,16 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries +from hdx.utilities.loader import load_yaml -from .. 
import MockResponse -from .test_vocabulary import vocabulary_mockshow from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError from hdx.data.showcase import Showcase from hdx.data.vocabulary import Vocabulary -from hdx.utilities.dictandlist import merge_two_dictionaries -from hdx.utilities.loader import load_yaml + +from .. import MockResponse +from .test_vocabulary import vocabulary_mockshow showcase_resultdict = { "relationships_as_object": [], diff --git a/tests/hdx/data/test_update_dataset_resources.py b/tests/hdx/data/test_update_dataset_resources.py index c7a4a306..1e3c3a7e 100644 --- a/tests/hdx/data/test_update_dataset_resources.py +++ b/tests/hdx/data/test_update_dataset_resources.py @@ -1,14 +1,14 @@ from os.path import join import pytest +from hdx.location.country import Country +from hdx.utilities.loader import load_json from hdx.api.configuration import Configuration from hdx.api.locations import Locations from hdx.data.dataset import Dataset from hdx.data.resource import Resource from hdx.data.vocabulary import Vocabulary -from hdx.location.country import Country -from hdx.utilities.loader import load_json class TestUpdateDatasetResourcesLogic: diff --git a/tests/hdx/data/test_update_logic.py b/tests/hdx/data/test_update_logic.py index d542bddb..e69fac38 100644 --- a/tests/hdx/data/test_update_logic.py +++ b/tests/hdx/data/test_update_logic.py @@ -2,6 +2,10 @@ from os.path import join import pytest +from hdx.location.country import Country +from hdx.utilities.loader import load_yaml +from hdx.utilities.matching import multiple_replace +from hdx.utilities.path import get_temp_dir from slugify import slugify from hdx.api.configuration import Configuration @@ -9,10 +13,6 @@ from hdx.data.dataset import Dataset from hdx.data.resource import Resource from hdx.data.vocabulary import Vocabulary -from hdx.location.country import Country -from hdx.utilities.loader import load_yaml -from hdx.utilities.matching import multiple_replace -from hdx.utilities.path import get_temp_dir class TestUpdateLogic: diff --git a/tests/hdx/data/test_user.py b/tests/hdx/data/test_user.py index 2f9447ed..598558d2 100755 --- a/tests/hdx/data/test_user.py +++ b/tests/hdx/data/test_user.py @@ -5,13 +5,14 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries +from hdx.utilities.loader import load_yaml -from .. import MockResponse, user_data from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError from hdx.data.user import User -from hdx.utilities.dictandlist import merge_two_dictionaries -from hdx.utilities.loader import load_yaml + +from .. import MockResponse, user_data resultdict = { "openid": None, diff --git a/tests/hdx/data/test_vocabulary.py b/tests/hdx/data/test_vocabulary.py index 0232bdeb..99e01191 100755 --- a/tests/hdx/data/test_vocabulary.py +++ b/tests/hdx/data/test_vocabulary.py @@ -5,13 +5,14 @@ from os.path import join import pytest +from hdx.utilities.dictandlist import merge_two_dictionaries from requests.exceptions import RetryError -from .. import MockResponse from hdx.api.configuration import Configuration from hdx.data.hdxobject import HDXError from hdx.data.vocabulary import ChainRuleError, Vocabulary -from hdx.utilities.dictandlist import merge_two_dictionaries + +from .. 
import MockResponse vocabulary_list = [ { diff --git a/tests/hdx/facades/__init__.py b/tests/hdx/facades/__init__.py index 6248b56d..be4388a4 100755 --- a/tests/hdx/facades/__init__.py +++ b/tests/hdx/facades/__init__.py @@ -40,17 +40,17 @@ def my_testfnkw(**kwargs): raise ValueError("Some failure!") -def my_testfnia(mydata: Optional[str] = None, myflag: bool = False) -> str: +def my_testfnia(mydata: str | None = None, myflag: bool = False) -> str: """My test function. It takes in mydata an optional string which defaults to None. It assigns that to testresult which is an object of type TestResult. It returns mydata. Args: - mydata (Optional[str]): Data. Defaults to None. - myflag (bool): My flag. Defaults to False. + mydata: Data. Defaults to None. + myflag: My flag. Defaults to False. Returns: - str: String + String """ testresult.actual_result = mydata return mydata diff --git a/tests/hdx/facades/test_infer_arguments.py b/tests/hdx/facades/test_infer_arguments.py index b93fd371..3a1a2eb4 100755 --- a/tests/hdx/facades/test_infer_arguments.py +++ b/tests/hdx/facades/test_infer_arguments.py @@ -3,11 +3,12 @@ import sys import pytest +from hdx.utilities.useragent import UserAgent -from . import my_testfnia, testresult from hdx.api.configuration import ConfigurationError from hdx.facades.infer_arguments import facade -from hdx.utilities.useragent import UserAgent + +from . import my_testfnia, testresult class TestInferArguments: diff --git a/tests/hdx/facades/test_keyword_arguments.py b/tests/hdx/facades/test_keyword_arguments.py index 461ae173..642be904 100755 --- a/tests/hdx/facades/test_keyword_arguments.py +++ b/tests/hdx/facades/test_keyword_arguments.py @@ -1,11 +1,12 @@ """Simple Facade Tests""" import pytest +from hdx.utilities.useragent import UserAgent, UserAgentError -from . import my_testfnkw, testresult from hdx.api import __version__ from hdx.facades.keyword_arguments import facade -from hdx.utilities.useragent import UserAgent, UserAgentError + +from . import my_testfnkw, testresult class TestKeywordArguments: diff --git a/tests/hdx/facades/test_simple.py b/tests/hdx/facades/test_simple.py index b2e5db58..8d543ef6 100755 --- a/tests/hdx/facades/test_simple.py +++ b/tests/hdx/facades/test_simple.py @@ -1,11 +1,12 @@ """Simple Facade Tests""" import pytest +from hdx.utilities.useragent import UserAgent, UserAgentError -from . import my_excfn, my_testfn, my_testkeyfn, my_testuafn, testresult from hdx.api import __version__ from hdx.facades.simple import facade -from hdx.utilities.useragent import UserAgent, UserAgentError + +from . 
import my_excfn, my_testfn, my_testkeyfn, my_testuafn, testresult class TestSimple: diff --git a/workingexample/run.py b/workingexample/run.py index 9fc6d6d1..06c20ddc 100755 --- a/workingexample/run.py +++ b/workingexample/run.py @@ -5,9 +5,10 @@ import logging -from .my_code import generate_dataset from hdx.facades.simple import facade +from .my_code import generate_dataset + logger = logging.getLogger(__name__) From 4c31df48cfbe4e55112cd74befe691b63d68a8dc Mon Sep 17 00:00:00 2001 From: mcarans Date: Tue, 13 Jan 2026 12:05:18 +1300 Subject: [PATCH 2/6] Modernise to Py310 --- src/hdx/api/utilities/dataset_title_helper.py | 2 +- src/hdx/data/dataset.py | 3 +- src/hdx/data/vocabulary.py | 10 +--- tests/hdx/__init__.py | 9 ++-- tests/hdx/api/utilities/test_hdx_state.py | 12 ++--- tests/hdx/data/test_dataset_add_hapi_error.py | 6 +-- tests/hdx/data/test_dataset_core.py | 53 ++++++------------- tests/hdx/data/test_dataset_noncore.py | 9 ++-- tests/hdx/data/test_organization.py | 21 +++----- tests/hdx/data/test_resource.py | 51 ++++++------------ tests/hdx/data/test_resource_view.py | 15 ++---- tests/hdx/data/test_showcase.py | 27 ++++------ tests/hdx/data/test_user.py | 39 +++++--------- tests/hdx/data/test_vocabulary.py | 27 ++++------ tests/hdx/facades/__init__.py | 2 - 15 files changed, 92 insertions(+), 194 deletions(-) diff --git a/src/hdx/api/utilities/dataset_title_helper.py b/src/hdx/api/utilities/dataset_title_helper.py index ed27bb28..bc6ea93f 100755 --- a/src/hdx/api/utilities/dataset_title_helper.py +++ b/src/hdx/api/utilities/dataset_title_helper.py @@ -26,7 +26,7 @@ class DatasetTitleHelper: r"([12]\d\d\d)(/(\d{1,2}))?(-| & | and )([12]\d\d\d)" ) YEAR_RANGE_PATTERN2 = re.compile(r"([12]\d\d\d)([/-])(\d{1,2})") - PUNCTUATION_PATTERN = re.compile(r"[%s]" % punctuation) + PUNCTUATION_PATTERN = re.compile(rf"[{punctuation}]") EMPTY_BRACKET_PATTERN = re.compile(r"(\s?\(\s*\)\s?)") WORD_RIGHT_BRACKET_PATTERN = re.compile(r"\b(\s*)(\w{2,})\b\)") DATE_INTRO_WORDS = ["on", "at", "for", "of", "in"] diff --git a/src/hdx/data/dataset.py b/src/hdx/data/dataset.py index ed90ec45..103180ad 100755 --- a/src/hdx/data/dataset.py +++ b/src/hdx/data/dataset.py @@ -1786,8 +1786,7 @@ def add_country_location( return self.add_other_location( iso3, exact=exact, - alterror="Country: %s with iso3: %s could not be found in HDX list!" 
- % (country, iso3), + alterror=f"Country: {country} with iso3: {iso3} could not be found in HDX list!", locations=locations, ) diff --git a/src/hdx/data/vocabulary.py b/src/hdx/data/vocabulary.py index 9ea27b5e..c1f6cd81 100755 --- a/src/hdx/data/vocabulary.py +++ b/src/hdx/data/vocabulary.py @@ -417,15 +417,7 @@ def read_tags_mappings( chainerror = True if failchained: logger.error( - "Chained rules: %s (%s -> %s) | %s (%s -> %s)" - % ( - action, - tag, - final_tags, - action2, - final_tag, - final_tags2, - ) + f"Chained rules: {action} ({tag} -> {final_tags}) | {action2} ({final_tag} -> {final_tags2})" ) if failchained and chainerror: diff --git a/tests/hdx/__init__.py b/tests/hdx/__init__.py index 2c3dde6b..372904ad 100755 --- a/tests/hdx/__init__.py +++ b/tests/hdx/__init__.py @@ -271,16 +271,14 @@ def dataset_mockshow(url, datadict): result = json.dumps(resources_data[0]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) else: if datadict["id"] in ("TEST1", "DatasetExist"): result = json.dumps(dataset_resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}}', ) if datadict["id"] == "TEST2": return MockResponse( @@ -303,8 +301,7 @@ def dataset_mockshow(url, datadict): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}}', ) return MockResponse( diff --git a/tests/hdx/api/utilities/test_hdx_state.py b/tests/hdx/api/utilities/test_hdx_state.py index f0cf565a..cb808762 100644 --- a/tests/hdx/api/utilities/test_hdx_state.py +++ b/tests/hdx/api/utilities/test_hdx_state.py @@ -52,8 +52,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) else: myresultdict = deepcopy(dataset_resultdict) @@ -63,8 +62,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(myresultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}}', ) Configuration.read().remoteckan().session = MockSession() @@ -78,8 +76,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) else: 
myresultdict = deepcopy(dataset_resultdict) @@ -89,8 +86,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(myresultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}}', ) Configuration.read().remoteckan().session = MockSession() diff --git a/tests/hdx/data/test_dataset_add_hapi_error.py b/tests/hdx/data/test_dataset_add_hapi_error.py index 5812bace..260de6f3 100644 --- a/tests/hdx/data/test_dataset_add_hapi_error.py +++ b/tests/hdx/data/test_dataset_add_hapi_error.py @@ -33,8 +33,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resource_json) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}}', ) if "patch" not in url: return MockResponse( @@ -44,8 +43,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(datadict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_patch"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_patch"}}', ) Configuration.read().remoteckan().session = MockSession() diff --git a/tests/hdx/data/test_dataset_core.py b/tests/hdx/data/test_dataset_core.py index 671e2bdb..ee9be5a2 100755 --- a/tests/hdx/data/test_dataset_core.py +++ b/tests/hdx/data/test_dataset_core.py @@ -95,8 +95,7 @@ def mocksearch(url, datadict): result = json.dumps(newsearchdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) if datadict["q"] == '"': return MockResponse( @@ -126,8 +125,7 @@ def mocklist(url, datadict): result = json.dumps(dataset_list[offset : offset + limit]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_list"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_list"}}', ) @@ -168,8 +166,7 @@ def mockall(url, datadict): result = json.dumps(newsearchdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) @@ -182,8 +179,7 @@ def mockhxlupdate(url, datadict): result = json.dumps(hxlupdate_list) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_hxl_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_hxl_update"}}', ) @@ -224,8 +220,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return 
MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_revise"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_revise"}}', ) return MockResponse( 404, @@ -255,22 +250,19 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resource_view_list) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}}', ) if "resource_view" in url: result = json.dumps(resource_view_list[1]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}}', ) if "resource" in url: result = json.dumps(resources_data[0]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}}', ) if "create" not in url and "revise" not in url: return MockResponse( @@ -287,8 +279,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_create"}}', ) if datadict["name"] == "MyDataset2": return MockResponse( @@ -322,8 +313,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resource_view_list) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}}', ) if "resource_view" in url: if "show" in url: @@ -349,8 +339,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_update"}}', ) else: if "revise" not in url: @@ -379,8 +368,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_update"}}', ) return MockResponse( 404, @@ -408,8 +396,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["id"] == "6f36a41c-f126-4b18-aaaf-6c2ddfbc5d4d": 
return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_resource_reorder"}' - % decodedata, + f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_resource_reorder"}}', ) return MockResponse( @@ -436,15 +423,13 @@ def post(url, data, headers, files, allow_redirects, auth=None): if "resource" in url: return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_delete"}' - % decodedata, + f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_delete"}}', ) if datadict["id"] == "6f36a41c-f126-4b18-aaaf-6c2ddfbc5d4d": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_delete"}' - % decodedata, + f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=dataset_delete"}}', ) return MockResponse( @@ -499,8 +484,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(dataset_autocomplete) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_autocomplete"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_autocomplete"}}', ) Configuration.read().remoteckan().session = MockSession() @@ -666,10 +650,7 @@ def test_update_in_hdx(self, configuration, post_update, date_pattern, test_xlsx "crisis-somewhere", ] assert dataset["state"] == "active" - pattern = ( - r"HDXPythonLibrary/%s-test \([12]\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d.\d\d\d\d\d\d\)" - % __version__ - ) + pattern = rf"HDXPythonLibrary/{__version__}-test \([12]\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d.\d\d\d\d\d\d\)" match = re.search(pattern, dataset["updated_by_script"]) assert match resourceviewdata = { diff --git a/tests/hdx/data/test_dataset_noncore.py b/tests/hdx/data/test_dataset_noncore.py index dab619c8..94824d79 100755 --- a/tests/hdx/data/test_dataset_noncore.py +++ b/tests/hdx/data/test_dataset_noncore.py @@ -90,8 +90,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps([showcase_resultdict]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_package_showcase_list"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_package_showcase_list"}}', ) if "association_delete" in url: TestDatasetNoncore.association = "delete" @@ -104,8 +103,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(datadict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_association_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_association_create"}}', ) return dataset_mockshow(url, datadict) @@ -126,8 +124,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resource_view_list) return MockResponse( 200, - '{"success": true, "result": %s, "help": 
"http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_create_default_resource_views"}}', ) if "resource_view" in url: if "show" in url: diff --git a/tests/hdx/data/test_organization.py b/tests/hdx/data/test_organization.py index e848b904..800d79a2 100755 --- a/tests/hdx/data/test_organization.py +++ b/tests/hdx/data/test_organization.py @@ -54,8 +54,7 @@ def organization_mockshow(url, datadict): ): return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_show"}}', ) if datadict["id"] == "TEST2": return MockResponse( @@ -81,8 +80,7 @@ def mocklist(url): ) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}' - % json.dumps(organization_list), + f'{{"success": true, "result": {json.dumps(organization_list)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}}', ) @@ -95,8 +93,7 @@ def mockgetdatasets(url, datadict): if datadict["fq"] == "organization:acled": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % json.dumps(searchdict), + f'{{"success": true, "result": {json.dumps(searchdict)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) @@ -139,8 +136,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["name"] == "MyOrganization1": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_create"}}', ) if datadict["name"] == "MyOrganization2": return MockResponse( @@ -180,8 +176,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["name"] == "MyOrganization1": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_update"}}', ) if datadict["name"] == "MyOrganization2": return MockResponse( @@ -218,8 +213,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["id"] == "b67e6c74-c185-4f43-b561-0e114a736f19": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_delete"}' - % decodedata, + f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_delete"}}', ) return MockResponse( @@ -274,8 +268,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(organization_autocomplete) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_autocomplete"}' - % result, + f'{{"success": true, "result": {result}, "help": 
"http://test-data.humdata.org/api/3/action/help_show?name=organization_autocomplete"}}', ) Configuration.read().remoteckan().session = MockSession() diff --git a/tests/hdx/data/test_resource.py b/tests/hdx/data/test_resource.py index 9326c302..8d998dc4 100755 --- a/tests/hdx/data/test_resource.py +++ b/tests/hdx/data/test_resource.py @@ -160,8 +160,7 @@ def mockshow(url, datadict): if datadict_id == "74b74ae1-df0c-4716-829f-4f939a046811": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) if datadict_id == "74b74ae1-df0c-4716-829f-4f939a046812": return MockResponse( @@ -179,8 +178,7 @@ def mockshow(url, datadict): result = json.dumps(resdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) if datadict_id == "74b74ae1-df0c-4716-829f-4f939a046815": resdictcopy = copy.deepcopy(resultdict) @@ -188,8 +186,7 @@ def mockshow(url, datadict): result = json.dumps(resdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) if datadict_id in ( "74b74ae1-df0c-4716-829f-4f939a046816", @@ -207,8 +204,7 @@ def mockshow(url, datadict): result = json.dumps(resdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_show"}}', ) return MockResponse( 404, @@ -225,8 +221,7 @@ def mockpatch(url, datadict): result = json.dumps(resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_patch"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_patch"}}', ) @@ -244,8 +239,7 @@ def mockdataset(url, datadict): result = json.dumps(dataset_resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_show"}}', ) return MockResponse( 404, @@ -263,8 +257,7 @@ def mocksearch(url, datadict): if datadict["query"] == "name:ACLED": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_search"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_search"}}', ) if datadict["query"] == "fail": return MockResponse( @@ -286,8 +279,7 @@ def mockresourceview(url, decodedata): if "delete" in url: return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_delete"}' - % decodedata, + 
f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_delete"}}', ) datadict = json.loads(decodedata) if "show" in url: @@ -300,15 +292,13 @@ def mockresourceview(url, decodedata): result = json.dumps(resource_view_list[0]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}}', ) if datadict["title"] == "Quick Charts": result = json.dumps(resource_view_list[1]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}}', ) if "list" in url: return resource_view_mocklist(url, datadict) @@ -318,15 +308,13 @@ def mockresourceview(url, decodedata): result = json.dumps(resource_view_list[0]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}}', ) if datadict["title"] == "Quick Charts": result = json.dumps(resource_view_list[1]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}}', ) if "reorder" in url: result = json.dumps( @@ -340,8 +328,7 @@ def mockresourceview(url, decodedata): ) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_reorder"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_reorder"}}', ) return MockResponse( @@ -422,8 +409,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_create"}}', ) if datadict["name"] == "MyResource2": return MockResponse( @@ -482,8 +468,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_update"}}', ) if datadict["name"] == "MyResource2": return MockResponse( @@ -520,8 +505,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["id"] == "de6549d8-268b-4dfe-adaf-a4ae5c8510d5": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_delete"}' - % decodedata, + f'{{"success": true, "result": {decodedata}, "help": 
"http://test-data.humdata.org/api/3/action/help_show?name=resource_delete"}}', ) return MockResponse( @@ -673,8 +657,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(resultdictcopy) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=hdx_mark_broken_link_in_resource"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=hdx_mark_broken_link_in_resource"}}', ) Configuration.read().remoteckan().session = MockSession() diff --git a/tests/hdx/data/test_resource_view.py b/tests/hdx/data/test_resource_view.py index db86ca3e..e7426a0d 100755 --- a/tests/hdx/data/test_resource_view.py +++ b/tests/hdx/data/test_resource_view.py @@ -51,8 +51,7 @@ def resource_view_mockshow(url, datadict): ): return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_show"}}', ) if datadict["id"] == "TEST2": return MockResponse( @@ -79,8 +78,7 @@ def resource_view_mocklist(url, datadict): if datadict["id"] == "25982d1c-f45a-45e1-b14e-87d367413045": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_list"}' - % json.dumps(resource_view_list), + f'{{"success": true, "result": {json.dumps(resource_view_list)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_list"}}', ) return MockResponse( 404, @@ -98,16 +96,14 @@ def resource_view_mockcreate(url, datadict): result = json.dumps(resultdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}}', ) if datadict["title"] == "Quick Charts": resultdictcopy = copy.deepcopy(resultdict) result = json.dumps(merge_two_dictionaries(resultdictcopy, datadict)) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_create"}}', ) if datadict["title"] == "XXX": return MockResponse( @@ -192,8 +188,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["title"] == "Quick Charts": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=resource_view_update"}}', ) if datadict["title"] == "XXX": return MockResponse( diff --git a/tests/hdx/data/test_showcase.py b/tests/hdx/data/test_showcase.py index 51c42615..a5e439ce 100755 --- a/tests/hdx/data/test_showcase.py +++ b/tests/hdx/data/test_showcase.py @@ -67,8 +67,7 @@ def mockshow(url, datadict): result = json.dumps(datasetsdict["results"]) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_list"}' - % result, + 
f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_list"}}', ) if "_show" not in url: return MockResponse( @@ -82,8 +81,7 @@ def mockshow(url, datadict): ): return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_show"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_show"}}', ) if datadict["id"] == "TEST2": return MockResponse( @@ -112,8 +110,7 @@ def mockallsearch(url, datadict): newsearchdict["results"] = [newsearchdict["results"][0]] return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % json.dumps(newsearchdict), + f'{{"success": true, "result": {json.dumps(newsearchdict)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) if datadict["q"] == "ACLED": newsearchdict = copy.deepcopy(allsearchdict) @@ -135,8 +132,7 @@ def mockallsearch(url, datadict): result = json.dumps(newsearchdict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) if datadict["q"] == "*:*": newsearchdict = copy.deepcopy(allsearchdict) @@ -145,8 +141,7 @@ def mockallsearch(url, datadict): x["id"] = f"{x['id']}{i}" return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}' - % json.dumps(newsearchdict), + f'{{"success": true, "result": {json.dumps(newsearchdict)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=package_search"}}', ) if datadict["q"] == '"': return MockResponse( @@ -202,8 +197,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): result = json.dumps(datadict) return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_association_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_package_association_create"}}', ) return mockshow(url, datadict) @@ -231,8 +225,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["title"] == "MyShowcase1": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_create"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_create"}}', ) if datadict["title"] == "MyShowcase2": return MockResponse( @@ -277,8 +270,7 @@ def post(url, data, headers, files, allow_redirects, auth=None): if datadict["title"] == "MyShowcase1": return MockResponse( 200, - '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_update"}' - % result, + f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_update"}}', ) if datadict["title"] == "MyShowcase2": return MockResponse( @@ -315,8 +307,7 @@ def post(url, data, headers, files, allow_redirects, 
auth=None):
                 if datadict["id"] == "05e392bf-04e0-4ca6-848c-4e87bba10746":
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_delete"}'
-                        % decodedata,
+                        f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=ckanext_showcase_delete"}}',
                     )

                 return MockResponse(
diff --git a/tests/hdx/data/test_user.py b/tests/hdx/data/test_user.py
index 598558d2..f0db6192 100755
--- a/tests/hdx/data/test_user.py
+++ b/tests/hdx/data/test_user.py
@@ -73,8 +73,7 @@ def user_mockshow(url, datadict):
     ):
         return MockResponse(
             200,
-            '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
-            % result,
+            f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}',
         )
     if datadict["id"] == "TEST2":
         return MockResponse(
@@ -100,8 +99,7 @@ def mocklist(url):
         )
     return MockResponse(
         200,
-        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_list"}'
-        % json.dumps(user_list),
+        f'{{"success": true, "result": {json.dumps(user_list)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_list"}}',
     )


@@ -174,8 +172,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 if datadict["name"] == "MyUser1":
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_create"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_create"}}',
                     )
                 if datadict["name"] == "MyUser2":
                     return MockResponse(
@@ -215,8 +212,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 if datadict["name"] == "MyUser1":
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_update"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_update"}}',
                     )
                 if datadict["name"] == "MyUser2":
                     return MockResponse(
@@ -253,8 +249,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 if datadict["id"] == "9f3e9973-7dbe-4c65-8820-f48578e3ffea":
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_delete"}'
-                        % decodedata,
+                        f'{{"success": true, "result": {decodedata}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_delete"}}',
                     )

                 return MockResponse(
@@ -277,8 +272,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 result = json.dumps(resultdict)
                 return MockResponse(
                     200,
-                    '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
-                    % result,
+                    f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}',
                 )

         Configuration.read().remoteckan().session = MockSession()
@@ -306,8 +300,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 elif "list" in url:
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}'
-                        % json.dumps(orglist),
+                        f'{{"success": true, "result": {json.dumps(orglist)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}}',
                     )
                 elif "organization" in url:
                     if "show" in url:
@@ -318,8 +311,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                         ):
                             return MockResponse(
                                 200,
-                                '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
-                                % result,
+                                f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}',
                             )

         Configuration.read().remoteckan().session = MockSession()
@@ -343,14 +335,12 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                     result = json.dumps(resultdict)
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}',
                     )
                 elif "list" in url:
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}'
-                        % json.dumps(orglist),
+                        f'{{"success": true, "result": {json.dumps(orglist)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=organization_list"}}',
                     )
                 elif "organization" in url:
                     if "show" in url:
@@ -361,8 +351,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                         ):
                             return MockResponse(
                                 200,
-                                '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
-                                % result,
+                                f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}',
                             )

         Configuration.read().remoteckan().session = MockSession()
@@ -399,8 +388,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 result = json.dumps(user_tokenlist)
                 return MockResponse(
                     200,
-                    '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_tokenlist"}'
-                    % result,
+                    f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_tokenlist"}}',
                 )
             return MockResponse(
                 404,
@@ -424,8 +412,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 result = json.dumps(user_autocomplete)
                 return MockResponse(
                     200,
-                    '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_autocomplete"}'
-                    % result,
+                    f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_autocomplete"}}',
                 )

         Configuration.read().remoteckan().session = MockSession()
diff --git a/tests/hdx/data/test_vocabulary.py b/tests/hdx/data/test_vocabulary.py
index 99e01191..9d8487fe 100755
--- a/tests/hdx/data/test_vocabulary.py
+++ b/tests/hdx/data/test_vocabulary.py
@@ -1055,8 +1055,7 @@ def vocabulary_mockshow(url, datadict):
     result = json.dumps(resultdict)
     return MockResponse(
         200,
-        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}'
-        % result,
+        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}}',
     )
     if (
         datadict["id"] == "Topics"
@@ -1065,8 +1064,7 @@
     ):
         result = json.dumps(vocabulary_list[2])
         return MockResponse(
             200,
-            '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}'
-            % result,
+            f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}}',
         )
     if datadict["id"] == "TEST2":
         return MockResponse(
@@ -1093,8 +1091,7 @@ def vocabulary_delete(url, datadict):
     result = json.dumps(resultdictcopy)
     return MockResponse(
         200,
-        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_update"}'
-        % result,
+        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_update"}}',
     )
     if "delete" not in url:
         return MockResponse(
@@ -1126,8 +1123,7 @@ def vocabulary_mocklist(url, datadict):
         )
     return MockResponse(
         200,
-        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_list"}'
-        % json.dumps(vocabulary_list),
+        f'{{"success": true, "result": {json.dumps(vocabulary_list)}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_list"}}',
     )


@@ -1175,15 +1171,13 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 result = json.dumps(resultdict)
                 return MockResponse(
                     200,
-                    '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_create"}'
-                    % result,
+                    f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_create"}}',
                 )
                 if datadict["name"] == "Topics":
                     result = json.dumps(vocabulary_list[2])
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}}',
                     )
                 if datadict["name"] == "XXX":
                     return MockResponse(
@@ -1222,8 +1216,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                     result = json.dumps(datadict)
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_show"}}',
                     )
                 resultdictcopy = copy.deepcopy(resultdict)
                 merge_two_dictionaries(resultdictcopy, datadict)
@@ -1236,8 +1229,7 @@
                 ]:
                     return MockResponse(
                         200,
-                        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_update"}'
-                        % result,
+                        f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=vocabulary_update"}}',
                     )
                 if datadict["name"] == "XXX":
                     return MockResponse(
@@ -1292,8 +1284,7 @@ def post(url, data, headers, files, allow_redirects, auth=None):
                 result = json.dumps(tag_autocomplete)
                 return MockResponse(
                     200,
-                    '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=tag_autocomplete"}'
-                    % result,
+                    f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=tag_autocomplete"}}',
                 )

         Configuration.read().remoteckan().session = MockSession()
diff --git a/tests/hdx/facades/__init__.py b/tests/hdx/facades/__init__.py
index be4388a4..1c1e7676 100755
--- a/tests/hdx/facades/__init__.py
+++ b/tests/hdx/facades/__init__.py
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from hdx.api.configuration import Configuration


From 593d7e9db493d5a920331c1b32013a91bb59072f Mon Sep 17 00:00:00 2001
From: mcarans
Date: Tue, 13 Jan 2026 12:30:47 +1300
Subject: [PATCH 3/6] Modernise to Py310

---
 hatch.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hatch.toml b/hatch.toml
index a7a7610d..0e363efe 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -34,4 +34,4 @@ run = """

 [envs.hatch-static-analysis]
 config-path = "none"
-dependencies = ["ruff==0.12.0"]
+dependencies = ["ruff==0.14.10"]

From cb039a7c815fe9c463716db86c17eb0523930296 Mon Sep 17 00:00:00 2001
From: mcarans
Date: Tue, 13 Jan 2026 15:36:54 +1300
Subject: [PATCH 4/6] Modernise to Py310

---
 documentation/index.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/documentation/index.md b/documentation/index.md
index 074612bb..fa8d226b 100755
--- a/documentation/index.md
+++ b/documentation/index.md
@@ -54,6 +54,8 @@ The library has detailed API documentation which can be found in the menu at the

 ## Breaking Changes

+From 6.6.0, Python 3.10 up is required
+
 From 6.5.7, get_size_and_hash moved to HDX Python Utilities

 From 6.5.2, remove unused `generate_qc_resource_from_rows` method.

From 576e242ad3335d2f5ce159557713fa7b204f3fe8 Mon Sep 17 00:00:00 2001
From: mcarans
Date: Tue, 13 Jan 2026 15:38:25 +1300
Subject: [PATCH 5/6] Modernise to Py310

---
 documentation/index.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/documentation/index.md b/documentation/index.md
index fa8d226b..dde6478b 100755
--- a/documentation/index.md
+++ b/documentation/index.md
@@ -54,7 +54,7 @@ The library has detailed API documentation which can be found in the menu at the

 ## Breaking Changes

-From 6.6.0, Python 3.10 up is required
+From 6.6.0, Python 3.10 or later is required

 From 6.5.7, get_size_and_hash moved to HDX Python Utilities

From 24a5a0f5344c5d4ff2bbcdb1fab73dced2c25761 Mon Sep 17 00:00:00 2001
From: mcarans
Date: Tue, 13 Jan 2026 15:50:04 +1300
Subject: [PATCH 6/6] Use latest HDX Python Country and Utilities

---
 documentation/index.md | 2 +-
 pyproject.toml         | 4 ++--
 requirements.txt       | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/documentation/index.md b/documentation/index.md
index dde6478b..999575b9 100755
--- a/documentation/index.md
+++ b/documentation/index.md
@@ -54,7 +54,7 @@ The library has detailed API documentation which can be found in the menu at the

 ## Breaking Changes

-From 6.6.0, Python 3.10 or later is required 
+From 6.6.0, Python 3.10 or later is required

 From 6.5.7, get_size_and_hash moved to HDX Python Utilities

diff --git a/pyproject.toml b/pyproject.toml
index b2fcb5ac..141ec337 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -37,8 +37,8 @@ dependencies = [
     "ckanapi>=4.8",
     "defopt>=7.0.0",
     "email_validator",
-    "hdx-python-country>=3.9.8",
-    "hdx-python-utilities>=3.9.9",
+    "hdx-python-country>=4.0.0",
+    "hdx-python-utilities>=4.0.0",
     "libhxl>=5.2.2",
     "makefun",
     "quantulum3",
diff --git a/requirements.txt b/requirements.txt
index 5084e890..a36af053 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -34,9 +34,9 @@ et-xmlfile==2.0.0
     # via openpyxl
 frictionless==5.18.1
     # via hdx-python-utilities
-hdx-python-country==3.9.8
+hdx-python-country==4.0.0
     # via hdx-python-api (pyproject.toml)
-hdx-python-utilities==3.9.9
+hdx-python-utilities==4.0.0
     # via
     #   hdx-python-api (pyproject.toml)
     #   hdx-python-country
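
Note (not part of any patch): nearly all of the test-suite churn above is one
mechanical rewrite — printf-style `%` interpolation replaced by f-strings, the
kind of fix Ruff's pyupgrade (UP) rules suggest once the floor is Python 3.10,
and likely how these hunks were generated. A minimal sketch of the pattern
follows, using a made-up `result` value rather than the real fixture dicts;
the point to notice is that the JSON template's literal braces must be doubled
once the string becomes an f-string:

    import json

    result = json.dumps({"name": "MyUser1"})

    # Before: %-interpolation; literal JSON braces need no escaping.
    old = (
        '{"success": true, "result": %s, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}'
        % result
    )

    # After: the expression is inlined and every literal { and } is doubled
    # ({{ and }}) so it survives f-string parsing; the output is identical.
    new = f'{{"success": true, "result": {result}, "help": "http://test-data.humdata.org/api/3/action/help_show?name=user_show"}}'

    assert old == new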