From 8eada189fc8d08473c0141c1072aba24ce152a4e Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 11:47:38 -0400 Subject: [PATCH 01/25] chore: migrate from pip-compile to uv for dependency management - Run migrate-to-uv to bootstrap pyproject.toml from requirements/base.txt and requirements/test.txt - Add full project metadata: name, version, description, requires-python>=3.11, license, hatchling build backend, entry point xqueue-watcher -> manager:main - Add newrelic as [project.optional-dependencies.production] - Add dev dependency group: coverage, mock, pytest-cov - Remove setup.py (replaced by pyproject.toml) - Remove all requirements/*.in and requirements/*.txt files (14 files) - Generate uv.lock with pinned dependency graph - Update Makefile: replace pip/pip-compile targets with uv sync / uv run pytest - Update .github/workflows/ci.yml: use astral-sh/setup-uv@v4, drop ubuntu-20.04 and Python 3.8, add Python 3.13, update to actions/checkout@v4 - Replace upgrade-python-requirements workflow with uv lock --upgrade + create-pull-request workflow Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .github/workflows/ci.yml | 16 +- .../workflows/upgrade-python-requirements.yml | 41 +- .gitignore | 6 + Makefile | 58 +- pyproject.toml | 40 ++ requirements/base.in | 10 - requirements/base.txt | 26 - requirements/ci.in | 6 - requirements/ci.txt | 66 -- requirements/common_constraints.txt | 20 - requirements/constraints.txt | 11 - requirements/pip.in | 7 - requirements/pip.txt | 16 - requirements/pip_tools.in | 4 - requirements/pip_tools.txt | 26 - requirements/production.in | 5 - requirements/production.txt | 36 - requirements/test.in | 8 - requirements/test.txt | 54 -- setup.py | 14 - uv.lock | 622 ++++++++++++++++++ 21 files changed, 719 insertions(+), 373 deletions(-) create mode 100644 pyproject.toml delete mode 100644 requirements/base.in delete mode 100644 requirements/base.txt delete mode 100644 requirements/ci.in delete mode 
100644 requirements/ci.txt delete mode 100644 requirements/common_constraints.txt delete mode 100644 requirements/constraints.txt delete mode 100644 requirements/pip.in delete mode 100644 requirements/pip.txt delete mode 100644 requirements/pip_tools.in delete mode 100644 requirements/pip_tools.txt delete mode 100644 requirements/production.in delete mode 100644 requirements/production.txt delete mode 100644 requirements/test.in delete mode 100644 requirements/test.txt delete mode 100644 setup.py create mode 100644 uv.lock diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 90bcd21..dcf2daf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,16 +16,20 @@ jobs: matrix: os: - ubuntu-latest - python-version: ['3.12'] + python-version: ['3.12', '3.13'] steps: - uses: actions/checkout@v4 - - name: setup python - uses: actions/setup-python@v5 + + - name: Install uv + uses: astral-sh/setup-uv@v4 with: - python-version: ${{ matrix.python-version }} + enable-cache: true + + - name: Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} - - name: Install requirements and Run Tests - run: make test + - name: Run Tests + run: uv run --python ${{ matrix.python-version }} pytest --cov=xqueue_watcher --cov-report=xml tests - name: Run Coverage uses: codecov/codecov-action@v4 diff --git a/.github/workflows/upgrade-python-requirements.yml b/.github/workflows/upgrade-python-requirements.yml index 0cadb2c..845c53a 100644 --- a/.github/workflows/upgrade-python-requirements.yml +++ b/.github/workflows/upgrade-python-requirements.yml @@ -1,27 +1,28 @@ -name: Upgrade Python Requirements +name: Update Dependencies on: schedule: - cron: "15 15 1/14 * *" workflow_dispatch: - inputs: - branch: - description: "Target branch against which to create requirements PR" - required: true - default: 'master' jobs: - call-upgrade-python-requirements-workflow: - uses: 
openedx/.github/.github/workflows/upgrade-python-requirements.yml@master - with: - branch: ${{ github.event.inputs.branch || 'master' }} - # optional parameters below; fill in if you'd like github or email notifications - # user_reviewers: "" - # team_reviewers: "" - email_address: "aurora-requirements-update@2u-internal.opsgenie.net" - send_success_notification: true - secrets: - requirements_bot_github_token: ${{ secrets.REQUIREMENTS_BOT_GITHUB_TOKEN }} - requirements_bot_github_email: ${{ secrets.REQUIREMENTS_BOT_GITHUB_EMAIL }} - edx_smtp_username: ${{ secrets.EDX_SMTP_USERNAME }} - edx_smtp_password: ${{ secrets.EDX_SMTP_PASSWORD }} + update-dependencies: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: Update uv.lock + run: uv lock --upgrade + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.REQUIREMENTS_BOT_GITHUB_TOKEN }} + commit-message: "chore: update uv.lock with latest dependency versions" + title: "chore: update dependencies" + body: "Automated dependency update via `uv lock --upgrade`." 
+ branch: "chore/update-dependencies" + delete-branch: true diff --git a/.gitignore b/.gitignore index 9fc011c..dc97db5 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,9 @@ reports/ \#*\# *.egg-info .idea/ + +# uv +.venv/ + +# Kubernetes secrets — never commit real values +deploy/kubernetes/secret.yaml diff --git a/Makefile b/Makefile index 8a66fde..9c4e7f7 100644 --- a/Makefile +++ b/Makefile @@ -1,47 +1,29 @@ -NODE_BIN=./node_modules/.bin - help: - @echo ' ' - @echo 'Makefile for the xqueue-watcher ' - @echo ' ' - @echo 'Usage: ' - @echo ' make requirements install requirements for local development ' - @echo ' make test run python unit-tests ' - @echo ' make clean delete generated byte code and coverage reports ' - @echo ' ' - -COMMON_CONSTRAINTS_TXT=requirements/common_constraints.txt -.PHONY: $(COMMON_CONSTRAINTS_TXT) -$(COMMON_CONSTRAINTS_TXT): - wget -O "$(@)" https://raw.githubusercontent.com/edx/edx-lint/master/edx_lint/files/common_constraints.txt || touch "$(@)" - -upgrade: export CUSTOM_COMPILE_COMMAND=make upgrade -upgrade: $(COMMON_CONSTRAINTS_TXT) - ## update the requirements/*.txt files with the latest packages satisfying requirements/*.in - pip install -q -r requirements/pip_tools.txt - pip-compile --allow-unsafe --rebuild --upgrade -o requirements/pip.txt requirements/pip.in - pip-compile --upgrade -o requirements/pip_tools.txt requirements/pip_tools.in - pip install -q -r requirements/pip.txt - pip install -q -r requirements/pip_tools.txt - pip-compile --upgrade -o requirements/base.txt requirements/base.in - pip-compile --upgrade -o requirements/production.txt requirements/production.in - pip-compile --upgrade -o requirements/test.txt requirements/test.in - pip-compile --upgrade -o requirements/ci.txt requirements/ci.in +@echo '' +@echo 'Makefile for the xqueue-watcher' +@echo '' +@echo 'Usage:' +@echo ' make requirements sync dev dependencies with uv' +@echo ' make test run python unit-tests' +@echo ' make docker-build build the grader 
base Docker image' +@echo ' make local-run run locally with docker-compose' +@echo ' make clean delete generated byte code' +@echo '' requirements: - pip install -qr requirements/production.txt --exists-action w +uv sync -test.requirements: - pip install -q -r requirements/test.txt --exists-action w +test: requirements +uv run pytest --cov=xqueue_watcher --cov-report=xml tests -ci.requirements: - pip install -q -r requirements/ci.txt --exists-action w +docker-build: +docker build -t xqueue-watcher:local . +docker build -t grader-base:local -f grader_support/Dockerfile.base . -test: test.requirements - pytest --cov=xqueue_watcher --cov-report=xml tests +local-run: +docker compose up clean: - find . -name '*.pyc' -delete +find . -name '*.pyc' -delete -# Targets in a Makefile which do not produce an output file with the same name as the target name -.PHONY: help requirements clean +.PHONY: help requirements test docker-build local-run clean diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..c5aea90 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,40 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "xqueue-watcher" +version = "0.3" +description = "XQueue Pull Grader" +readme = "README.md" +requires-python = ">=3.11" +license = { file = "LICENSE.TXT" } +dependencies = [ + "dogstatsd-python", + "docker>=7.0.0", + "kubernetes>=29.0.0", + "path-py", + "requests", +] + +[project.optional-dependencies] +production = [ + "newrelic", +] + +[project.scripts] +xqueue-watcher = "xqueue_watcher.manager:main" + +[dependency-groups] +dev = [ + "coverage", + "mock", + "pytest-cov", +] + +[tool.uv] +package = true +default-groups = ["dev"] + +[tool.hatch.build.targets.wheel] +packages = ["xqueue_watcher", "grader_support"] diff --git a/requirements/base.in b/requirements/base.in deleted file mode 100644 index b8c4b5d..0000000 --- a/requirements/base.in +++ /dev/null @@ -1,10 +0,0 @@ -# Core requirements for 
using this package - --c constraints.txt - -dogstatsd-python -path.py -requests -six - --e git+https://github.com/openedx/codejail.git@4127fc4bd5775cc72aee8d7f0a70e31405e22439#egg=codejail diff --git a/requirements/base.txt b/requirements/base.txt deleted file mode 100644 index efc6c39..0000000 --- a/requirements/base.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# --e git+https://github.com/openedx/codejail.git@4127fc4bd5775cc72aee8d7f0a70e31405e22439#egg=codejail - # via -r requirements/base.in -certifi==2026.2.25 - # via requests -charset-normalizer==3.4.5 - # via requests -dogstatsd-python==0.5.6 - # via -r requirements/base.in -idna==3.11 - # via requests -path==17.1.1 - # via path-py -path-py==12.5.0 - # via -r requirements/base.in -requests==2.32.5 - # via -r requirements/base.in -six==1.17.0 - # via -r requirements/base.in -urllib3==2.6.3 - # via requests diff --git a/requirements/ci.in b/requirements/ci.in deleted file mode 100644 index 44cd1b5..0000000 --- a/requirements/ci.in +++ /dev/null @@ -1,6 +0,0 @@ -# Requirements for running tests in CI --c constraints.txt - --r test.txt - -coverage diff --git a/requirements/ci.txt b/requirements/ci.txt deleted file mode 100644 index 65193c0..0000000 --- a/requirements/ci.txt +++ /dev/null @@ -1,66 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# --e git+https://github.com/openedx/codejail.git@4127fc4bd5775cc72aee8d7f0a70e31405e22439#egg=codejail - # via -r requirements/test.txt -certifi==2026.2.25 - # via - # -r requirements/test.txt - # requests -charset-normalizer==3.4.5 - # via - # -r requirements/test.txt - # requests -coverage[toml]==7.13.4 - # via - # -r requirements/ci.in - # -r requirements/test.txt - # pytest-cov -dogstatsd-python==0.5.6 - # via -r requirements/test.txt -idna==3.11 - # via - # -r requirements/test.txt - # 
requests -iniconfig==2.3.0 - # via - # -r requirements/test.txt - # pytest -mock==5.2.0 - # via -r requirements/test.txt -packaging==26.0 - # via - # -r requirements/test.txt - # pytest -path==17.1.1 - # via - # -r requirements/test.txt - # path-py -path-py==12.5.0 - # via -r requirements/test.txt -pluggy==1.6.0 - # via - # -r requirements/test.txt - # pytest - # pytest-cov -pygments==2.19.2 - # via - # -r requirements/test.txt - # pytest -pytest==9.0.2 - # via - # -r requirements/test.txt - # pytest-cov -pytest-cov==7.0.0 - # via -r requirements/test.txt -requests==2.32.5 - # via -r requirements/test.txt -six==1.17.0 - # via -r requirements/test.txt -urllib3==2.6.3 - # via - # -r requirements/test.txt - # requests diff --git a/requirements/common_constraints.txt b/requirements/common_constraints.txt deleted file mode 100644 index 72cc4cc..0000000 --- a/requirements/common_constraints.txt +++ /dev/null @@ -1,20 +0,0 @@ -# A central location for most common version constraints -# (across edx repos) for pip-installation. -# -# Similar to other constraint files this file doesn't install any packages. -# It specifies version constraints that will be applied if a package is needed. -# When pinning something here, please provide an explanation of why it is a good -# idea to pin this package across all edx repos, Ideally, link to other information -# that will help people in the future to remove the pin when possible. -# Writing an issue against the offending project and linking to it here is good. -# -# Note: Changes to this file will automatically be used by other repos, referencing -# this file from Github directly. It does not require packaging in edx-lint. - -# using LTS django version -Django<6.0 - -# elasticsearch>=7.14.0 includes breaking changes in it which caused issues in discovery upgrade process. 
-# elastic search changelog: https://www.elastic.co/guide/en/enterprise-search/master/release-notes-7.14.0.html -# See https://github.com/openedx/edx-platform/issues/35126 for more info -elasticsearch<7.14.0 diff --git a/requirements/constraints.txt b/requirements/constraints.txt deleted file mode 100644 index 9d005a4..0000000 --- a/requirements/constraints.txt +++ /dev/null @@ -1,11 +0,0 @@ -# Version constraints for pip-installation. -# -# This file doesn't install any packages. It specifies version constraints -# that will be applied if a package is needed. -# -# When pinning something here, please provide an explanation of why. Ideally, -# link to other information that will help people in the future to remove the -# pin when possible. Writing an issue against the offending project and -# linking to it here is good. - --c common_constraints.txt \ No newline at end of file diff --git a/requirements/pip.in b/requirements/pip.in deleted file mode 100644 index 715478c..0000000 --- a/requirements/pip.in +++ /dev/null @@ -1,7 +0,0 @@ --c constraints.txt -# Core dependencies for installing other packages - -pip -setuptools -wheel - diff --git a/requirements/pip.txt b/requirements/pip.txt deleted file mode 100644 index 084d708..0000000 --- a/requirements/pip.txt +++ /dev/null @@ -1,16 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# -packaging==26.0 - # via wheel -wheel==0.46.3 - # via -r requirements/pip.in - -# The following packages are considered to be unsafe in a requirements file: -pip==26.0.1 - # via -r requirements/pip.in -setuptools==82.0.0 - # via -r requirements/pip.in diff --git a/requirements/pip_tools.in b/requirements/pip_tools.in deleted file mode 100644 index caf45a9..0000000 --- a/requirements/pip_tools.in +++ /dev/null @@ -1,4 +0,0 @@ - # Dependencies to run compile tools --c constraints.txt - -pip-tools # Contains pip-compile, used to generate pip requirements files diff 
--git a/requirements/pip_tools.txt b/requirements/pip_tools.txt deleted file mode 100644 index 107789a..0000000 --- a/requirements/pip_tools.txt +++ /dev/null @@ -1,26 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# -build==1.4.0 - # via pip-tools -click==8.3.1 - # via pip-tools -packaging==26.0 - # via - # build - # wheel -pip-tools==7.5.3 - # via -r requirements/pip_tools.in -pyproject-hooks==1.2.0 - # via - # build - # pip-tools -wheel==0.46.3 - # via pip-tools - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/requirements/production.in b/requirements/production.in deleted file mode 100644 index b739cac..0000000 --- a/requirements/production.in +++ /dev/null @@ -1,5 +0,0 @@ -# Production requirements for using this package - --c constraints.txt - --r base.txt diff --git a/requirements/production.txt b/requirements/production.txt deleted file mode 100644 index d60125e..0000000 --- a/requirements/production.txt +++ /dev/null @@ -1,36 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# --e git+https://github.com/openedx/codejail.git@4127fc4bd5775cc72aee8d7f0a70e31405e22439#egg=codejail - # via -r requirements/base.txt -certifi==2026.2.25 - # via - # -r requirements/base.txt - # requests -charset-normalizer==3.4.5 - # via - # -r requirements/base.txt - # requests -dogstatsd-python==0.5.6 - # via -r requirements/base.txt -idna==3.11 - # via - # -r requirements/base.txt - # requests -path==17.1.1 - # via - # -r requirements/base.txt - # path-py -path-py==12.5.0 - # via -r requirements/base.txt -requests==2.32.5 - # via -r requirements/base.txt -six==1.17.0 - # via -r requirements/base.txt -urllib3==2.6.3 - # via - # -r requirements/base.txt - # requests diff --git a/requirements/test.in b/requirements/test.in deleted file mode 100644 index a536201..0000000 --- 
a/requirements/test.in +++ /dev/null @@ -1,8 +0,0 @@ -# Requirements for test runs - --c constraints.txt - --r production.txt - -mock -pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt deleted file mode 100644 index 6ef7fc0..0000000 --- a/requirements/test.txt +++ /dev/null @@ -1,54 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.12 -# by the following command: -# -# make upgrade -# --e git+https://github.com/openedx/codejail.git@4127fc4bd5775cc72aee8d7f0a70e31405e22439#egg=codejail - # via -r requirements/production.txt -certifi==2026.2.25 - # via - # -r requirements/production.txt - # requests -charset-normalizer==3.4.5 - # via - # -r requirements/production.txt - # requests -coverage[toml]==7.13.4 - # via pytest-cov -dogstatsd-python==0.5.6 - # via -r requirements/production.txt -idna==3.11 - # via - # -r requirements/production.txt - # requests -iniconfig==2.3.0 - # via pytest -mock==5.2.0 - # via -r requirements/test.in -packaging==26.0 - # via pytest -path==17.1.1 - # via - # -r requirements/production.txt - # path-py -path-py==12.5.0 - # via -r requirements/production.txt -pluggy==1.6.0 - # via - # pytest - # pytest-cov -pygments==2.19.2 - # via pytest -pytest==9.0.2 - # via pytest-cov -pytest-cov==7.0.0 - # via -r requirements/test.in -requests==2.32.5 - # via -r requirements/production.txt -six==1.17.0 - # via -r requirements/production.txt -urllib3==2.6.3 - # via - # -r requirements/production.txt - # requests diff --git a/setup.py b/setup.py deleted file mode 100644 index d8ce0ec..0000000 --- a/setup.py +++ /dev/null @@ -1,14 +0,0 @@ -from setuptools import setup - - -setup( - name='xqueue_watcher', - version='1.0.0', - description='XQueue Pull Grader', - packages=[ - 'grader_support', - 'xqueue_watcher', - ], - install_requires=open('requirements/production.txt', - 'rt', encoding='utf-8').readlines(), -) diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..7604c1b --- /dev/null +++ b/uv.lock @@ 
-0,0 +1,622 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/9e/bcec3b22c64ecec47d39bf5167c2613efd41898c019dccd4183f6aa5d6a7/charset_normalizer-3.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:610f72c0ee565dfb8ae1241b666119582fdbfe7c0975c175be719f940e110694", size = 279531, upload-time = "2026-03-06T06:00:52.252Z" }, + { url = "https://files.pythonhosted.org/packages/58/12/81fd25f7e7078ab5d1eedbb0fac44be4904ae3370a3bf4533c8f2d159acd/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60d68e820af339df4ae8358c7a2e7596badeb61e544438e489035f9fbf3246a5", size = 188006, upload-time = "2026-03-06T06:00:53.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/6e/f2d30e8c27c1b0736a6520311982cf5286cfc7f6cac77d7bc1325e3a23f2/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b473fc8dca1c3ad8559985794815f06ca3fc71942c969129070f2c3cdf7281", size = 205085, upload-time = "2026-03-06T06:00:55.311Z" }, + { url = "https://files.pythonhosted.org/packages/d0/90/d12cefcb53b5931e2cf792a33718d7126efb116a320eaa0742c7059a95e4/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d4eb8ac7469b2a5d64b5b8c04f84d8bf3ad340f4514b98523805cbf46e3b3923", size = 200545, upload-time = "2026-03-06T06:00:56.532Z" }, + { url = "https://files.pythonhosted.org/packages/03/f4/44d3b830a20e89ff82a3134912d9a1cf6084d64f3b95dcad40f74449a654/charset_normalizer-3.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bcb3227c3d9aaf73eaaab1db7ccd80a8995c509ee9941e2aae060ca6e4e5d81", size = 193863, upload-time = "2026-03-06T06:00:57.823Z" }, + { url = "https://files.pythonhosted.org/packages/25/4b/f212119c18a6320a9d4a730d1b4057875cdeabf21b3614f76549042ef8a8/charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:75ee9c1cce2911581a70a3c0919d8bccf5b1cbc9b0e5171400ec736b4b569497", size = 181827, upload-time = "2026-03-06T06:00:59.323Z" }, + { url = "https://files.pythonhosted.org/packages/74/00/b26158e48b425a202a92965f8069e8a63d9af1481dfa206825d7f74d2a3c/charset_normalizer-3.4.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d1401945cb77787dbd3af2446ff2d75912327c4c3a1526ab7955ecf8600687c", size = 191085, upload-time = "2026-03-06T06:01:00.546Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c2/1c1737bf6fd40335fe53d28fe49afd99ee4143cc57a845e99635ce0b9b6d/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0a45e504f5e1be0bd385935a8e1507c442349ca36f511a47057a71c9d1d6ea9e", size = 190688, upload-time = "2026-03-06T06:01:02.479Z" }, + { url = "https://files.pythonhosted.org/packages/5a/3d/abb5c22dc2ef493cd56522f811246a63c5427c08f3e3e50ab663de27fcf4/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e09f671a54ce70b79a1fc1dc6da3072b7ef7251fadb894ed92d9aa8218465a5f", size = 183077, upload-time = "2026-03-06T06:01:04.231Z" }, + { url = "https://files.pythonhosted.org/packages/44/33/5298ad4d419a58e25b3508e87f2758d1442ff00c2471f8e0403dab8edad5/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d01de5e768328646e6a3fa9e562706f8f6641708c115c62588aef2b941a4f88e", size = 206706, upload-time = "2026-03-06T06:01:05.773Z" }, + { url = "https://files.pythonhosted.org/packages/7b/17/51e7895ac0f87c3b91d276a449ef09f5532a7529818f59646d7a55089432/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:131716d6786ad5e3dc542f5cc6f397ba3339dc0fb87f87ac30e550e8987756af", size = 191665, upload-time = "2026-03-06T06:01:07.473Z" }, + { url = "https://files.pythonhosted.org/packages/90/8f/cce9adf1883e98906dbae380d769b4852bb0fa0004bc7d7a2243418d3ea8/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a374cc0b88aa710e8865dc1bd6edb3743c59f27830f0293ab101e4cf3ce9f85", size = 201950, upload-time = "2026-03-06T06:01:08.973Z" }, + { url = "https://files.pythonhosted.org/packages/08/ca/bce99cd5c397a52919e2769d126723f27a4c037130374c051c00470bcd38/charset_normalizer-3.4.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d31f0d1671e1534e395f9eb84a68e0fb670e1edb1fe819a9d7f564ae3bc4e53f", size = 195830, upload-time = "2026-03-06T06:01:10.155Z" }, + { url = "https://files.pythonhosted.org/packages/87/4f/2e3d023a06911f1281f97b8f036edc9872167036ca6f55cc874a0be6c12c/charset_normalizer-3.4.5-cp311-cp311-win32.whl", hash = "sha256:cace89841c0599d736d3d74a27bc5821288bb47c5441923277afc6059d7fbcb4", 
size = 132029, upload-time = "2026-03-06T06:01:11.706Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1f/a853b73d386521fd44b7f67ded6b17b7b2367067d9106a5c4b44f9a34274/charset_normalizer-3.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:f8102ae93c0bc863b1d41ea0f4499c20a83229f52ed870850892df555187154a", size = 142404, upload-time = "2026-03-06T06:01:12.865Z" }, + { url = "https://files.pythonhosted.org/packages/b4/10/dba36f76b71c38e9d391abe0fd8a5b818790e053c431adecfc98c35cd2a9/charset_normalizer-3.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:ed98364e1c262cf5f9363c3eca8c2df37024f52a8fa1180a3610014f26eac51c", size = 132796, upload-time = "2026-03-06T06:01:14.106Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b6/9ee9c1a608916ca5feae81a344dffbaa53b26b90be58cc2159e3332d44ec/charset_normalizer-3.4.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed97c282ee4f994ef814042423a529df9497e3c666dca19be1d4cd1129dc7ade", size = 280976, upload-time = "2026-03-06T06:01:15.276Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d8/a54f7c0b96f1df3563e9190f04daf981e365a9b397eedfdfb5dbef7e5c6c/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0294916d6ccf2d069727d65973c3a1ca477d68708db25fd758dd28b0827cff54", size = 189356, upload-time = "2026-03-06T06:01:16.511Z" }, + { url = "https://files.pythonhosted.org/packages/42/69/2bf7f76ce1446759a5787cb87d38f6a61eb47dbbdf035cfebf6347292a65/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dc57a0baa3eeedd99fafaef7511b5a6ef4581494e8168ee086031744e2679467", size = 206369, upload-time = "2026-03-06T06:01:17.853Z" }, + { url = "https://files.pythonhosted.org/packages/10/9c/949d1a46dab56b959d9a87272482195f1840b515a3380e39986989a893ae/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:ed1a9a204f317ef879b32f9af507d47e49cd5e7f8e8d5d96358c98373314fc60", size = 203285, upload-time = "2026-03-06T06:01:19.473Z" }, + { url = "https://files.pythonhosted.org/packages/67/5c/ae30362a88b4da237d71ea214a8c7eb915db3eec941adda511729ac25fa2/charset_normalizer-3.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad83b8f9379176c841f8865884f3514d905bcd2a9a3b210eaa446e7d2223e4d", size = 196274, upload-time = "2026-03-06T06:01:20.728Z" }, + { url = "https://files.pythonhosted.org/packages/b2/07/c9f2cb0e46cb6d64fdcc4f95953747b843bb2181bda678dc4e699b8f0f9a/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:a118e2e0b5ae6b0120d5efa5f866e58f2bb826067a646431da4d6a2bdae7950e", size = 184715, upload-time = "2026-03-06T06:01:22.194Z" }, + { url = "https://files.pythonhosted.org/packages/36/64/6b0ca95c44fddf692cd06d642b28f63009d0ce325fad6e9b2b4d0ef86a52/charset_normalizer-3.4.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:754f96058e61a5e22e91483f823e07df16416ce76afa4ebf306f8e1d1296d43f", size = 193426, upload-time = "2026-03-06T06:01:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/50/bc/a730690d726403743795ca3f5bb2baf67838c5fea78236098f324b965e40/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0c300cefd9b0970381a46394902cd18eaf2aa00163f999590ace991989dcd0fc", size = 191780, upload-time = "2026-03-06T06:01:25.053Z" }, + { url = "https://files.pythonhosted.org/packages/97/4f/6c0bc9af68222b22951552d73df4532b5be6447cee32d58e7e8c74ecbb7b/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c108f8619e504140569ee7de3f97d234f0fbae338a7f9f360455071ef9855a95", size = 185805, upload-time = "2026-03-06T06:01:26.294Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/b9/a523fb9b0ee90814b503452b2600e4cbc118cd68714d57041564886e7325/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d1028de43596a315e2720a9849ee79007ab742c06ad8b45a50db8cdb7ed4a82a", size = 208342, upload-time = "2026-03-06T06:01:27.55Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/c59e761dee4464050713e50e27b58266cc8e209e518c0b378c1580c959ba/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:19092dde50335accf365cce21998a1c6dd8eafd42c7b226eb54b2747cdce2fac", size = 193661, upload-time = "2026-03-06T06:01:29.051Z" }, + { url = "https://files.pythonhosted.org/packages/1c/43/729fa30aad69783f755c5ad8649da17ee095311ca42024742701e202dc59/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4354e401eb6dab9aed3c7b4030514328a6c748d05e1c3e19175008ca7de84fb1", size = 204819, upload-time = "2026-03-06T06:01:30.298Z" }, + { url = "https://files.pythonhosted.org/packages/87/33/d9b442ce5a91b96fc0840455a9e49a611bbadae6122778d0a6a79683dd31/charset_normalizer-3.4.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a68766a3c58fde7f9aaa22b3786276f62ab2f594efb02d0a1421b6282e852e98", size = 198080, upload-time = "2026-03-06T06:01:31.478Z" }, + { url = "https://files.pythonhosted.org/packages/56/5a/b8b5a23134978ee9885cee2d6995f4c27cc41f9baded0a9685eabc5338f0/charset_normalizer-3.4.5-cp312-cp312-win32.whl", hash = "sha256:1827734a5b308b65ac54e86a618de66f935a4f63a8a462ff1e19a6788d6c2262", size = 132630, upload-time = "2026-03-06T06:01:33.056Z" }, + { url = "https://files.pythonhosted.org/packages/70/53/e44a4c07e8904500aec95865dc3f6464dc3586a039ef0df606eb3ac38e35/charset_normalizer-3.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:728c6a963dfab66ef865f49286e45239384249672cd598576765acc2a640a636", size = 142856, upload-time = "2026-03-06T06:01:34.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/aa/c5628f7cad591b1cf45790b7a61483c3e36cf41349c98af7813c483fd6e8/charset_normalizer-3.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:75dfd1afe0b1647449e852f4fb428195a7ed0588947218f7ba929f6538487f02", size = 132982, upload-time = "2026-03-06T06:01:35.641Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/9f34ec4bb24aa3fdba1890c1bddb97c8a4be1bd84ef5c42ac2352563ad05/charset_normalizer-3.4.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ac59c15e3f1465f722607800c68713f9fbc2f672b9eb649fe831da4019ae9b23", size = 280788, upload-time = "2026-03-06T06:01:37.126Z" }, + { url = "https://files.pythonhosted.org/packages/0e/09/6003e7ffeb90cc0560da893e3208396a44c210c5ee42efff539639def59b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165c7b21d19365464e8f70e5ce5e12524c58b48c78c1f5a57524603c1ab003f8", size = 188890, upload-time = "2026-03-06T06:01:38.73Z" }, + { url = "https://files.pythonhosted.org/packages/42/1e/02706edf19e390680daa694d17e2b8eab4b5f7ac285e2a51168b4b22ee6b/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:28269983f25a4da0425743d0d257a2d6921ea7d9b83599d4039486ec5b9f911d", size = 206136, upload-time = "2026-03-06T06:01:40.016Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/942c3def1b37baf3cf786bad01249190f3ca3d5e63a84f831e704977de1f/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d27ce22ec453564770d29d03a9506d449efbb9fa13c00842262b2f6801c48cce", size = 202551, upload-time = "2026-03-06T06:01:41.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/0a/af49691938dfe175d71b8a929bd7e4ace2809c0c5134e28bc535660d5262/charset_normalizer-3.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0625665e4ebdddb553ab185de5db7054393af8879fb0c87bd5690d14379d6819", size = 195572, upload-time = "2026-03-06T06:01:43.208Z" }, + { url = "https://files.pythonhosted.org/packages/20/ea/dfb1792a8050a8e694cfbde1570ff97ff74e48afd874152d38163d1df9ae/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:c23eb3263356d94858655b3e63f85ac5d50970c6e8febcdde7830209139cc37d", size = 184438, upload-time = "2026-03-06T06:01:44.755Z" }, + { url = "https://files.pythonhosted.org/packages/72/12/c281e2067466e3ddd0595bfaea58a6946765ace5c72dfa3edc2f5f118026/charset_normalizer-3.4.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e6302ca4ae283deb0af68d2fbf467474b8b6aedcd3dab4db187e07f94c109763", size = 193035, upload-time = "2026-03-06T06:01:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4f/3792c056e7708e10464bad0438a44708886fb8f92e3c3d29ec5e2d964d42/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e51ae7d81c825761d941962450f50d041db028b7278e7b08930b4541b3e45cb9", size = 191340, upload-time = "2026-03-06T06:01:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/e7/86/80ddba897127b5c7a9bccc481b0cd36c8fefa485d113262f0fe4332f0bf4/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:597d10dec876923e5c59e48dbd366e852eacb2b806029491d307daea6b917d7c", size = 185464, upload-time = "2026-03-06T06:01:48.764Z" }, + { url = "https://files.pythonhosted.org/packages/4d/00/b5eff85ba198faacab83e0e4b6f0648155f072278e3b392a82478f8b988b/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5cffde4032a197bd3b42fd0b9509ec60fb70918d6970e4cc773f20fc9180ca67", size = 208014, upload-time = "2026-03-06T06:01:50.371Z" }, + { url = "https://files.pythonhosted.org/packages/c8/11/d36f70be01597fd30850dde8a1269ebc8efadd23ba5785808454f2389bde/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:2da4eedcb6338e2321e831a0165759c0c620e37f8cd044a263ff67493be8ffb3", size = 193297, upload-time = "2026-03-06T06:01:51.933Z" }, + { url = "https://files.pythonhosted.org/packages/1a/1d/259eb0a53d4910536c7c2abb9cb25f4153548efb42800c6a9456764649c0/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:65a126fb4b070d05340a84fc709dd9e7c75d9b063b610ece8a60197a291d0adf", size = 204321, upload-time = "2026-03-06T06:01:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/84/31/faa6c5b9d3688715e1ed1bb9d124c384fe2fc1633a409e503ffe1c6398c1/charset_normalizer-3.4.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7a80a9242963416bd81f99349d5f3fce1843c303bd404f204918b6d75a75fd6", size = 197509, upload-time = "2026-03-06T06:01:56.439Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a5/c7d9dd1503ffc08950b3260f5d39ec2366dd08254f0900ecbcf3a6197c7c/charset_normalizer-3.4.5-cp313-cp313-win32.whl", hash = "sha256:f1d725b754e967e648046f00c4facc42d414840f5ccc670c5670f59f83693e4f", size = 132284, upload-time = "2026-03-06T06:01:57.812Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0f/57072b253af40c8aa6636e6de7d75985624c1eb392815b2f934199340a89/charset_normalizer-3.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:e37bd100d2c5d3ba35db9c7c5ba5a9228cbcffe5c4778dc824b164e5257813d7", size = 142630, upload-time = "2026-03-06T06:01:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/31/41/1c4b7cc9f13bd9d369ce3bc993e13d374ce25fa38a2663644283ecf422c1/charset_normalizer-3.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:93b3b2cc5cf1b8743660ce77a4f45f3f6d1172068207c1defc779a36eea6bb36", size = 133254, upload-time = "2026-03-06T06:02:00.281Z" }, + { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, 
upload-time = "2026-03-06T06:02:01.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, + { url = "https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, upload-time = "2026-03-06T06:02:14.83Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, + { url = "https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, + { url = "https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.13.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, + { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, + { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, + { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = 
"2026-02-09T12:56:35.749Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, + { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" }, + { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, + { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time 
= "2026-02-09T12:56:45.155Z" }, + { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, + { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, + { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, + { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, + { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, + { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, + { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, + { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, + { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, + { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = 
"2026-02-09T12:57:14.085Z" }, + { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, + { url = "https://files.pythonhosted.org/packages/db/23/aad45061a31677d68e47499197a131eea55da4875d16c1f42021ab963503/coverage-7.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b66a2da594b6068b48b2692f043f35d4d3693fb639d5ea8b39533c2ad9ac3ab9", size = 219474, upload-time = "2026-02-09T12:57:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/a5/70/9b8b67a0945f3dfec1fd896c5cefb7c19d5a3a6d74630b99a895170999ae/coverage-7.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3599eb3992d814d23b35c536c28df1a882caa950f8f507cef23d1cbf334995ac", size = 219844, upload-time = "2026-02-09T12:57:20.66Z" }, + { url = "https://files.pythonhosted.org/packages/97/fd/7e859f8fab324cef6c4ad7cff156ca7c489fef9179d5749b0c8d321281c2/coverage-7.13.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:93550784d9281e374fb5a12bf1324cc8a963fd63b2d2f223503ef0fd4aa339ea", size = 250832, upload-time = "2026-02-09T12:57:22.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/dc/b2442d10020c2f52617828862d8b6ee337859cd8f3a1f13d607dddda9cf7/coverage-7.13.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b720ce6a88a2755f7c697c23268ddc47a571b88052e6b155224347389fdf6a3b", size = 253434, upload-time = "2026-02-09T12:57:23.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/88/6728a7ad17428b18d836540630487231f5470fb82454871149502f5e5aa2/coverage-7.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b322db1284a2ed3aa28ffd8ebe3db91c929b7a333c0820abec3d838ef5b3525", size = 254676, upload-time = "2026-02-09T12:57:24.774Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bc/21244b1b8cedf0dff0a2b53b208015fe798d5f2a8d5348dbfece04224fff/coverage-7.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4594c67d8a7c89cf922d9df0438c7c7bb022ad506eddb0fdb2863359ff78242", size = 256807, upload-time = "2026-02-09T12:57:26.125Z" }, + { url = "https://files.pythonhosted.org/packages/97/a0/ddba7ed3251cff51006737a727d84e05b61517d1784a9988a846ba508877/coverage-7.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:53d133df809c743eb8bce33b24bcababb371f4441340578cd406e084d94a6148", size = 251058, upload-time = "2026-02-09T12:57:27.614Z" }, + { url = "https://files.pythonhosted.org/packages/9b/55/e289addf7ff54d3a540526f33751951bf0878f3809b47f6dfb3def69c6f7/coverage-7.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76451d1978b95ba6507a039090ba076105c87cc76fc3efd5d35d72093964d49a", size = 252805, upload-time = "2026-02-09T12:57:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/13/4e/cc276b1fa4a59be56d96f1dabddbdc30f4ba22e3b1cd42504c37b3313255/coverage-7.13.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f57b33491e281e962021de110b451ab8a24182589be17e12a22c79047935e23", size = 250766, upload-time = "2026-02-09T12:57:30.522Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/1093b8f93018f8b41a8cf29636c9292502f05e4a113d4d107d14a3acd044/coverage-7.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1731dc33dc276dafc410a885cbf5992f1ff171393e48a21453b78727d090de80", size = 254923, upload-time = "2026-02-09T12:57:31.946Z" }, + { 
url = "https://files.pythonhosted.org/packages/8b/55/ea2796da2d42257f37dbea1aab239ba9263b31bd91d5527cdd6db5efe174/coverage-7.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:bd60d4fe2f6fa7dff9223ca1bbc9f05d2b6697bc5961072e5d3b952d46e1b1ea", size = 250591, upload-time = "2026-02-09T12:57:33.842Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/7c4bb72aacf8af5020675aa633e59c1fbe296d22aed191b6a5b711eb2bc7/coverage-7.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9181a3ccead280b828fae232df12b16652702b49d41e99d657f46cc7b1f6ec7a", size = 252364, upload-time = "2026-02-09T12:57:35.743Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/a8d2ec0146479c20bbaa7181b5b455a0c41101eed57f10dd19a78ab44c80/coverage-7.13.4-cp313-cp313-win32.whl", hash = "sha256:f53d492307962561ac7de4cd1de3e363589b000ab69617c6156a16ba7237998d", size = 222010, upload-time = "2026-02-09T12:57:37.25Z" }, + { url = "https://files.pythonhosted.org/packages/e2/0c/dbfafbe90a185943dcfbc766fe0e1909f658811492d79b741523a414a6cc/coverage-7.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:e6f70dec1cc557e52df5306d051ef56003f74d56e9c4dd7ddb07e07ef32a84dd", size = 222818, upload-time = "2026-02-09T12:57:38.734Z" }, + { url = "https://files.pythonhosted.org/packages/04/d1/934918a138c932c90d78301f45f677fb05c39a3112b96fd2c8e60503cdc7/coverage-7.13.4-cp313-cp313-win_arm64.whl", hash = "sha256:fb07dc5da7e849e2ad31a5d74e9bece81f30ecf5a42909d0a695f8bd1874d6af", size = 221438, upload-time = "2026-02-09T12:57:40.223Z" }, + { url = "https://files.pythonhosted.org/packages/52/57/ee93ced533bcb3e6df961c0c6e42da2fc6addae53fb95b94a89b1e33ebd7/coverage-7.13.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40d74da8e6c4b9ac18b15331c4b5ebc35a17069410cad462ad4f40dcd2d50c0d", size = 220165, upload-time = "2026-02-09T12:57:41.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/e0/969fc285a6fbdda49d91af278488d904dcd7651b2693872f0ff94e40e84a/coverage-7.13.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4223b4230a376138939a9173f1bdd6521994f2aff8047fae100d6d94d50c5a12", size = 220516, upload-time = "2026-02-09T12:57:44.215Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b8/9531944e16267e2735a30a9641ff49671f07e8138ecf1ca13db9fd2560c7/coverage-7.13.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1d4be36a5114c499f9f1f9195e95ebf979460dbe2d88e6816ea202010ba1c34b", size = 261804, upload-time = "2026-02-09T12:57:45.989Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f3/e63df6d500314a2a60390d1989240d5f27318a7a68fa30ad3806e2a9323e/coverage-7.13.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:200dea7d1e8095cc6e98cdabe3fd1d21ab17d3cee6dab00cadbb2fe35d9c15b9", size = 263885, upload-time = "2026-02-09T12:57:47.42Z" }, + { url = "https://files.pythonhosted.org/packages/f3/67/7654810de580e14b37670b60a09c599fa348e48312db5b216d730857ffe6/coverage-7.13.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8eb931ee8e6d8243e253e5ed7336deea6904369d2fd8ae6e43f68abbf167092", size = 266308, upload-time = "2026-02-09T12:57:49.345Z" }, + { url = "https://files.pythonhosted.org/packages/37/6f/39d41eca0eab3cc82115953ad41c4e77935286c930e8fad15eaed1389d83/coverage-7.13.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:75eab1ebe4f2f64d9509b984f9314d4aa788540368218b858dad56dc8f3e5eb9", size = 267452, upload-time = "2026-02-09T12:57:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/50/6d/39c0fbb8fc5cd4d2090811e553c2108cf5112e882f82505ee7495349a6bf/coverage-7.13.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c35eb28c1d085eb7d8c9b3296567a1bebe03ce72962e932431b9a61f28facf26", size = 261057, upload-time = "2026-02-09T12:57:52.447Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/60010c669df5fa603bb5a97fb75407e191a846510da70ac657eb696b7fce/coverage-7.13.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb88b316ec33760714a4720feb2816a3a59180fd58c1985012054fa7aebee4c2", size = 263875, upload-time = "2026-02-09T12:57:53.938Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/63b22a6bdbd17f1f96e9ed58604c2a6b0e72a9133e37d663bef185877cf6/coverage-7.13.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d41eead3cc673cbd38a4417deb7fd0b4ca26954ff7dc6078e33f6ff97bed940", size = 261500, upload-time = "2026-02-09T12:57:56.012Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/69f86ba1ad85bc3ad240e4c0e57a2e620fbc0e1645a47b5c62f0e941ad7f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:fb26a934946a6afe0e326aebe0730cdff393a8bc0bbb65a2f41e30feddca399c", size = 265212, upload-time = "2026-02-09T12:57:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f2/5f65a278a8c2148731831574c73e42f57204243d33bedaaf18fa79c5958f/coverage-7.13.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:dae88bc0fc77edaa65c14be099bd57ee140cf507e6bfdeea7938457ab387efb0", size = 260398, upload-time = "2026-02-09T12:57:59.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/80/6e8280a350ee9fea92f14b8357448a242dcaa243cb2c72ab0ca591f66c8c/coverage-7.13.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:845f352911777a8e722bfce168958214951e07e47e5d5d9744109fa5fe77f79b", size = 262584, upload-time = "2026-02-09T12:58:01.129Z" }, + { url = "https://files.pythonhosted.org/packages/22/63/01ff182fc95f260b539590fb12c11ad3e21332c15f9799cb5e2386f71d9f/coverage-7.13.4-cp313-cp313t-win32.whl", hash = "sha256:2fa8d5f8de70688a28240de9e139fa16b153cc3cbb01c5f16d88d6505ebdadf9", size = 222688, upload-time = 
"2026-02-09T12:58:02.736Z" }, + { url = "https://files.pythonhosted.org/packages/a9/43/89de4ef5d3cd53b886afa114065f7e9d3707bdb3e5efae13535b46ae483d/coverage-7.13.4-cp313-cp313t-win_amd64.whl", hash = "sha256:9351229c8c8407645840edcc277f4a2d44814d1bc34a2128c11c2a031d45a5dd", size = 223746, upload-time = "2026-02-09T12:58:05.362Z" }, + { url = "https://files.pythonhosted.org/packages/35/39/7cf0aa9a10d470a5309b38b289b9bb07ddeac5d61af9b664fe9775a4cb3e/coverage-7.13.4-cp313-cp313t-win_arm64.whl", hash = "sha256:30b8d0512f2dc8c8747557e8fb459d6176a2c9e5731e2b74d311c03b78451997", size = 222003, upload-time = "2026-02-09T12:58:06.952Z" }, + { url = "https://files.pythonhosted.org/packages/92/11/a9cf762bb83386467737d32187756a42094927150c3e107df4cb078e8590/coverage-7.13.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:300deaee342f90696ed186e3a00c71b5b3d27bffe9e827677954f4ee56969601", size = 219522, upload-time = "2026-02-09T12:58:08.623Z" }, + { url = "https://files.pythonhosted.org/packages/d3/28/56e6d892b7b052236d67c95f1936b6a7cf7c3e2634bf27610b8cbd7f9c60/coverage-7.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29e3220258d682b6226a9b0925bc563ed9a1ebcff3cad30f043eceea7eaf2689", size = 219855, upload-time = "2026-02-09T12:58:10.176Z" }, + { url = "https://files.pythonhosted.org/packages/e5/69/233459ee9eb0c0d10fcc2fe425a029b3fa5ce0f040c966ebce851d030c70/coverage-7.13.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:391ee8f19bef69210978363ca930f7328081c6a0152f1166c91f0b5fdd2a773c", size = 250887, upload-time = "2026-02-09T12:58:12.503Z" }, + { url = "https://files.pythonhosted.org/packages/06/90/2cdab0974b9b5bbc1623f7876b73603aecac11b8d95b85b5b86b32de5eab/coverage-7.13.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0dd7ab8278f0d58a0128ba2fca25824321f05d059c1441800e934ff2efa52129", size = 253396, upload-time = "2026-02-09T12:58:14.615Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/15/ea4da0f85bf7d7b27635039e649e99deb8173fe551096ea15017f7053537/coverage-7.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78cdf0d578b15148b009ccf18c686aa4f719d887e76e6b40c38ffb61d264a552", size = 254745, upload-time = "2026-02-09T12:58:16.162Z" }, + { url = "https://files.pythonhosted.org/packages/99/11/bb356e86920c655ca4d61daee4e2bbc7258f0a37de0be32d233b561134ff/coverage-7.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:48685fee12c2eb3b27c62f2658e7ea21e9c3239cba5a8a242801a0a3f6a8c62a", size = 257055, upload-time = "2026-02-09T12:58:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0f/9ae1f8cb17029e09da06ca4e28c9e1d5c1c0a511c7074592e37e0836c915/coverage-7.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4e83efc079eb39480e6346a15a1bcb3e9b04759c5202d157e1dd4303cd619356", size = 250911, upload-time = "2026-02-09T12:58:19.495Z" }, + { url = "https://files.pythonhosted.org/packages/89/3a/adfb68558fa815cbc29747b553bc833d2150228f251b127f1ce97e48547c/coverage-7.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecae9737b72408d6a950f7e525f30aca12d4bd8dd95e37342e5beb3a2a8c4f71", size = 252754, upload-time = "2026-02-09T12:58:21.064Z" }, + { url = "https://files.pythonhosted.org/packages/32/b1/540d0c27c4e748bd3cd0bd001076ee416eda993c2bae47a73b7cc9357931/coverage-7.13.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ae4578f8528569d3cf303fef2ea569c7f4c4059a38c8667ccef15c6e1f118aa5", size = 250720, upload-time = "2026-02-09T12:58:22.622Z" }, + { url = "https://files.pythonhosted.org/packages/c7/95/383609462b3ffb1fe133014a7c84fc0dd01ed55ac6140fa1093b5af7ebb1/coverage-7.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6fdef321fdfbb30a197efa02d48fcd9981f0d8ad2ae8903ac318adc653f5df98", size = 254994, upload-time = "2026-02-09T12:58:24.548Z" }, + { 
url = "https://files.pythonhosted.org/packages/f7/ba/1761138e86c81680bfc3c49579d66312865457f9fe405b033184e5793cb3/coverage-7.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b0f6ccf3dbe577170bebfce1318707d0e8c3650003cb4b3a9dd744575daa8b5", size = 250531, upload-time = "2026-02-09T12:58:26.271Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8e/05900df797a9c11837ab59c4d6fe94094e029582aab75c3309a93e6fb4e3/coverage-7.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75fcd519f2a5765db3f0e391eb3b7d150cce1a771bf4c9f861aeab86c767a3c0", size = 252189, upload-time = "2026-02-09T12:58:27.807Z" }, + { url = "https://files.pythonhosted.org/packages/00/bd/29c9f2db9ea4ed2738b8a9508c35626eb205d51af4ab7bf56a21a2e49926/coverage-7.13.4-cp314-cp314-win32.whl", hash = "sha256:8e798c266c378da2bd819b0677df41ab46d78065fb2a399558f3f6cae78b2fbb", size = 222258, upload-time = "2026-02-09T12:58:29.441Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4d/1f8e723f6829977410efeb88f73673d794075091c8c7c18848d273dc9d73/coverage-7.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:245e37f664d89861cf2329c9afa2c1fe9e6d4e1a09d872c947e70718aeeac505", size = 223073, upload-time = "2026-02-09T12:58:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/51/5b/84100025be913b44e082ea32abcf1afbf4e872f5120b7a1cab1d331b1e13/coverage-7.13.4-cp314-cp314-win_arm64.whl", hash = "sha256:ad27098a189e5838900ce4c2a99f2fe42a0bf0c2093c17c69b45a71579e8d4a2", size = 221638, upload-time = "2026-02-09T12:58:32.599Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e4/c884a405d6ead1370433dad1e3720216b4f9fd8ef5b64bfd984a2a60a11a/coverage-7.13.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:85480adfb35ffc32d40918aad81b89c69c9cc5661a9b8a81476d3e645321a056", size = 220246, upload-time = "2026-02-09T12:58:34.181Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/5c/4d7ed8b23b233b0fffbc9dfec53c232be2e695468523242ea9fd30f97ad2/coverage-7.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79be69cf7f3bf9b0deeeb062eab7ac7f36cd4cc4c4dd694bd28921ba4d8596cc", size = 220514, upload-time = "2026-02-09T12:58:35.704Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6f/3284d4203fd2f28edd73034968398cd2d4cb04ab192abc8cff007ea35679/coverage-7.13.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:caa421e2684e382c5d8973ac55e4f36bed6821a9bad5c953494de960c74595c9", size = 261877, upload-time = "2026-02-09T12:58:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/09/aa/b672a647bbe1556a85337dc95bfd40d146e9965ead9cc2fe81bde1e5cbce/coverage-7.13.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:14375934243ee05f56c45393fe2ce81fe5cc503c07cee2bdf1725fb8bef3ffaf", size = 264004, upload-time = "2026-02-09T12:58:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/a1/aa384dbe9181f98bba87dd23dda436f0c6cf2e148aecbb4e50fc51c1a656/coverage-7.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25a41c3104d08edb094d9db0d905ca54d0cd41c928bb6be3c4c799a54753af55", size = 266408, upload-time = "2026-02-09T12:58:41.852Z" }, + { url = "https://files.pythonhosted.org/packages/53/5e/5150bf17b4019bc600799f376bb9606941e55bd5a775dc1e096b6ffea952/coverage-7.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f01afcff62bf9a08fb32b2c1d6e924236c0383c02c790732b6537269e466a72", size = 267544, upload-time = "2026-02-09T12:58:44.093Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/f1de5c675987a4a7a672250d2c5c9d73d289dbf13410f00ed7181d8017dd/coverage-7.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:eb9078108fbf0bcdde37c3f4779303673c2fa1fe8f7956e68d447d0dd426d38a", size = 260980, upload-time = "2026-02-09T12:58:45.721Z" }, + { url = "https://files.pythonhosted.org/packages/b3/e3/fe758d01850aa172419a6743fe76ba8b92c29d181d4f676ffe2dae2ba631/coverage-7.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e086334e8537ddd17e5f16a344777c1ab8194986ec533711cbe6c41cde841b6", size = 263871, upload-time = "2026-02-09T12:58:47.334Z" }, + { url = "https://files.pythonhosted.org/packages/b6/76/b829869d464115e22499541def9796b25312b8cf235d3bb00b39f1675395/coverage-7.13.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:725d985c5ab621268b2edb8e50dfe57633dc69bda071abc470fed55a14935fd3", size = 261472, upload-time = "2026-02-09T12:58:48.995Z" }, + { url = "https://files.pythonhosted.org/packages/14/9e/caedb1679e73e2f6ad240173f55218488bfe043e38da577c4ec977489915/coverage-7.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3c06f0f1337c667b971ca2f975523347e63ec5e500b9aa5882d91931cd3ef750", size = 265210, upload-time = "2026-02-09T12:58:51.178Z" }, + { url = "https://files.pythonhosted.org/packages/3a/10/0dd02cb009b16ede425b49ec344aba13a6ae1dc39600840ea6abcb085ac4/coverage-7.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:590c0ed4bf8e85f745e6b805b2e1c457b2e33d5255dd9729743165253bc9ad39", size = 260319, upload-time = "2026-02-09T12:58:53.081Z" }, + { url = "https://files.pythonhosted.org/packages/92/8e/234d2c927af27c6d7a5ffad5bd2cf31634c46a477b4c7adfbfa66baf7ebb/coverage-7.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:eb30bf180de3f632cd043322dad5751390e5385108b2807368997d1a92a509d0", size = 262638, upload-time = "2026-02-09T12:58:55.258Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/e5547c8ff6964e5965c35a480855911b61509cce544f4d442caa759a0702/coverage-7.13.4-cp314-cp314t-win32.whl", hash = "sha256:c4240e7eded42d131a2d2c4dec70374b781b043ddc79a9de4d55ca71f8e98aea", size = 223040, upload-time = 
"2026-02-09T12:58:56.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/96/38086d58a181aac86d503dfa9c47eb20715a79c3e3acbdf786e92e5c09a8/coverage-7.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:4c7d3cc01e7350f2f0f6f7036caaf5673fb56b6998889ccfe9e1c1fe75a9c932", size = 224148, upload-time = "2026-02-09T12:58:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/ce/72/8d10abd3740a0beb98c305e0c3faf454366221c0f37a8bcf8f60020bb65a/coverage-7.13.4-cp314-cp314t-win_arm64.whl", hash = "sha256:23e3f687cf945070d1c90f85db66d11e3025665d8dafa831301a0e0038f3db9b", size = 222172, upload-time = "2026-02-09T12:59:00.396Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + +[[package]] +name = "dogstatsd-python" +version = "0.5.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d0/1e/5214d0636f0c3ac0647c70b7c47b96a8b8c5e0202726b0a6cf332c8492ed/dogstatsd-python-0.5.6.tar.gz", hash = "sha256:e2ff2b817995d2ce01bc3d66572be0608b9371f29aae16aa93c0d6a3a58489de", size = 3272, upload-time = "2015-03-20T14:15:50.887Z" } + +[[package]] +name = "durationpy" +version = "0.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/a4/e44218c2b394e31a6dd0d6b095c4e1f32d0be54c2a4b250032d717647bab/durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba", size = 3335, upload-time = "2025-05-17T13:52:37.26Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", 
size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "kubernetes" +version = "35.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "durationpy" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "requests-oauthlib" }, + { name = "six" }, + { name = "urllib3" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/8f/85bf51ad4150f64e8c665daf0d9dfe9787ae92005efb9a4d1cba592bd79d/kubernetes-35.0.0.tar.gz", hash = "sha256:3d00d344944239821458b9efd484d6df9f011da367ecb155dadf9513f05f09ee", size = 1094642, upload-time = "2026-01-16T01:05:27.76Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/70/05b685ea2dffcb2adbf3cdcea5d8865b7bc66f67249084cf845012a0ff13/kubernetes-35.0.0-py2.py3-none-any.whl", hash = "sha256:39e2b33b46e5834ef6c3985ebfe2047ab39135d41de51ce7641a7ca5b372a13d", size = 2017602, upload-time = "2026-01-16T01:05:25.991Z" }, +] + +[[package]] +name = "mock" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/8c/14c2ae915e5f9dca5a22edd68b35be94400719ccfa068a03e0fb63d0f6f6/mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0", size = 92796, upload-time = "2025-03-03T12:31:42.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/d9/617e6af809bf3a1d468e0d58c3997b1dc219a9a9202e650d30c2fc85d481/mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f", size = 31617, upload-time = 
"2025-03-03T12:31:41.518Z" }, +] + +[[package]] +name = "newrelic" +version = "11.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/09/7031e28416bd79111e4dc2986f54bbfdbd707bf712210388ff649a2528f0/newrelic-11.5.0.tar.gz", hash = "sha256:39607be7a43d1bff59119039b0e9c145bf27debbc2075ec24549817a8faf5a5c", size = 1415240, upload-time = "2026-02-20T00:17:29.971Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/14/19628204ea57072ee70e1aab87896058b731ad32aa2c62d0b3f5257417d5/newrelic-11.5.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3adbe17625cb62495b108b3ed9ecb3f2ed248e024c1f07e6fce8dd86d8394d1", size = 921510, upload-time = "2026-02-20T00:16:43.125Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b7/ae54f75597b9c61fcde31745ca007f9a157db9ecef416451193bad323fe1/newrelic-11.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de40cb14a0dea7c29d9d0155c2a3aebd56c2fa12c9066692bf38a8df90f3d6a6", size = 923513, upload-time = "2026-02-20T00:16:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/ca/93/b4d424bbc65cd208f0cc8aa468b1b95b718bf00c6880f9f5ea8d2b90f488/newrelic-11.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6616b785a2deccb74ae7313799dcba4929ee9c707fbf1f15bc7ef0364facf7f4", size = 921816, upload-time = "2026-02-20T00:16:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/af/ba/a2d09b8dc0029626155d671ae8c78e0b29461a9af4cd5103ee7d4c99a484/newrelic-11.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:07b0dce0d4d55679a80f617e03d8292b1dc1e8b4391f5b76cf13dc0768ed5eeb", size = 921002, upload-time = "2026-02-20T00:16:49.185Z" }, + { url = "https://files.pythonhosted.org/packages/dd/1e/12979ec4cfc91ab44b27a7038b95d97f54ec47d9ecd1fc4518ded4c9a5cd/newrelic-11.5.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", 
hash = "sha256:6d47ba87b86d49b977e96add31c0d41fa2cf7044622e49080d5fb1b0a5715799", size = 928435, upload-time = "2026-02-20T00:16:51.163Z" }, + { url = "https://files.pythonhosted.org/packages/0e/34/502ee15f0357f01a9430a371f5b01aec7f0be2038e94f46f650c9481c703/newrelic-11.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:710bc92d0a74ca429b19d39d04b3e619f789547f25f2c160da2844de8d5cc688", size = 929082, upload-time = "2026-02-20T00:16:53.027Z" }, + { url = "https://files.pythonhosted.org/packages/86/20/d5af4fec5c812d87965bcc538efb559a48d026799d535d8908166a53cde7/newrelic-11.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:445293bd72050f04eb15385327903a2d2e339e42b1d284c850cf0686c37eec4a", size = 927249, upload-time = "2026-02-20T00:16:54.973Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ce/6c4a535344a3675deea29cf8b9e12ae75cf74377976cdea44fdf6f650072/newrelic-11.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0c48016cb9ed11e5dbf9ffd41ed4b2e0e3223a94677574ab9f892d2c2010f181", size = 927830, upload-time = "2026-02-20T00:16:56.963Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ff/efa664e6328127b64e1a225d5fc7680208db5cea9154d33e638759c029d9/newrelic-11.5.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0dd6bc4518c40dd93b00d39fbb6fbe73db0baa7bc78c5ffab8fac645467f818", size = 928545, upload-time = "2026-02-20T00:16:59.049Z" }, + { url = "https://files.pythonhosted.org/packages/52/a6/040d2b15c1f93a6ca97507042cbe533c7493898b94d2ea0444de273ffe58/newrelic-11.5.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2ad2f2d5edb209c64ad68055ac41ae5e40a644a29729eb9fb3f564b8ee07974", size = 929206, upload-time = "2026-02-20T00:17:00.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/f9/bcb9392e8ed966e907e7445d65316503c6b5948f135a3890164e91435bc5/newrelic-11.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:884c789ab4850dfae8d6cfc9441ff4ab0b82636a6e6564d04a30392ae6c23bdc", size = 927368, upload-time = "2026-02-20T00:17:02.997Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8c/97c2bdc7d45161d601f839c737c0dd1b53351e3819e35b6c29945444c231/newrelic-11.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc8f15350f75eed04b64a4258038d7b33fdc568e3c4ef2dad4088302d20126ff", size = 927956, upload-time = "2026-02-20T00:17:04.655Z" }, + { url = "https://files.pythonhosted.org/packages/59/23/d626af55d35e46f822da657f5a0f117b4947b1e36b90371352d7e9b3c304/newrelic-11.5.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:051227445ab789128a490eb9cb3fe50af271488c9f15012e3eb937c46c4c5a1c", size = 927552, upload-time = "2026-02-20T00:17:06.283Z" }, + { url = "https://files.pythonhosted.org/packages/bc/59/2bb56313a48ce69ddbdca25cf49ea4deca76bd9922e30f2b07bf0279861c/newrelic-11.5.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e7085cbe9e2fd2c98fbcd67e82b245a9924522373033d80a712bb68b534fffa8", size = 928985, upload-time = "2026-02-20T00:17:08.846Z" }, + { url = "https://files.pythonhosted.org/packages/07/a6/0e5843797442620b8874ec91f5c242dddb49361c3edbf3b9879c683d8e48/newrelic-11.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:799c7c7b1cfa0e7a69982ef4f356eba6063f234d49c2f66e01205220768b89c8", size = 927158, upload-time = "2026-02-20T00:17:10.52Z" }, + { url = "https://files.pythonhosted.org/packages/e7/60/5727f3a6c64374e39f78ef1434fbb77a032c7a2d6d570c4fb9c81154f823/newrelic-11.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c2d61a523a5fcdaee58c547081b81d61ae613dd64a1c4c781c715c6e4bf5a54a", size = 927059, upload-time = "2026-02-20T00:17:12.439Z" }, +] + +[[package]] +name = "oauthlib" 
+version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "path" +version = "17.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/52/a7bdd5ef8488977d354b7915d1e75009bebbd04f73eff14e52372d5e9435/path-17.1.1.tar.gz", hash = "sha256:2dfcbfec8b4d960f3469c52acf133113c2a8bf12ac7b98d629fa91af87248d42", size = 50528, upload-time = "2025-07-27T20:40:23.79Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/50/11c9ee1ede64b45d687fd36eb8768dafc57afc78b4d83396920cfd69ed30/path-17.1.1-py3-none-any.whl", hash = "sha256:ec7e136df29172e5030dd07e037d55f676bdb29d15bfa09b80da29d07d3b9303", size 
= 23936, upload-time = "2025-07-27T20:40:22.453Z" }, +] + +[[package]] +name = "path-py" +version = "12.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "path" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e3/81be70016d58ade0f516191fa80152daba5453d0b07ce648d9daae86a188/path.py-12.5.0.tar.gz", hash = "sha256:8d885e8b2497aed005703d94e0fd97943401f035e42a136810308bff034529a8", size = 15713, upload-time = "2020-07-27T23:53:38.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/04/130b7a538c25693c85c4dee7e25d126ebf5511b1eb7320e64906687b159e/path.py-12.5.0-py3-none-any.whl", hash = "sha256:a43e82eb2c344c3fd0b9d6352f6b856f40b8b7d3d65cc05978b42c3715668496", size = 2251, upload-time = "2020-07-27T23:53:37.293Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] 
+name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = 
"sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url 
= "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "oauthlib" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" 
}, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, 
+] + +[[package]] +name = "xqueue-watcher" +version = "0.3" +source = { editable = "." } +dependencies = [ + { name = "docker" }, + { name = "dogstatsd-python" }, + { name = "kubernetes" }, + { name = "path-py" }, + { name = "requests" }, +] + +[package.optional-dependencies] +production = [ + { name = "newrelic" }, +] + +[package.dev-dependencies] +dev = [ + { name = "coverage" }, + { name = "mock" }, + { name = "pytest-cov" }, +] + +[package.metadata] +requires-dist = [ + { name = "docker", specifier = ">=7.0.0" }, + { name = "dogstatsd-python" }, + { name = "kubernetes", specifier = ">=29.0.0" }, + { name = "newrelic", marker = "extra == 'production'" }, + { name = "path-py" }, + { name = "requests" }, +] +provides-extras = ["production"] + +[package.metadata.requires-dev] +dev = [ + { name = "coverage" }, + { name = "mock" }, + { name = "pytest-cov" }, +] From 1d5db680f2115c88521635b315562e1ec1b0d71f Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 11:47:49 -0400 Subject: [PATCH 02/25] fix: remove AppArmor/codejail hard dependency; make codejail optional MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Remove six (Python 2 compat shim) from imports and SUPPORT_FILES in jailedgrader.py — Python 3 only going forward - Wrap codejail imports in try/except in jailedgrader.py and manager.py; raise RuntimeError with clear message directing users to ContainerGrader - Fix Path.abspath() -> Path.absolute() (breaking API change in path v17) in grader.py and jailedgrader.py - Update Dockerfile: ubuntu:xenial -> python:3.11-slim, remove apparmor and language-pack-en packages, fix layer ordering - Update test_codejail_config to use fork_per_item=False to avoid multiprocessing state-inheritance failure on Python 3.14 forkserver default - Update conf.d/600.json example to use ContainerGrader handler Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 36 
+++++++++++++++++++--------------- conf.d/600.json | 8 ++++++-- tests/test_manager.py | 5 ++++- xqueue_watcher/grader.py | 4 ++-- xqueue_watcher/jailedgrader.py | 33 ++++++++++++++++--------------- xqueue_watcher/manager.py | 16 +++++++++++---- 6 files changed, 61 insertions(+), 41 deletions(-) diff --git a/Dockerfile b/Dockerfile index 89ee5bb..d199d17 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,26 +1,30 @@ -FROM ubuntu:xenial as openedx +FROM python:3.11-slim AS base -RUN apt update && \ - apt install -y git-core language-pack-en apparmor apparmor-utils python python-pip python-dev && \ - pip install --upgrade pip setuptools && \ - rm -rf /var/lib/apt/lists/* +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + LANG=en_US.UTF-8 \ + LANGUAGE=en_US:en \ + LC_ALL=en_US.UTF-8 -RUN locale-gen en_US.UTF-8 -ENV LANG en_US.UTF-8 -ENV LANGUAGE en_US:en -ENV LC_ALL en_US.UTF-8 +RUN apt-get update && \ + apt-get install -y --no-install-recommends git-core && \ + rm -rf /var/lib/apt/lists/* + +RUN useradd -m --shell /bin/false app WORKDIR /edx/app/xqueue_watcher COPY requirements /edx/app/xqueue_watcher/requirements -RUN pip install -r requirements/production.txt +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir -r requirements/production.txt -CMD python -m xqueue_watcher -d /edx/etc/xqueue_watcher +COPY . /edx/app/xqueue_watcher -RUN useradd -m --shell /bin/false app USER app -COPY . 
/edx/app/xqueue_watcher +CMD ["python", "-m", "xqueue_watcher", "-d", "/edx/etc/xqueue_watcher"] -FROM openedx as edx.org -RUN pip install newrelic -CMD newrelic-admin run-program python -m xqueue_watcher -d /edx/etc/xqueue_watcher +FROM base AS edx.org +USER root +RUN pip install --no-cache-dir newrelic +USER app +CMD ["newrelic-admin", "run-program", "python", "-m", "xqueue_watcher", "-d", "/edx/etc/xqueue_watcher"] diff --git a/conf.d/600.json b/conf.d/600.json index 10565e8..8f0632e 100644 --- a/conf.d/600.json +++ b/conf.d/600.json @@ -5,10 +5,14 @@ "AUTH": ["lms", "lms"], "HANDLERS": [ { - "HANDLER": "xqueue_watcher.grader.Grader", + "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", "KWARGS": { "grader_root": "../data/6.00x/graders/", - "gradepy": "../data/6.00x/graders/grade.py" + "image": "grader-base:local", + "backend": "docker", + "cpu_limit": "500m", + "memory_limit": "256Mi", + "timeout": 20 } } ] diff --git a/tests/test_manager.py b/tests/test_manager.py index f50f0d4..fa407a6 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -90,8 +90,11 @@ def test_codejail_config(self): }) self.assertTrue(codejail.jail_code.is_configured("other-python")) - # now we'll see if the codejail config is inherited in the handler subprocess + # Verify codejail config is visible to the grader running in the same process. + # (fork_per_item=False avoids relying on multiprocessing start-method-specific + # state inheritance, which varies between 'fork' and 'forkserver'.) 
handler_config = self.config['test1'].copy() + handler_config['HANDLERS'][0]['KWARGS'] = {'fork_per_item': False} client = self.m.client_from_config("test", handler_config) client.session = MockXQueueServer() client._handle_submission(json.dumps({ diff --git a/xqueue_watcher/grader.py b/xqueue_watcher/grader.py index ed3744d..db69a09 100644 --- a/xqueue_watcher/grader.py +++ b/xqueue_watcher/grader.py @@ -2,7 +2,7 @@ Implementation of a grader compatible with XServer """ import html -import os +import sys import time import json from path import Path @@ -130,7 +130,7 @@ def process_item(self, content, queue=None): self.log.debug(f"Processing submission, grader payload: {payload}") relative_grader_path = grader_config['grader'] - grader_path = os.path.abspath(self.grader_root / relative_grader_path) + grader_path = (self.grader_root / relative_grader_path).absolute() start = time.time() results = self.grade(grader_path, grader_config, student_response) diff --git a/xqueue_watcher/jailedgrader.py b/xqueue_watcher/jailedgrader.py index 3692bab..56bf8c4 100644 --- a/xqueue_watcher/jailedgrader.py +++ b/xqueue_watcher/jailedgrader.py @@ -1,5 +1,8 @@ """ An implementation of a grader that uses codejail to sandbox submission execution. + +NOTE: This grader requires codejail (an optional dependency) and an AppArmor-enabled +host OS. For Kubernetes deployments, use ContainerGrader instead. 
""" import codecs import os @@ -9,9 +12,12 @@ import random import gettext from path import Path -import six -import codejail +try: + import codejail + import codejail.jail_code +except ImportError: + codejail = None from grader_support.gradelib import EndTest from grader_support.graderutil import LANGUAGE @@ -21,21 +27,8 @@ TIMEOUT = 1 -def path_to_six(): - """ - Return the full path to six.py - """ - if any(six.__file__.endswith(suffix) for suffix in ('.pyc', '.pyo')): - # __file__ points to the compiled bytecode in python 2 - return Path(six.__file__[:-1]) - else: - # __file__ points to the .py file in python 3 - return Path(six.__file__) - - SUPPORT_FILES = [ Path(grader_support.__file__).dirname(), - path_to_six(), ] @@ -63,8 +56,16 @@ class JailedGrader(Grader): A grader implementation that uses codejail. Instantiate it with grader_root="path/to/graders" and optionally codejail_python="python name" (the name that you used to configure codejail) + + NOTE: Requires codejail (optional dependency) and an AppArmor-enabled host. + For Kubernetes deployments, use ContainerGrader instead. """ def __init__(self, *args, **kwargs): + if codejail is None: + raise RuntimeError( + "codejail is not installed. JailedGrader requires codejail and an " + "AppArmor-enabled host. For containerized deployments use ContainerGrader." 
+ ) self.codejail_python = kwargs.pop("codejail_python", "python") super().__init__(*args, **kwargs) self.locale_dir = self.grader_root / "conf" / "locale" @@ -279,7 +280,7 @@ def main(args): # pragma: no cover submission = f.read().decode('utf-8') grader_config = {"lang": "eo"} - grader_path = path(grader_path).abspath() + grader_path = Path(grader_path).absolute() g = JailedGrader(grader_root=grader_path.dirname().parent.parent) pprint(g.grade(grader_path, grader_config, submission)) diff --git a/xqueue_watcher/manager.py b/xqueue_watcher/manager.py index d699e5c..1652e42 100644 --- a/xqueue_watcher/manager.py +++ b/xqueue_watcher/manager.py @@ -10,7 +10,10 @@ import sys import time -from codejail import jail_code +try: + from codejail import jail_code as _codejail_jail_code +except ImportError: + _codejail_jail_code = None from .settings import get_manager_config_values, MANAGER_CONFIG_DEFAULTS @@ -100,7 +103,7 @@ def configure_from_directory(self, directory): def enable_codejail(self, codejail_config): """ - Enable codejail for the process. + Enable codejail for the process (legacy AppArmor-based sandbox). codejail_config is a dict like this: { "name": "python", @@ -114,13 +117,18 @@ def enable_codejail(self, codejail_config): limits are optional user defaults to the current user """ + if _codejail_jail_code is None: + raise RuntimeError( + "codejail is not installed. Cannot configure AppArmor-based sandboxing. " + "Use ContainerGrader for containerized deployments." 
+ ) name = codejail_config["name"] bin_path = codejail_config['bin_path'] user = codejail_config.get('user', getpass.getuser()) - jail_code.configure(name, bin_path, user=user) + _codejail_jail_code.configure(name, bin_path, user=user) limits = codejail_config.get("limits", {}) for limit_name, value in limits.items(): - jail_code.set_limit(limit_name, value) + _codejail_jail_code.set_limit(limit_name, value) self.log.info("configured codejail -> %s %s %s", name, bin_path, user) return name From 37100adf8cc4a90880d17d023e8156bce758a2bf Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 11:48:08 -0400 Subject: [PATCH 03/25] feat: add ContainerGrader for Kubernetes/Docker-based sandboxed grading MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds xqueue_watcher/containergrader.py — a drop-in replacement for JailedGrader that executes student code inside an isolated container instead of using AppArmor/codejail. Security model (replaces AppArmor): - Container isolation (Linux namespaces + cgroups) - Non-root user (UID 1000), read-only root filesystem - CPU/memory resource limits enforced by container runtime - Network disabled for grader containers (no egress) - Hard wall-clock timeout via activeDeadlineSeconds (k8s) or timeout (Docker) Two pluggable backends selected via the 'backend' KWARGS option: kubernetes (default / production): - Creates a batch/v1 Job per submission using the kubernetes Python client - Auto-detects in-cluster vs kubeconfig credentials - Polls until Job completes, collects stdout from pod logs - Deletes the Job after result collection (ttlSecondsAfterFinished=300) - Job pod spec includes: securityContext, resource limits, activeDeadlineSeconds, and labels for observability docker (local dev / CI): - Runs a container via the docker Python SDK - Bind-mounts the grader directory read-only - Passes SUBMISSION_CODE as an environment variable - Network disabled, memory + CPU limits applied 
Student code is passed via SUBMISSION_CODE env var (avoids argv length limits and shell injection). The entrypoint writes it to /tmp before invoking grader_support.run, producing the same JSON output format that JailedGrader already expects — so no changes to grader test framework or course team grader code are required. Configuration example (conf.d/my-course.json): { "my-course": { "HANDLERS": [{ "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", "KWARGS": { "grader_root": "/graders/my-course/", "image": "registry.example.com/my-course:latest", "backend": "kubernetes", "cpu_limit": "500m", "memory_limit": "256Mi", "timeout": 20 } }] } } Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- xqueue_watcher/containergrader.py | 438 ++++++++++++++++++++++++++++++ 1 file changed, 438 insertions(+) create mode 100644 xqueue_watcher/containergrader.py diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py new file mode 100644 index 0000000..1580c89 --- /dev/null +++ b/xqueue_watcher/containergrader.py @@ -0,0 +1,438 @@ +""" +A grader implementation that executes student code inside an isolated container. + +Supports two backends: + - "kubernetes": creates a batch/v1 Job per submission (production) + - "docker": runs a local Docker container (local dev / CI) + +This is the recommended replacement for JailedGrader on Kubernetes deployments. +No AppArmor or elevated host privileges are required. +""" + +import importlib +import json +import logging +import os +import random +import tempfile +import time +import uuid +from path import Path + +from .grader import Grader +from grader_support.gradelib import EndTest +from grader_support.graderutil import LANGUAGE + + +_BACKEND_KUBERNETES = "kubernetes" +_BACKEND_DOCKER = "docker" +_SUPPORTED_BACKENDS = (_BACKEND_KUBERNETES, _BACKEND_DOCKER) + +# Label applied to all grading Jobs so they can be bulk-cleaned up if needed. 
+_JOB_LABEL = "app.kubernetes.io/component=xqueue-grader" + + +def _truncate(out): + TOO_LONG = 5000 + if len(out) > TOO_LONG: + out = out[:TOO_LONG] + "...OUTPUT TRUNCATED" + return out + + +def _prepend_coding(code): + return "# coding: utf8\n" + code + + +class ContainerGrader(Grader): + """ + Grades student submissions by running them inside an isolated container. + + Configuration (passed as KWARGS in the conf.d JSON handler config): + + grader_root - Path to the directory containing grader scripts. + Must be accessible inside the container at the same path, + either via a PVC (Kubernetes) or a bind mount (Docker). + image - Docker image to run. Should extend grader-base and include + all course-specific dependencies. + backend - "kubernetes" (default) or "docker". + namespace - Kubernetes namespace to create Jobs in (default: "default"). + cpu_limit - CPU limit for the grading container (default: "500m"). + memory_limit - Memory limit for the grading container (default: "256Mi"). + timeout - Maximum wall-clock seconds a grading job may run (default: 20). + """ + + def __init__( + self, + grader_root, + image, + backend=_BACKEND_KUBERNETES, + namespace="default", + cpu_limit="500m", + memory_limit="256Mi", + timeout=20, + **kwargs, + ): + if backend not in _SUPPORTED_BACKENDS: + raise ValueError( + f"Unsupported backend {backend!r}. Choose from {_SUPPORTED_BACKENDS}." + ) + super().__init__(grader_root=grader_root, fork_per_item=False, **kwargs) + self.image = image + self.backend = backend + self.namespace = namespace + self.cpu_limit = cpu_limit + self.memory_limit = memory_limit + self.timeout = timeout + + # ------------------------------------------------------------------ + # Internal: container execution + # ------------------------------------------------------------------ + + def _run(self, grader_path, code, seed): + """ + Invoke the grader_support runner inside a container. + + Returns the raw stdout bytes from the container (JSON from run.py). 
+ Raises RuntimeError on timeout or non-zero exit. + """ + if self.backend == _BACKEND_KUBERNETES: + return self._run_kubernetes(grader_path, code, seed) + return self._run_docker(grader_path, code, seed) + + def _run_kubernetes(self, grader_path, code, seed): + """Create a Kubernetes Job, wait for it, collect stdout, delete it.""" + try: + from kubernetes import client as k8s_client, config as k8s_config, watch as k8s_watch + except ImportError: + raise RuntimeError( + "The 'kubernetes' package is required for the kubernetes backend. " + "Install it with: uv add kubernetes" + ) + + try: + k8s_config.load_incluster_config() + except k8s_config.ConfigException: + k8s_config.load_kube_config() + + job_name = f"xqueue-grader-{uuid.uuid4().hex[:12]}" + batch_v1 = k8s_client.BatchV1Api() + core_v1 = k8s_client.CoreV1Api() + + job_manifest = self._build_k8s_job(job_name, grader_path, code, seed) + + try: + batch_v1.create_namespaced_job(namespace=self.namespace, body=job_manifest) + self.log.debug("Created Job %s", job_name) + + stdout = self._wait_and_collect_k8s( + batch_v1, core_v1, job_name, timeout=self.timeout + ) + return stdout + finally: + try: + batch_v1.delete_namespaced_job( + name=job_name, + namespace=self.namespace, + body=k8s_client.V1DeleteOptions(propagation_policy="Foreground"), + ) + except Exception: + self.log.warning("Failed to delete Job %s", job_name, exc_info=True) + + def _build_k8s_job(self, job_name, grader_path, code, seed): + """Return a kubernetes Job manifest dict for the given grading run.""" + from kubernetes import client as k8s_client + + grader_rel = str(Path(grader_path).basename()) + grader_dir = str(Path(grader_path).dirname()) + + return k8s_client.V1Job( + api_version="batch/v1", + kind="Job", + metadata=k8s_client.V1ObjectMeta( + name=job_name, + labels={ + "app.kubernetes.io/component": "xqueue-grader", + "app.kubernetes.io/managed-by": "xqueue-watcher", + }, + ), + spec=k8s_client.V1JobSpec( + backoff_limit=0, + 
active_deadline_seconds=self.timeout, + ttl_seconds_after_finished=300, + template=k8s_client.V1PodTemplateSpec( + spec=k8s_client.V1PodSpec( + restart_policy="Never", + security_context=k8s_client.V1PodSecurityContext( + run_as_non_root=True, + run_as_user=1000, + ), + containers=[ + k8s_client.V1Container( + name="grader", + image=self.image, + args=[grader_rel, "submission.py", str(seed)], + working_dir=grader_dir, + env=[ + k8s_client.V1EnvVar( + name="SUBMISSION_CODE", + value=code, + ), + ], + resources=k8s_client.V1ResourceRequirements( + limits={ + "cpu": self.cpu_limit, + "memory": self.memory_limit, + }, + requests={ + "cpu": "100m", + "memory": "64Mi", + }, + ), + security_context=k8s_client.V1SecurityContext( + allow_privilege_escalation=False, + read_only_root_filesystem=True, + capabilities=k8s_client.V1Capabilities(drop=["ALL"]), + ), + ) + ], + ) + ), + ), + ) + + def _wait_and_collect_k8s(self, batch_v1, core_v1, job_name, timeout): + """Poll until the Job completes, then return its pod's stdout bytes.""" + deadline = time.monotonic() + timeout + while time.monotonic() < deadline: + job = batch_v1.read_namespaced_job(name=job_name, namespace=self.namespace) + if job.status.succeeded: + break + if job.status.failed: + raise RuntimeError(f"Grading Job {job_name} failed.") + time.sleep(1) + else: + raise RuntimeError( + f"Grading Job {job_name} exceeded timeout of {timeout}s." 
+ ) + + pods = core_v1.list_namespaced_pod( + namespace=self.namespace, + label_selector=f"job-name={job_name}", + ) + if not pods.items: + raise RuntimeError(f"No pods found for Job {job_name}.") + + pod_name = pods.items[0].metadata.name + log = core_v1.read_namespaced_pod_log(name=pod_name, namespace=self.namespace) + return log.encode("utf-8") + + def _run_docker(self, grader_path, code, seed): + """Run a local Docker container and return stdout bytes.""" + try: + import docker as docker_sdk + except ImportError: + raise RuntimeError( + "The 'docker' package is required for the docker backend. " + "Install it with: uv add docker" + ) + + grader_dir = str(Path(grader_path).dirname().absolute()) + grader_rel = str(Path(grader_path).basename()) + + client = docker_sdk.from_env() + try: + result = client.containers.run( + image=self.image, + command=[grader_rel, "submission.py", str(seed)], + working_dir="/grader", + environment={"SUBMISSION_CODE": code}, + volumes={grader_dir: {"bind": "/grader", "mode": "ro"}}, + mem_limit=self.memory_limit, + nano_cpus=int(_parse_cpu_millis(self.cpu_limit) * 1_000_000), + network_disabled=True, + read_only=True, + remove=True, + stdout=True, + stderr=False, + timeout=self.timeout, + ) + except docker_sdk.errors.ContainerError as exc: + raise RuntimeError( + f"Grading container exited with error: {exc}" + ) from exc + + return result if isinstance(result, bytes) else result.encode("utf-8") + + # ------------------------------------------------------------------ + # Public grading interface (mirrors JailedGrader.grade) + # ------------------------------------------------------------------ + + def grade(self, grader_path, grader_config, submission): + import gettext + import sys + + if not isinstance(submission, str): + self.log.warning("Submission is NOT unicode") + + results = { + "errors": [], + "tests": [], + "correct": False, + "score": 0, + } + + if grader_config.get("skip_grader", False): + results["correct"] = True + 
results["score"] = 1 + self.log.debug("Skipping the grader.") + return results + + lang = grader_config.get("lang", LANGUAGE) + locale_dir = self.grader_root / "conf" / "locale" + if locale_dir.exists(): + trans = gettext.translation( + "graders", localedir=str(locale_dir), fallback=True, languages=[lang] + ) + trans.install(names=None) + + # Load the grader module to access its test definitions and preprocessors. + # The module runs outside the sandbox (trusted code). + sf_loader = importlib.machinery.SourceFileLoader("grader_module", str(grader_path)) + grader_module = sf_loader.load_module() + grader = grader_module.grader + + errors = grader.input_errors(submission) + if errors: + results["errors"].extend(errors) + return results + + answer_path = Path(grader_path).dirname() / "answer.py" + with open(answer_path, "rb") as f: + answer = f.read().decode("utf-8") + + processed_answer = _prepend_coding(grader.preprocess(answer)) + processed_submission = _prepend_coding(grader.preprocess(submission)) + + seed = str(random.randint(0, 20000)) + + # Run the staff answer inside the container. + expected_ok = False + expected_outputs = None + expected_exc = None + try: + expected_outputs = self._run(grader_path, processed_answer, seed) + if expected_outputs: + expected = json.loads(expected_outputs.decode("utf-8")) + expected_ok = True + except Exception: + expected_exc = sys.exc_info() + else: + if expected_ok: + if ( + expected["exceptions"] + or expected["grader"]["status"] != "ok" + or expected["submission"]["status"] != "ok" + ): + expected_ok = False + + if not expected_ok: + results["errors"].append( + "There was a problem running the staff solution (Staff debug: L364)" + ) + self.log.error( + "Couldn't run staff solution. grader = %s, output: %r", + grader_path, + expected_outputs, + exc_info=expected_exc, + ) + return results + + # Run the student submission inside the container. 
+ actual_ok = False + actual_outputs = None + actual_exc = None + try: + actual_outputs = self._run(grader_path, processed_submission, seed) + if actual_outputs: + actual = json.loads(actual_outputs.decode("utf-8")) + actual_ok = True + else: + results["errors"].append( + "There was a problem running your solution (Staff debug: L379)." + ) + except Exception: + actual_exc = sys.exc_info() + else: + if actual_ok and actual["grader"]["status"] == "ok": + if actual["submission"]["status"] != "ok": + shown_error = actual["submission"]["exception"] or ( + "There was an error thrown while running your solution." + ) + results["errors"].append(shown_error) + else: + actual_ok = False + + if not actual_ok: + results["errors"].append( + "We couldn't run your solution (Staff debug: L397)." + ) + self.log.error( + "Couldn't run student solution. grader = %s, output: %r", + grader_path, + actual_outputs, + exc_info=actual_exc, + ) + return results + + corrects = [] + if not results["errors"]: + expected_results = expected["results"] + actual_results = actual["results"] + if len(expected_results) != len(actual_results): + results["errors"].append( + "Something went wrong: different numbers of tests ran for " + "your code and for our reference code." + ) + return results + + for test, exp, act in zip(grader.tests(), expected_results, actual_results): + exp_short_desc, exp_long_desc, exp_output = exp + act_short_desc, act_long_desc, act_output = act + if exp_short_desc != act_short_desc: + results["errors"].append( + "Something went wrong: tests don't match up." 
+ ) + return results + act_output = _truncate(act_output) + try: + correct = test.compare_results(exp_output, act_output) + except EndTest as e: + if e is not None: + act_output += "\n" + act_output += f"*** ERROR: {e} ***" + correct = False + corrects.append(correct) + if not grader_config.get("hide_output", False): + results["tests"].append( + (exp_short_desc, exp_long_desc, correct, exp_output, act_output) + ) + + n = len(corrects) + results["correct"] = all(corrects) and n > 0 + results["score"] = float(sum(corrects)) / n if n > 0 else 0 + + if n == 0 and not results["errors"]: + results["errors"] = [ + "There was a problem while running your code (Staff debug: L450). " + "Please contact the course staff for assistance." + ] + + return results + + +def _parse_cpu_millis(cpu_str): + """Convert a Kubernetes CPU string like '500m' or '1' to a float of millicores.""" + cpu_str = str(cpu_str).strip() + if cpu_str.endswith("m"): + return float(cpu_str[:-1]) + return float(cpu_str) * 1000 From a66b70359c1697d5989c633f4eb73eb8737d586b Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 11:48:19 -0400 Subject: [PATCH 04/25] feat: add grader base Docker image and container entrypoint grader_support/Dockerfile.base: - python:3.11-slim base, non-root grader user (UID 1000) - Copies grader_support framework; installs path-py - ENTRYPOINT: python -m grader_support.entrypoint - /tmp volume for submission files (writable even with read-only root fs) - Course teams extend this image to add their deps and grader scripts grader_support/entrypoint.py: - Reads SUBMISSION_CODE env var, writes to /tmp/submission.py - Adds /tmp and cwd to sys.path, then delegates to grader_support.run - Prints JSON result to stdout (same schema JailedGrader already parses) grader_support/README.md: - Course team authoring guide: how to extend the base image, configure the handler, and understand the security properties Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> 
--- grader_support/Dockerfile.base | 22 +++++++++++ grader_support/README.md | 69 +++++++++++++++++++++++++++++++++ grader_support/entrypoint.py | 70 ++++++++++++++++++++++++++++++++++ 3 files changed, 161 insertions(+) create mode 100644 grader_support/Dockerfile.base create mode 100644 grader_support/README.md create mode 100644 grader_support/entrypoint.py diff --git a/grader_support/Dockerfile.base b/grader_support/Dockerfile.base new file mode 100644 index 0000000..5c659e2 --- /dev/null +++ b/grader_support/Dockerfile.base @@ -0,0 +1,22 @@ +FROM python:3.11-slim AS grader-base + +# Create a non-root user for running student code +RUN useradd -m -u 1000 --shell /bin/false grader + +WORKDIR /grader + +# Install minimal runtime dependency needed by grader_support +RUN pip install --no-cache-dir path-py + +# Copy the grader_support framework into the image +COPY grader_support/ ./grader_support/ + +# /tmp is always writable (tmpfs) even with read_only_root_filesystem=true. +# Student submission files are written there by the entrypoint. +VOLUME ["/tmp"] + +USER grader + +# The entrypoint reads SUBMISSION_CODE from the environment, writes it to /tmp, +# then invokes grader_support.run and prints JSON results to stdout. +ENTRYPOINT ["python", "-m", "grader_support.entrypoint"] diff --git a/grader_support/README.md b/grader_support/README.md new file mode 100644 index 0000000..c96d2cb --- /dev/null +++ b/grader_support/README.md @@ -0,0 +1,69 @@ +## Grader Base Image + +This Dockerfile builds the base image that all course-specific grader images extend. + +### What it contains + +- Python 3.11 (slim) +- The `grader_support` package (test framework and runner used by all graders) +- An entrypoint that reads student submissions from the `SUBMISSION_CODE` environment variable + +### Building + +```bash +docker build -t grader-base:latest -f grader_support/Dockerfile.base . 
+``` + +Or via the Makefile: + +```bash +make docker-build +``` + +### Course team usage + +Course teams create their own image `FROM grader-base` and add their grader scripts and any Python dependencies their graders require: + +```dockerfile +FROM registry.example.com/grader-base:latest + +# Install course-specific Python dependencies +RUN pip install --no-cache-dir numpy==1.26.4 scipy==1.12.0 + +# Copy grader scripts into the image +COPY graders/ /graders/ +``` + +Then reference the image in the xqueue-watcher handler config (`conf.d/my-course.json`): + +```json +{ + "my-course-queue": { + "SERVER": "http://xqueue:18040", + "CONNECTIONS": 2, + "AUTH": ["lms", "lms"], + "HANDLERS": [ + { + "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", + "KWARGS": { + "grader_root": "/graders/my-course/", + "image": "registry.example.com/my-course-grader:latest", + "backend": "kubernetes", + "cpu_limit": "500m", + "memory_limit": "256Mi", + "timeout": 20 + } + } + ] + } +} +``` + +### Security properties + +Grader containers run with: +- Non-root user (UID 1000) +- Read-only root filesystem (`/tmp` is a tmpfs for submission files) +- No network access (`network_disabled: true` / Kubernetes NetworkPolicy) +- CPU and memory limits enforced by the container runtime +- Hard wall-clock timeout via `activeDeadlineSeconds` (Kubernetes) or `timeout` (Docker) diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py new file mode 100644 index 0000000..750763e --- /dev/null +++ b/grader_support/entrypoint.py @@ -0,0 +1,70 @@ +""" +Entrypoint wrapper for running grader_support.run inside a container. + +Reads the student submission from the SUBMISSION_CODE environment variable +and writes it to a temporary file before invoking the standard runner. +This avoids having to pass code through argv (length limits) or requiring +a writable submission directory when the root filesystem is read-only. 
+ +Usage (set by Dockerfile ENTRYPOINT): + python -m grader_support.entrypoint GRADER_FILE submission.py SEED +""" + +import os +import sys +import tempfile + +from . import run as _run_module + + +def main(): + if len(sys.argv) != 4: + print( + "Usage: python -m grader_support.entrypoint GRADER_FILE submission.py SEED", + file=sys.stderr, + ) + sys.exit(1) + + grader_name, submission_name, seed = sys.argv[1], sys.argv[2], sys.argv[3] + code = os.environ.get("SUBMISSION_CODE", "") + + # Write the submission to a temp file in /tmp (always writable even with + # read_only_root_filesystem=true, because /tmp is a separate tmpfs). + with tempfile.NamedTemporaryFile( + mode="w", + suffix=".py", + prefix="submission_", + dir="/tmp", + delete=False, + encoding="utf-8", + ) as f: + f.write(code) + tmp_path = f.name + + # Temporarily add /tmp and the working directory to sys.path so that + # grader_support.run can import the submission and grader by name. + cwd = os.getcwd() + sys.path.insert(0, "/tmp") + sys.path.insert(0, cwd) + + # Rename so the module name matches what run.py expects (submission_name + # without the .py extension becomes the importable module name). 
+ import shutil + + dest = os.path.join("/tmp", submission_name) + shutil.move(tmp_path, dest) + + seed_int = int(seed) + output = _run_module.run( + grader_name[:-3] if grader_name.endswith(".py") else grader_name, + submission_name[:-3] if submission_name.endswith(".py") else submission_name, + seed_int, + ) + + import json + + print(json.dumps(output)) + + +if __name__ == "__main__": + main() From 01c65bad2393d08110159d51b68819af951962d3 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 11:48:30 -0400 Subject: [PATCH 05/25] feat: add Kubernetes deployment manifests and Docker Compose local dev MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit deploy/kubernetes/ (Kustomize-compatible): - serviceaccount.yaml — dedicated SA for xqueue-watcher pods - rbac.yaml — Role + RoleBinding: create/delete Jobs, read pod logs - configmap.yaml — watcher xqwatcher.json config (edit for your queues) - deployment.yaml — 2 replicas, topologySpreadConstraints, securityContext, resource limits, readinessProbe - networkpolicy.yaml — deny all ingress/egress on grader Job pods (label: role=grader-job); allow xqueue-watcher egress to xqueue - secret.yaml.template — placeholder: copy to secret.yaml, fill in credentials, do not commit secret.yaml (added to .gitignore) - kustomization.yaml — Kustomize entry point for the base directory docker-compose.yml (local dev): - xqueue-watcher container with docker socket access (for docker backend) - Mounts conf.d/ and grader directories - Includes a sample xqueue service reference for full local stack Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Automated code Graders With xqueue-watcher.md | 69 ++++++++++++ deploy/kubernetes/configmap.yaml | 65 ++++++++++++ deploy/kubernetes/deployment.yaml | 100 ++++++++++++++++++ deploy/kubernetes/kustomization.yaml | 25 +++++ deploy/kubernetes/networkpolicy.yaml | 19 ++++ deploy/kubernetes/rbac.yaml | 37 +++++++ 
deploy/kubernetes/secret.yaml.template | 15 +++ deploy/kubernetes/serviceaccount.yaml | 8 ++ docker-compose.yml | 47 ++++++++ 9 files changed, 385 insertions(+) create mode 100755 Automated code Graders With xqueue-watcher.md create mode 100644 deploy/kubernetes/configmap.yaml create mode 100644 deploy/kubernetes/deployment.yaml create mode 100644 deploy/kubernetes/kustomization.yaml create mode 100644 deploy/kubernetes/networkpolicy.yaml create mode 100644 deploy/kubernetes/rbac.yaml create mode 100644 deploy/kubernetes/secret.yaml.template create mode 100644 deploy/kubernetes/serviceaccount.yaml create mode 100644 docker-compose.yml diff --git a/Automated code Graders With xqueue-watcher.md b/Automated code Graders With xqueue-watcher.md new file mode 100755 index 0000000..c2f3997 --- /dev/null +++ b/Automated code Graders With xqueue-watcher.md @@ -0,0 +1,69 @@ +## Current Status + +Student-submitted code questions are part of the course material for the 6.00x series, 6.86, and 7.8QBWx courses. These are automatically evaluated and graded in the context of the edX interface. The current implementation of this feature relies on the combination of [xqueue](https://github.com/openedx/xqueue) and [xqueue-watcher](https://github.com/openedx/xqueue-watcher). The xqueue and xqueue-watcher applications are [deprecated](https://github.com/openedx/public-engineering/issues/214) and slated for removal from the Open edX ecosystem in October 2024\. + +The user experience for course teams and students is quite poor. Course teams have no reliable means of validating their grader tests without deploying them to a running environment, leading to longer cycle times and a lack of visibility into errors. This also contributes to a substantial support burden on engineering to provide debugging information and onboarding for deployment. Students get opaque responses when their submissions are invalid or incorrect. 
Instructors have limited visibility into the code that their students are submitting and no ability to provide in-context feedback to the students. + +As an alternative to the combination of xqueue and xqueue-watcher, there are several paid services that offer automatic grading of student code. These platforms also offer many additional features beyond the capabilities of xqueue/xqueue-watcher. Examples include in-browser editor support (e.g., Visual Studio Code), in-context feedback mechanisms for instructors to comment on student code, and workspace management for students and instructors to keep track of what code has been written. A non-exhaustive list of companies providing this service includes [Codio](https://www.codio.com/), [Vocareum](https://www.vocareum.com/), and [CodeGrade](https://www.codegrade.com/). + +The core capability of the xqueue-watcher code grading system is to facilitate a request/response interaction for evaluating student-submitted code in a stateless manner. This is a benefit in that it allows for scaling to a large number of students. If there is a desire to have a more stateful experience for the learners, then instructors are advised to take advantage of services as noted above. + +## Planned Improvements + +In order to provide continuity of service to course teams currently using code grading in their classes, the engineering team will continue to provide the xqueue/xqueue-watcher systems as a service for Open edX courses. This will require investing engineering resources to upgrade the current code to be compatible with newer versions of the edX platform, as well as updating the system dependencies. The deployment architecture will also require upgrades to be compatible with the Open Learning infrastructure platform. 
+ +## Scope Of Work + +The focus of the improvements and upgrades to xqueue and xqueue-watcher will be on the local development experience for course instructors, and reducing the maintenance burden on infrastructure engineers. + +### Upgrade Service Deployment + +The initial scope of work will be focused on migrating the deployment of the xqueue-watcher service. This will include the ability to more easily scale capacity, allowing for the migration of current grader workloads hosted by edx.org/2U onto our infrastructure. This will require the dedicated attention of one member of the infrastructure engineering team for approximately 2 months to accomplish. + +### Enable Local Testing + +A significant source of frustration with the xqueue-watcher system is the inability to effectively test the functionality of the evaluation functions during their development. In addition, the course teams have no control over the runtime environment and system dependencies available to the student code. The current state of the service is that course teams are only able to use Python code and there is no means of specifying which version of the language to use. + +In order to make this easier, a new execution context that uses Docker images will be added to the xqueue-watcher service. This will allow course teams to develop their own images, specify the language and version that students can write their code in, and specify the dependencies available to them. Having a pre-built image to use for run-time execution also reduces the work required of the infrastructure team, allowing them to focus on the runtime of the core service rather than being responsible for the evaluation context of student code. + +For course teams that have existing grader logic, the engineering team will assist them in the initial creation of a Docker image that is compatible with their current code. 
This will not require substantial modifications to the grader code as it is presently written in order to function under the new execution context. + +The work to add and validate this functionality will require the attention of 2 engineers for approximately 4 months. + +### Provide Debugging Information To Course Teams + +The other major source of friction for both course instructors and platform operators is the inability to debug issues that students encounter. To allow course teams to diagnose and debug errors, the infrastructure team will collect log messages emitted by the grader into a system that allows for searching of that data. Course teams will be granted access to their logs for the purpose of supporting their students in the event that there are logical or resource-related failures due to student submissions and/or grader logic. This will require the focus of 1 infrastructure engineer for approximately 1 month to configure, integrate, and validate the functionality and permission scoping. If there is a desire to integrate this experience into the Open edX interface then it will require an additional developer for approximately 1 \- 2 months. + +### Ongoing Maintenance, Support, and Improvements + +To support the ongoing operation and support of the automated grader service will require the attention of 1 full time DevOps engineer. The responsibilities of this engineer include: + +* Helping course teams with deployment and debugging of their graders +* Build utilities to simplify the creation of graders by course teams +* Perform ongoing maintenance and upgrades of the xqueue and xqueue-watcher projects +* Manage deployment, monitoring, and scaling of the automated graders + +## Explicitly Out Of Scope + +There are a number of features that we are explicitly not going to offer to course teams. In particular, we have no intention of adding features around dedicated lab spaces for students, editor integrations, or support for other courseware systems. 
For any use cases that go beyond a simple request/response interchange for evaluating student code the course team is advised to engage with services such as Codio, CodeGrade, etc. as noted above. + +## Estimated Costs + +As noted in the scope of work, the ongoing support, maintenance, and upgrades of the service will require an additional dedicated DevOps engineer. + +The infrastructure costs for the current installation of xqueue-watchers is approximately $700/month. With increased scaling to account for the course load currently being managed on edx.org that number is anticipated to reach \~$1,500/month. + +The total estimated cost per learner is \~$5/learner/month. This is based on the above costs and an enrollment rate of \~3,700 verified learners per course term. + +## + +## Comparison To Alternatives + +| | Xqueue/Xqueue-Watcher | Cod.io | +| :---- | :---- | :---- | +| Cost | \~$5/student/month | \~$12/student/month minimum | +| Compatibility | Open edX only | Open edX, Canvas, any LMS that supports LTI | +| Features | Automated evaluation of student code submissions | In-browser IDE, Automated code evaluation, Cloud lab environments, Content authoring | +| Instructor Support | Business hours by staff FTE | [Assistance with development and maintenance of problems and graders](https://docs.codio.com/index.html) | +| Student Support | 24/7 2U or MITx Online escalated to course team | [https://docs.codio.com/student.html](https://docs.codio.com/student.html) | + diff --git a/deploy/kubernetes/configmap.yaml b/deploy/kubernetes/configmap.yaml new file mode 100644 index 0000000..5933eda --- /dev/null +++ b/deploy/kubernetes/configmap.yaml @@ -0,0 +1,65 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: xqueue-watcher-config + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher +data: + # Main watcher settings. See xqueue_watcher/settings.py for all keys. 
+ xqwatcher.json: | + { + "POLL_INTERVAL": 1, + "LOGIN_POLL_INTERVAL": 5, + "REQUESTS_TIMEOUT": 5, + "POLL_TIME": 10 + } + + # Logging configuration + logging.json: | + { + "version": 1, + "disable_existing_loggers": false, + "formatters": { + "standard": { + "format": "%(asctime)s %(levelname)s %(name)s %(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "standard", + "stream": "ext://sys.stdout" + } + }, + "root": { + "handlers": ["console"], + "level": "INFO" + } + } + + # Example queue config — copy this pattern for each course queue. + # Real configs live in conf.d/ mounted from a separate ConfigMap or Secret. + example-queue.json.sample: | + { + "my-course-queue": { + "SERVER": "http://xqueue:18040", + "CONNECTIONS": 2, + "AUTH": ["xqueue_user", "xqueue_pass"], + "HANDLERS": [ + { + "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", + "KWARGS": { + "grader_root": "/graders/my-course/", + "image": "registry.example.com/my-course-grader:latest", + "backend": "kubernetes", + "namespace": "xqueue-watcher", + "cpu_limit": "500m", + "memory_limit": "256Mi", + "timeout": 20 + } + } + ] + } + } diff --git a/deploy/kubernetes/deployment.yaml b/deploy/kubernetes/deployment.yaml new file mode 100644 index 0000000..e05854e --- /dev/null +++ b/deploy/kubernetes/deployment.yaml @@ -0,0 +1,100 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: xqueue-watcher + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher +spec: + # Scale horizontally by increasing replicas. Each replica polls xqueue + # independently — no coordination is required between replicas. 
+ replicas: 2 + selector: + matchLabels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher + template: + metadata: + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher + spec: + serviceAccountName: xqueue-watcher + + # Spread replicas across nodes for availability + topologySpreadConstraints: + - maxSkew: 1 + topologyKey: kubernetes.io/hostname + whenUnsatisfiable: ScheduleAnyway + labelSelector: + matchLabels: + app.kubernetes.io/name: xqueue-watcher + + containers: + - name: xqueue-watcher + image: registry.example.com/xqueue-watcher:latest + imagePullPolicy: Always + command: ["python", "-m", "xqueue_watcher", "-d", "/etc/xqueue-watcher"] + + env: + # NEW_RELIC_LICENSE_KEY injected from secret if using newrelic extra + - name: NEW_RELIC_LICENSE_KEY + valueFrom: + secretKeyRef: + name: xqueue-watcher-secrets + key: new-relic-license-key + optional: true + + resources: + requests: + cpu: "100m" + memory: "128Mi" + limits: + cpu: "500m" + memory: "512Mi" + + volumeMounts: + # Main watcher config (xqwatcher.json + logging.json) + - name: config + mountPath: /etc/xqueue-watcher + readOnly: true + # Queue-specific conf.d configs (one JSON file per course queue) + - name: queue-configs + mountPath: /etc/xqueue-watcher/conf.d + readOnly: true + + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: true + runAsNonRoot: true + runAsUser: 1000 + capabilities: + drop: ["ALL"] + + # Basic liveness: the process exits on fatal errors, so k8s will restart it. + # A more sophisticated probe could hit a /healthz endpoint if one is added. 
+ livenessProbe: + exec: + command: ["python", "-c", "import xqueue_watcher"] + initialDelaySeconds: 10 + periodSeconds: 30 + failureThreshold: 3 + + volumes: + - name: config + configMap: + name: xqueue-watcher-config + items: + - key: xqwatcher.json + path: xqwatcher.json + - key: logging.json + path: logging.json + - name: queue-configs + # Replace with a Secret if the configs contain credentials. + # In practice, AUTH credentials should come from a Secret and be + # mounted separately or passed as environment variables. + configMap: + name: xqueue-watcher-queue-configs + + restartPolicy: Always diff --git a/deploy/kubernetes/kustomization.yaml b/deploy/kubernetes/kustomization.yaml new file mode 100644 index 0000000..23b7ddc --- /dev/null +++ b/deploy/kubernetes/kustomization.yaml @@ -0,0 +1,25 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +namespace: xqueue-watcher + +resources: + - serviceaccount.yaml + - rbac.yaml + - configmap.yaml + - deployment.yaml + - networkpolicy.yaml + +# Override the image tag for a specific environment by adding a patch in an +# overlay directory. Example overlay (deploy/kubernetes/overlays/production/): +# +# kustomization.yaml: +# resources: +# - ../../ +# images: +# - name: registry.example.com/xqueue-watcher +# newTag: "v1.2.3" +# +images: + - name: registry.example.com/xqueue-watcher + newTag: latest diff --git a/deploy/kubernetes/networkpolicy.yaml b/deploy/kubernetes/networkpolicy.yaml new file mode 100644 index 0000000..c591138 --- /dev/null +++ b/deploy/kubernetes/networkpolicy.yaml @@ -0,0 +1,19 @@ +# Deny all egress from grading Job pods. +# xqueue-watcher pods themselves still need egress to reach the xqueue server. 
+apiVersion: networking.k8s.io/v1 +kind: NetworkPolicy +metadata: + name: deny-grader-egress + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: network-policy +spec: + podSelector: + matchLabels: + app.kubernetes.io/component: xqueue-grader + policyTypes: + - Egress + # No egress rules = deny all outbound traffic from grader pods. + # Student code cannot make network calls, exfiltrate data, or reach external services. + egress: [] diff --git a/deploy/kubernetes/rbac.yaml b/deploy/kubernetes/rbac.yaml new file mode 100644 index 0000000..5a18123 --- /dev/null +++ b/deploy/kubernetes/rbac.yaml @@ -0,0 +1,37 @@ +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: xqueue-watcher-grader + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher +rules: + # Create and manage grading Jobs + - apiGroups: ["batch"] + resources: ["jobs"] + verbs: ["create", "get", "list", "watch", "delete"] + # Read pod logs to collect grading results + - apiGroups: [""] + resources: ["pods"] + verbs: ["get", "list", "watch"] + - apiGroups: [""] + resources: ["pods/log"] + verbs: ["get"] +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: xqueue-watcher-grader + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher +subjects: + - kind: ServiceAccount + name: xqueue-watcher + namespace: xqueue-watcher +roleRef: + kind: Role + apiGroup: rbac.authorization.k8s.io + name: xqueue-watcher-grader diff --git a/deploy/kubernetes/secret.yaml.template b/deploy/kubernetes/secret.yaml.template new file mode 100644 index 0000000..0a6fe3c --- /dev/null +++ b/deploy/kubernetes/secret.yaml.template @@ -0,0 +1,15 @@ +# Secret template — do NOT commit real values. 
+# Copy this to secret.yaml.local (gitignored) and fill in real values, +# or provision via your secrets management tool (Vault, AWS Secrets Manager, etc.) +apiVersion: v1 +kind: Secret +metadata: + name: xqueue-watcher-secrets + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher +type: Opaque +stringData: + # New Relic license key (only required if using the production newrelic extra) + new-relic-license-key: "REPLACE_ME" diff --git a/deploy/kubernetes/serviceaccount.yaml b/deploy/kubernetes/serviceaccount.yaml new file mode 100644 index 0000000..1abe93b --- /dev/null +++ b/deploy/kubernetes/serviceaccount.yaml @@ -0,0 +1,8 @@ +apiVersion: v1 +kind: ServiceAccount +metadata: + name: xqueue-watcher + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: watcher diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..d2cb78c --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,47 @@ +services: + # xqueue: the submission queue that xqueue-watcher polls. + # Uses the official Open edX xqueue image. + xqueue: + image: openedx/xqueue:latest + ports: + - "18040:18040" + environment: + DJANGO_SETTINGS_MODULE: xqueue.settings.devstack + XQUEUE_DJANGO_SECRET_KEY: dev-secret-key + healthcheck: + test: ["CMD", "curl", "-sf", "http://localhost:18040/xqueue/status/"] + interval: 10s + timeout: 5s + retries: 5 + + # xqueue-watcher: polls xqueue and routes submissions to the grader. + xqueue-watcher: + build: + context: . + dockerfile: Dockerfile + depends_on: + xqueue: + condition: service_healthy + volumes: + # Mount the local conf.d so you can edit queue configs without rebuilding. + - ./conf.d:/etc/xqueue-watcher/conf.d:ro + # Mount local grader scripts for rapid iteration. + - ./data:/graders:ro + environment: + # Use docker backend so grading containers run locally via the Docker socket. 
+ GRADER_BACKEND: docker + # Give xqueue-watcher access to the Docker socket so it can spawn grader containers. + # Remove this if you don't need the docker backend locally. + extra_hosts: + - "host.docker.internal:host-gateway" + command: python -m xqueue_watcher -d /etc/xqueue-watcher + + # sample-grader: an example grader image for local testing. + # Course teams replace this with their own image. + sample-grader: + build: + context: . + dockerfile: grader_support/Dockerfile.base + # This service is not started automatically — it exists so `docker compose build` + # builds the base image that course grader images extend. + profiles: ["build-only"] From 18c7151747681c10967a108870be835a3887bd8e Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 12:11:03 -0400 Subject: [PATCH 06/25] fix: correct grader path handling in ContainerGrader and entrypoint ContainerGrader had two bugs affecting how grader files were located inside the container at runtime: 1. Docker backend bind-mounted the grader problem directory at /grader, overwriting the grader_support package that the base image copies there. Fixed by binding at /graders instead and passing the resulting absolute in-container path (/graders/) to the entrypoint. 2. Kubernetes backend set working_dir to the grader problem directory (e.g. /graders/ps07/Robot/), preventing Python from finding the grader_support package which lives at /grader/grader_support/. Fixed by keeping working_dir=/grader (the base image WORKDIR) and passing the absolute grader path in args instead of just the basename. entrypoint.py previously passed the full absolute path verbatim to __import__(), which fails for paths containing slashes. It now detects absolute paths, inserts the parent directory into sys.path, and uses only the basename as the importable module name. 
Also updates grader_support/README.md to document the correct layout (/graders/ for course grader scripts, /grader/ for grader_support) and the gradelib compatibility note for course teams migrating from Python 2 graders. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- grader_support/README.md | 55 ++++++++++++++++++++++++++----- grader_support/entrypoint.py | 14 +++++++- xqueue_watcher/containergrader.py | 22 +++++++++---- 3 files changed, 75 insertions(+), 16 deletions(-) diff --git a/grader_support/README.md b/grader_support/README.md index c96d2cb..f15b4f2 100644 --- a/grader_support/README.md +++ b/grader_support/README.md @@ -5,7 +5,8 @@ This Dockerfile builds the base image that all course-specific grader images ext ### What it contains - Python 3.11 (slim) -- The `grader_support` package (test framework and runner used by all graders) +- The `grader_support` package (test framework and runner used by all graders) at `/grader/grader_support/` +- A non-root `grader` user (UID 1000) - An entrypoint that reads student submissions from the `SUBMISSION_CODE` environment variable ### Building @@ -22,19 +23,45 @@ make docker-build ### Course team usage -Course teams create their own image `FROM grader-base` and add their grader scripts and any Python dependencies their graders require: +Course teams create their own image `FROM grader-base` and add their grader scripts plus any Python dependencies required by the graders. + +#### Directory layout inside the container + +``` +/grader/ ← WORKDIR (base image); grader_support lives here +└── grader_support/ ← test framework (gradelib, run, entrypoint) + +/graders/ ← course grader scripts (course team copies these) +└── ps01/ +│ └── Problem1/ +│ ├── grade_Problem1.py ← grader script (defines `grader = Grader()`) +│ └── answer.py ← reference solution +└── ml/ + └── cluster/ + └── grade_cluster.py +``` + +`grader_root` in the handler config should point to `/graders/` (or a subdirectory of it). 
The `SUBMISSION_CODE` env var carries student code; the entrypoint writes it to `/tmp` (a writable tmpfs even when the root filesystem is read-only). + +#### Example course Dockerfile ```dockerfile -FROM registry.example.com/grader-base:latest +# syntax=docker/dockerfile:1 +ARG GRADER_BASE_IMAGE=ghcr.io/mitodl/xqueue-watcher-grader-base:latest +FROM ${GRADER_BASE_IMAGE} + +# pip must run as root; the base image ends with USER grader. +USER root +RUN pip install --no-cache-dir numpy==1.26.4 scipy==1.13.0 -# Install course-specific Python dependencies -RUN pip install --no-cache-dir numpy==1.26.4 scipy==1.12.0 +# Copy grader scripts to /graders/. Do NOT copy them to /grader/ — that +# would overwrite the grader_support package from the base image. +COPY --chown=grader:grader graders/ /graders/ -# Copy grader scripts into the image -COPY graders/ /graders/ +USER grader ``` -Then reference the image in the xqueue-watcher handler config (`conf.d/my-course.json`): +#### Example handler config (`conf.d/my-course.json`) ```json { @@ -46,7 +73,7 @@ Then reference the image in the xqueue-watcher handler config (`conf.d/my-course { "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", "KWARGS": { - "grader_root": "/graders/my-course/", + "grader_root": "/graders/", "image": "registry.example.com/my-course-grader:latest", "backend": "kubernetes", "cpu_limit": "500m", @@ -59,6 +86,8 @@ Then reference the image in the xqueue-watcher handler config (`conf.d/my-course } ``` +The `grader` field inside each xqueue submission payload should be a path **relative to `grader_root`**, e.g. `"ps01/Problem1/grade_Problem1.py"`. 
+ ### Security properties Grader containers run with: @@ -67,3 +96,11 @@ Grader containers run with: - No network access (`network_disabled: true` / Kubernetes NetworkPolicy) - CPU and memory limits enforced by the container runtime - Hard wall-clock timeout via `activeDeadlineSeconds` (Kubernetes) or `timeout` (Docker) + +### Important: `gradelib` compatibility + +The `grader_support/__init__.py` injects the framework's Python 3 `gradelib` and +`graderutil` modules into `sys.modules` before any grader file is imported. This +means grader scripts that do `from gradelib import *` receive the framework version +automatically, even if a legacy `gradelib.py` exists elsewhere on disk. Course teams +do not need to ship their own copy of `gradelib.py`. diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py index 750763e..c40385b 100644 --- a/grader_support/entrypoint.py +++ b/grader_support/entrypoint.py @@ -47,6 +47,18 @@ def main(): sys.path.insert(0, "/tmp") sys.path.insert(0, cwd) + # grader_name may be an absolute path (e.g. /graders/ps07/Robot/grade_Robot.py) + # when the containergrader passes the full in-container path. In that case, add + # the grader's directory to sys.path and use only the basename as the module name + # so that __import__() can find it. + if os.path.isabs(grader_name): + grader_dir = os.path.dirname(grader_name) + grader_module = os.path.basename(grader_name) + if grader_dir not in sys.path: + sys.path.insert(0, grader_dir) + else: + grader_module = grader_name + # Rename so the module name matches what run.py expects (submission_name # without the .py extension becomes the importable module name). 
import shutil @@ -56,7 +68,7 @@ def main(): seed_int = int(seed) output = _run_module.run( - grader_name[:-3] if grader_name.endswith(".py") else grader_name, + grader_module[:-3] if grader_module.endswith(".py") else grader_module, submission_name[:-3] if submission_name.endswith(".py") else submission_name, seed_int, ) diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 1580c89..7a97433 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -142,8 +142,13 @@ def _build_k8s_job(self, job_name, grader_path, code, seed): """Return a kubernetes Job manifest dict for the given grading run.""" from kubernetes import client as k8s_client - grader_rel = str(Path(grader_path).basename()) - grader_dir = str(Path(grader_path).dirname()) + # Pass the absolute in-container path to the grader file. The + # entrypoint handles absolute paths by adding the parent directory to + # sys.path before importing the module, so Python can find the file. + # working_dir must stay at /grader (the WORKDIR of the base image) + # so that `python -m grader_support.entrypoint` can locate the + # grader_support package. + grader_abs = str(grader_path) return k8s_client.V1Job( api_version="batch/v1", @@ -170,8 +175,8 @@ def _build_k8s_job(self, job_name, grader_path, code, seed): k8s_client.V1Container( name="grader", image=self.image, - args=[grader_rel, "submission.py", str(seed)], - working_dir=grader_dir, + args=[grader_abs, "submission.py", str(seed)], + working_dir="/grader", env=[ k8s_client.V1EnvVar( name="SUBMISSION_CODE", @@ -238,15 +243,20 @@ def _run_docker(self, grader_path, code, seed): grader_dir = str(Path(grader_path).dirname().absolute()) grader_rel = str(Path(grader_path).basename()) + # Mount the problem directory at /graders/ (not /grader/ which would + # overwrite the base image's grader_support package). 
Pass the grader + # as an absolute in-container path so the entrypoint can add its parent + # directory to sys.path before importing. + container_grader_path = f"/graders/{grader_rel}" client = docker_sdk.from_env() try: result = client.containers.run( image=self.image, - command=[grader_rel, "submission.py", str(seed)], + command=[container_grader_path, "submission.py", str(seed)], working_dir="/grader", environment={"SUBMISSION_CODE": code}, - volumes={grader_dir: {"bind": "/grader", "mode": "ro"}}, + volumes={grader_dir: {"bind": "/graders", "mode": "ro"}}, mem_limit=self.memory_limit, nano_cpus=int(_parse_cpu_millis(self.cpu_limit) * 1_000_000), network_disabled=True, From 0be09261eb73c7b8b3aed407a02bedab601e6229 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 12:47:53 -0400 Subject: [PATCH 07/25] fix(tests): skip jailed grader tests when codejail is not installed codejail is an optional dependency (not installed in CI). Guard the import with a try/except and apply @pytest.mark.skipif to the test class so collection succeeds and tests are skipped gracefully when codejail is absent. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/test_jailed_grader.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_jailed_grader.py b/tests/test_jailed_grader.py index e4ade77..c1b25fd 100644 --- a/tests/test_jailed_grader.py +++ b/tests/test_jailed_grader.py @@ -5,10 +5,18 @@ import unittest from path import Path +import pytest + +try: + from codejail.jail_code import configure + HAS_CODEJAIL = True +except ImportError: + HAS_CODEJAIL = False + from xqueue_watcher.jailedgrader import JailedGrader -from codejail.jail_code import configure +@pytest.mark.skipif(not HAS_CODEJAIL, reason="codejail not installed") class JailedGraderTests(unittest.TestCase): def setUp(self): configure("python", sys.executable, user=getpass.getuser()) From 3f7944d1ff8664c31f6b2bc183d130f6fa41644b Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 12:54:52 -0400 Subject: [PATCH 08/25] fix: address PR review feedback - Dockerfile: replace deleted requirements/ pip install with uv sync (copies uv binary from ghcr.io/astral-sh/uv and uses uv sync --frozen) - grader.py: guard against path traversal in grader_config['grader']; validate that the resolved grader path stays within grader_root - containergrader.py: fix Docker SDK TypeError - containers.run() does not accept a timeout kwarg; switch to detach=True + container.wait() to enforce the timeout, then collect logs and remove the container - containergrader.py: remove brittle hardcoded line numbers (L364, L379, L397, L450) from user-facing error messages - docker-compose.yml: change conf.d and data volumes from :ro to :rw so local edits take effect without rebuild (matches comment intent) - upgrade-python-requirements.yml: add explicit permissions block (contents: write, pull-requests: write) as required by security policy - Automated code Graders With xqueue-watcher.md: remove empty heading, add 'Property' header to comparison table Co-authored-by: 
Copilot <223556219+Copilot@users.noreply.github.com> --- .../workflows/upgrade-python-requirements.yml | 3 +++ Automated code Graders With xqueue-watcher.md | 4 +-- Dockerfile | 15 ++++++----- docker-compose.yml | 4 +-- xqueue_watcher/containergrader.py | 25 +++++++++++++------ xqueue_watcher/grader.py | 12 +++++++++ 6 files changed, 45 insertions(+), 18 deletions(-) diff --git a/.github/workflows/upgrade-python-requirements.yml b/.github/workflows/upgrade-python-requirements.yml index 845c53a..6d1a48a 100644 --- a/.github/workflows/upgrade-python-requirements.yml +++ b/.github/workflows/upgrade-python-requirements.yml @@ -8,6 +8,9 @@ on: jobs: update-dependencies: runs-on: ubuntu-24.04 + permissions: + contents: write + pull-requests: write steps: - uses: actions/checkout@v4 diff --git a/Automated code Graders With xqueue-watcher.md b/Automated code Graders With xqueue-watcher.md index c2f3997..af0baf1 100755 --- a/Automated code Graders With xqueue-watcher.md +++ b/Automated code Graders With xqueue-watcher.md @@ -55,11 +55,9 @@ The infrastructure costs for the current installation of xqueue-watchers is appr The total estimated cost per learner is \~$5/learner/month. This is based on the above costs and an enrollment rate of \~3,700 verified learners per course term. 
-## - ## Comparison To Alternatives -| | Xqueue/Xqueue-Watcher | Cod.io | +| Property | Xqueue/Xqueue-Watcher | Cod.io | | :---- | :---- | :---- | | Cost | \~$5/student/month | \~$12/student/month minimum | | Compatibility | Open edX only | Open edX, Canvas, any LMS that supports LTI | diff --git a/Dockerfile b/Dockerfile index d199d17..0cdfb01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,19 +12,22 @@ RUN apt-get update && \ RUN useradd -m --shell /bin/false app +COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv + WORKDIR /edx/app/xqueue_watcher -COPY requirements /edx/app/xqueue_watcher/requirements -RUN pip install --no-cache-dir --upgrade pip && \ - pip install --no-cache-dir -r requirements/production.txt + +COPY pyproject.toml uv.lock ./ +RUN uv sync --frozen --no-dev --no-install-project COPY . /edx/app/xqueue_watcher +RUN uv sync --frozen --no-dev USER app -CMD ["python", "-m", "xqueue_watcher", "-d", "/edx/etc/xqueue_watcher"] +CMD ["xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] FROM base AS edx.org USER root -RUN pip install --no-cache-dir newrelic +RUN uv sync --frozen --extra production USER app -CMD ["newrelic-admin", "run-program", "python", "-m", "xqueue_watcher", "-d", "/edx/etc/xqueue_watcher"] +CMD ["newrelic-admin", "run-program", "xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] diff --git a/docker-compose.yml b/docker-compose.yml index d2cb78c..cb46851 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -24,9 +24,9 @@ services: condition: service_healthy volumes: # Mount the local conf.d so you can edit queue configs without rebuilding. - - ./conf.d:/etc/xqueue-watcher/conf.d:ro + - ./conf.d:/etc/xqueue-watcher/conf.d:rw # Mount local grader scripts for rapid iteration. - - ./data:/graders:ro + - ./data:/graders:rw environment: # Use docker backend so grading containers run locally via the Docker socket. 
GRADER_BACKEND: docker diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 7a97433..762d4a3 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -251,7 +251,10 @@ def _run_docker(self, grader_path, code, seed): client = docker_sdk.from_env() try: - result = client.containers.run( + # Run detached so we can enforce a wall-clock timeout via container.wait(). + # containers.run() does not accept a timeout argument; using detach=True + # lets us call container.wait(timeout=...) to cap execution time. + container = client.containers.run( image=self.image, command=[container_grader_path, "submission.py", str(seed)], working_dir="/grader", @@ -261,11 +264,19 @@ def _run_docker(self, grader_path, code, seed): nano_cpus=int(_parse_cpu_millis(self.cpu_limit) * 1_000_000), network_disabled=True, read_only=True, - remove=True, + detach=True, stdout=True, stderr=False, - timeout=self.timeout, ) + try: + exit_info = container.wait(timeout=self.timeout) + if exit_info.get("StatusCode", 0) != 0: + raise RuntimeError( + f"Grading container exited with non-zero status: {exit_info}" + ) + result = container.logs(stdout=True, stderr=False) + finally: + container.remove(force=True) except docker_sdk.errors.ContainerError as exc: raise RuntimeError( f"Grading container exited with error: {exc}" @@ -347,7 +358,7 @@ def grade(self, grader_path, grader_config, submission): if not expected_ok: results["errors"].append( - "There was a problem running the staff solution (Staff debug: L364)" + "There was a problem running the staff solution (Staff debug)." ) self.log.error( "Couldn't run staff solution. grader = %s, output: %r", @@ -368,7 +379,7 @@ def grade(self, grader_path, grader_config, submission): actual_ok = True else: results["errors"].append( - "There was a problem running your solution (Staff debug: L379)." + "There was a problem running your solution (Staff debug)." 
) except Exception: actual_exc = sys.exc_info() @@ -384,7 +395,7 @@ def grade(self, grader_path, grader_config, submission): if not actual_ok: results["errors"].append( - "We couldn't run your solution (Staff debug: L397)." + "We couldn't run your solution (Staff debug)." ) self.log.error( "Couldn't run student solution. grader = %s, output: %r", @@ -433,7 +444,7 @@ def grade(self, grader_path, grader_config, submission): if n == 0 and not results["errors"]: results["errors"] = [ - "There was a problem while running your code (Staff debug: L450). " + "There was a problem while running your code (Staff debug). " "Please contact the course staff for assistance." ] diff --git a/xqueue_watcher/grader.py b/xqueue_watcher/grader.py index db69a09..e8eca85 100644 --- a/xqueue_watcher/grader.py +++ b/xqueue_watcher/grader.py @@ -131,6 +131,18 @@ def process_item(self, content, queue=None): self.log.debug(f"Processing submission, grader payload: {payload}") relative_grader_path = grader_config['grader'] grader_path = (self.grader_root / relative_grader_path).absolute() + # Guard against path traversal: ensure the resolved path stays + # within grader_root. Convert to pathlib for relative_to(). + import pathlib + try: + pathlib.Path(str(grader_path)).relative_to( + pathlib.Path(str(self.grader_root)).resolve() + ) + except ValueError as exc: + raise ValueError( + f"Grader path {relative_grader_path!r} resolves outside " + f"grader_root {self.grader_root!r}" + ) from exc start = time.time() results = self.grade(grader_path, grader_config, student_response) From 1491b294675cbdc984ed79508d6571568ed8354d Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 12:59:55 -0400 Subject: [PATCH 09/25] refactor: replace path-py with stdlib pathlib MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit path-py is an external dependency that wraps pathlib with a fluent API. 
Since we now require Python >= 3.11, pathlib covers all the same functionality without an extra dependency. Changes: - Replace 'from path import Path' with 'from pathlib import Path' in all source and test files - .dirname() → .parent - .basename() → .name - .absolute() / .absolute() → .resolve() (symlink-safe) - .files('*.json') → .glob('*.json') (with sorted() for stable ordering) - Remove path-py (path-py / path) from pyproject.toml dependencies - Regenerate uv.lock (removes path==17.1.1 and path-py==12.5.0) - Simplify grader.py path-traversal check: now that grader_path is a native pathlib.Path, the inline 'import pathlib' is no longer needed - Fix test_grader.py mock: grader_path.endswith() → grader_path.name == - Fix test_manager.py: pass str() to argparse (Path is not subscriptable) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- load_test/run.py | 4 ++-- pyproject.toml | 1 - tests/test_grader.py | 8 ++++---- tests/test_jailed_grader.py | 4 ++-- tests/test_manager.py | 6 +++--- uv.lock | 23 ----------------------- xqueue_watcher/containergrader.py | 8 ++++---- xqueue_watcher/grader.py | 12 ++++-------- xqueue_watcher/jailedgrader.py | 12 ++++++------ xqueue_watcher/manager.py | 4 ++-- 10 files changed, 27 insertions(+), 55 deletions(-) diff --git a/load_test/run.py b/load_test/run.py index dc10d98..1eb6027 100644 --- a/load_test/run.py +++ b/load_test/run.py @@ -7,7 +7,7 @@ import json import tempfile import getpass -from path import Path +from pathlib import Path import pprint import argparse @@ -20,7 +20,7 @@ { "HANDLER": "xqueue_watcher.jailedgrader.JailedGrader", "KWARGS": { - "grader_root": Path(__file__).dirname() / "../../data/6.00x/graders/", + "grader_root": Path(__file__).parent / "../../data/6.00x/graders/", } } ] diff --git a/pyproject.toml b/pyproject.toml index c5aea90..8fde6bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,6 @@ dependencies = [ "dogstatsd-python", "docker>=7.0.0", "kubernetes>=29.0.0", - 
"path-py", "requests", ] diff --git a/tests/test_grader.py b/tests/test_grader.py index 9093983..97662f1 100644 --- a/tests/test_grader.py +++ b/tests/test_grader.py @@ -2,12 +2,12 @@ from unittest import mock import json import sys -from path import Path +from pathlib import Path from queue import Queue from xqueue_watcher import grader -MYDIR = Path(__file__).dirname() / 'fixtures' +MYDIR = Path(__file__).parent / 'fixtures' class MockGrader(grader.Grader): @@ -16,12 +16,12 @@ def grade(self, grader_path, grader_config, student_response): errors = [] correct = 0 score = 0 - if grader_path.endswith('/correct'): + if grader_path.name == 'correct': correct = 1 score = 1 tests.append(('short', 'long', True, 'expected', 'actual')) tests.append(('short', '', True, 'expected', 'actual')) - elif grader_path.endswith('/incorrect'): + elif grader_path.name == 'incorrect': tests.append(('short', 'long', False, 'expected', 'actual')) errors.append('THIS IS AN ERROR') errors.append('\x00\xc3\x83\xc3\xb8\x02') diff --git a/tests/test_jailed_grader.py b/tests/test_jailed_grader.py index c1b25fd..1c989cf 100644 --- a/tests/test_jailed_grader.py +++ b/tests/test_jailed_grader.py @@ -3,7 +3,7 @@ import sys import textwrap import unittest -from path import Path +from pathlib import Path import pytest @@ -29,7 +29,7 @@ def setUp(self): user=getpass.getuser(), ) break - self.grader_root = Path(__file__).dirname() / 'fixtures' + self.grader_root = Path(__file__).parent / 'fixtures' self.g = JailedGrader(grader_root=self.grader_root) self.g3 = JailedGrader(grader_root=self.grader_root, codejail_python='python3') diff --git a/tests/test_manager.py b/tests/test_manager.py index fa407a6..4ff5064 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -1,5 +1,5 @@ import unittest -from path import Path +from pathlib import Path import json from unittest.mock import Mock import time @@ -179,6 +179,6 @@ def test_main_with_errors(self): self.assertIn('required', err_msg) sys.stderr 
= stderr - mydir = Path(__file__).dirname() - args = ['-d', mydir / "fixtures/config"] + mydir = Path(__file__).parent + args = ['-d', str(mydir / "fixtures/config")] self.assertEqual(manager.main(args), 0) diff --git a/uv.lock b/uv.lock index 7604c1b..21c49f0 100644 --- a/uv.lock +++ b/uv.lock @@ -315,27 +315,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] -[[package]] -name = "path" -version = "17.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dd/52/a7bdd5ef8488977d354b7915d1e75009bebbd04f73eff14e52372d5e9435/path-17.1.1.tar.gz", hash = "sha256:2dfcbfec8b4d960f3469c52acf133113c2a8bf12ac7b98d629fa91af87248d42", size = 50528, upload-time = "2025-07-27T20:40:23.79Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/50/11c9ee1ede64b45d687fd36eb8768dafc57afc78b4d83396920cfd69ed30/path-17.1.1-py3-none-any.whl", hash = "sha256:ec7e136df29172e5030dd07e037d55f676bdb29d15bfa09b80da29d07d3b9303", size = 23936, upload-time = "2025-07-27T20:40:22.453Z" }, -] - -[[package]] -name = "path-py" -version = "12.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "path" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b6/e3/81be70016d58ade0f516191fa80152daba5453d0b07ce648d9daae86a188/path.py-12.5.0.tar.gz", hash = "sha256:8d885e8b2497aed005703d94e0fd97943401f035e42a136810308bff034529a8", size = 15713, upload-time = "2020-07-27T23:53:38.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/04/130b7a538c25693c85c4dee7e25d126ebf5511b1eb7320e64906687b159e/path.py-12.5.0-py3-none-any.whl", hash = "sha256:a43e82eb2c344c3fd0b9d6352f6b856f40b8b7d3d65cc05978b42c3715668496", size = 2251, 
upload-time = "2020-07-27T23:53:37.293Z" }, -] - [[package]] name = "pluggy" version = "1.6.0" @@ -587,7 +566,6 @@ dependencies = [ { name = "docker" }, { name = "dogstatsd-python" }, { name = "kubernetes" }, - { name = "path-py" }, { name = "requests" }, ] @@ -609,7 +587,6 @@ requires-dist = [ { name = "dogstatsd-python" }, { name = "kubernetes", specifier = ">=29.0.0" }, { name = "newrelic", marker = "extra == 'production'" }, - { name = "path-py" }, { name = "requests" }, ] provides-extras = ["production"] diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 762d4a3..d175b52 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -17,7 +17,7 @@ import tempfile import time import uuid -from path import Path +from pathlib import Path from .grader import Grader from grader_support.gradelib import EndTest @@ -241,8 +241,8 @@ def _run_docker(self, grader_path, code, seed): "Install it with: uv add docker" ) - grader_dir = str(Path(grader_path).dirname().absolute()) - grader_rel = str(Path(grader_path).basename()) + grader_dir = str(Path(grader_path).parent.resolve()) + grader_rel = str(Path(grader_path).name) # Mount the problem directory at /graders/ (not /grader/ which would # overwrite the base image's grader_support package). 
Pass the grader # as an absolute in-container path so the entrypoint can add its parent @@ -327,7 +327,7 @@ def grade(self, grader_path, grader_config, submission): results["errors"].extend(errors) return results - answer_path = Path(grader_path).dirname() / "answer.py" + answer_path = Path(grader_path).parent / "answer.py" with open(answer_path, "rb") as f: answer = f.read().decode("utf-8") diff --git a/xqueue_watcher/grader.py b/xqueue_watcher/grader.py index e8eca85..41bf488 100644 --- a/xqueue_watcher/grader.py +++ b/xqueue_watcher/grader.py @@ -5,7 +5,7 @@ import sys import time import json -from path import Path +from pathlib import Path import logging import multiprocessing from statsd import statsd @@ -130,14 +130,10 @@ def process_item(self, content, queue=None): self.log.debug(f"Processing submission, grader payload: {payload}") relative_grader_path = grader_config['grader'] - grader_path = (self.grader_root / relative_grader_path).absolute() - # Guard against path traversal: ensure the resolved path stays - # within grader_root. Convert to pathlib for relative_to(). - import pathlib + grader_path = (self.grader_root / relative_grader_path).resolve() + # Guard against path traversal: ensure the resolved path stays within grader_root. 
try: - pathlib.Path(str(grader_path)).relative_to( - pathlib.Path(str(self.grader_root)).resolve() - ) + grader_path.relative_to(self.grader_root.resolve()) except ValueError as exc: raise ValueError( f"Grader path {relative_grader_path!r} resolves outside " diff --git a/xqueue_watcher/jailedgrader.py b/xqueue_watcher/jailedgrader.py index 56bf8c4..c470a08 100644 --- a/xqueue_watcher/jailedgrader.py +++ b/xqueue_watcher/jailedgrader.py @@ -11,7 +11,7 @@ import json import random import gettext -from path import Path +from pathlib import Path try: import codejail @@ -28,7 +28,7 @@ TIMEOUT = 1 SUPPORT_FILES = [ - Path(grader_support.__file__).dirname(), + Path(grader_support.__file__).parent, ] @@ -82,7 +82,7 @@ def _run(self, grader_path, thecode, seed): if self.locale_dir.exists(): files.append(self.locale_dir) extra_files = [('submission.py', thecode.encode('utf-8'))] - argv = ["-B", "-m", "grader_support.run", Path(grader_path).basename(), 'submission.py', seed] + argv = ["-B", "-m", "grader_support.run", Path(grader_path).name, 'submission.py', seed] r = codejail.jail_code.jail_code(self.codejail_python, files=files, extra_files=extra_files, argv=argv) return r @@ -116,7 +116,7 @@ def grade(self, grader_path, grader_config, submission): self._enable_i18n(grader_config.get("lang", LANGUAGE)) - answer_path = Path(grader_path).dirname() / 'answer.py' + answer_path = Path(grader_path).parent / 'answer.py' with open(answer_path, 'rb') as f: answer = f.read().decode('utf-8') @@ -280,8 +280,8 @@ def main(args): # pragma: no cover submission = f.read().decode('utf-8') grader_config = {"lang": "eo"} - grader_path = Path(grader_path).absolute() - g = JailedGrader(grader_root=grader_path.dirname().parent.parent) + grader_path = Path(grader_path).resolve() + g = JailedGrader(grader_root=grader_path.parent.parent.parent) pprint(g.grade(grader_path, grader_config, submission)) diff --git a/xqueue_watcher/manager.py b/xqueue_watcher/manager.py index 1652e42..01c5afa 100644 --- 
a/xqueue_watcher/manager.py +++ b/xqueue_watcher/manager.py @@ -5,7 +5,7 @@ import json import logging import logging.config -from path import Path +from pathlib import Path import signal import sys import time @@ -97,7 +97,7 @@ def configure_from_directory(self, directory): self.manager_config = get_manager_config_values(app_config_path) confd = directory / 'conf.d' - for watcher in confd.files('*.json'): + for watcher in sorted(confd.glob('*.json')): with open(watcher) as queue_config: self.configure(json.load(queue_config)) From cbdcf1545a87e0abe1516f7f402c6a7a2f698f25 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 13:09:18 -0400 Subject: [PATCH 10/25] feat: add edx-codejail as optional dependency; document container isolation decision Add edx-codejail (the upstream PyPI package, v4.1.0) as an optional 'codejail' extra, replacing the previously pinned git-URL reference to a specific commit. uv add --optional codejail edx-codejail codejail is intentionally excluded from the base Docker image because ContainerGrader uses container-level isolation (Linux namespaces, cgroups, capability dropping, network isolation, read-only filesystem) which provides equivalent sandboxing to AppArmor without requiring host-level AppArmor configuration that is unavailable inside Kubernetes pods. Install the 'codejail' extra only when using the legacy JailedGrader on a bare-metal or VM host with AppArmor configured. To use: uv sync --extra codejail Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 9 +++++++++ pyproject.toml | 3 +++ uv.lock | 18 +++++++++++++++++- 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 0cdfb01..fb3da8d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,6 +21,15 @@ RUN uv sync --frozen --no-dev --no-install-project COPY . 
/edx/app/xqueue_watcher RUN uv sync --frozen --no-dev +# Note: the `codejail` optional extra (edx-codejail) is intentionally omitted +# from this image. In the Kubernetes deployment, student code runs inside an +# isolated container (ContainerGrader) — the container boundary provides the +# sandbox via Linux namespaces, cgroups, capability dropping, network isolation, +# and a read-only filesystem. codejail (AppArmor + OS-level user-switching) +# requires host-level AppArmor configuration that is unavailable inside +# Kubernetes pods and adds no meaningful security benefit on top of container +# isolation. Install the `codejail` extra only when running the legacy +# JailedGrader on a bare-metal or VM host with AppArmor configured. USER app diff --git a/pyproject.toml b/pyproject.toml index 8fde6bd..316adce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,9 @@ dependencies = [ ] [project.optional-dependencies] +codejail = [ + "edx-codejail", +] production = [ "newrelic", ] diff --git a/uv.lock b/uv.lock index 21c49f0..40b77eb 100644 --- a/uv.lock +++ b/uv.lock @@ -226,6 +226,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] +[[package]] +name = "edx-codejail" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/4b/d94b6f4c3b8ac1ddb9a6badd4bd03d8337265ef207406dffc91f671db695/edx_codejail-4.1.0.tar.gz", hash = "sha256:fdccde57a2dc8c81ebf80c4f9d317cbf1ae2f68c7f598c2f17289b85b7c0ccdb", size = 29888, upload-time = "2025-11-07T15:30:16.156Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8f/82/8077069f7161d70257abd40ff9eddcbf3c1e4fbe66e23c933bf9a3c6ccb0/edx_codejail-4.1.0-py3-none-any.whl", hash = "sha256:0b2779131136117929bb8e58c302062dc3e98ee6526f3eb6936b1738daa7a14c", size = 25463, upload-time = "2025-11-07T15:30:14.747Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -570,6 +582,9 @@ dependencies = [ ] [package.optional-dependencies] +codejail = [ + { name = "edx-codejail" }, +] production = [ { name = "newrelic" }, ] @@ -585,11 +600,12 @@ dev = [ requires-dist = [ { name = "docker", specifier = ">=7.0.0" }, { name = "dogstatsd-python" }, + { name = "edx-codejail", marker = "extra == 'codejail'" }, { name = "kubernetes", specifier = ">=29.0.0" }, { name = "newrelic", marker = "extra == 'production'" }, { name = "requests" }, ] -provides-extras = ["production"] +provides-extras = ["codejail", "production"] [package.metadata.requires-dev] dev = [ From 193c04380b94f401262a4deaedf4252bd6c11f88 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 13:22:57 -0400 Subject: [PATCH 11/25] fix: address second round of PR review feedback - Makefile: fix tab indentation on all recipe lines (was space-indented) - grader.py: remove unused sys import - jailedgrader.py: replace deprecated load_module() with spec_from_file_location/exec_module - containergrader.py: - remove unused imports (logging, os, tempfile) and _JOB_LABEL constant - add emptyDir volume at /tmp in K8s Job spec (required when read_only_root_filesystem=True) - add clarifying comment that K8s grader scripts are baked into the course image - replace deprecated load_module() with importlib.util spec/exec_module pattern - capture stderr from Docker container on non-zero exit for better diagnostics - grader_support/entrypoint.py: correct misleading comment about /tmp writability - deploy/kubernetes/deployment.yaml: fix command to use xqueue-watcher entry point - deploy/kubernetes/configmap.yaml: add xqueue-watcher-queue-configs ConfigMap 
so manifests apply cleanly out of the box - docker-compose.yml: mount Docker socket for docker backend to work - conf.d/600.json: use absolute /graders/ path instead of relative ../data path - Dockerfile: use C.UTF-8 locale (available without installing locales package) - pyproject.toml: add edx-codejail to dev group so jailed grader tests run in CI Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 5 ++-- Makefile | 12 +++++----- conf.d/600.json | 2 +- deploy/kubernetes/configmap.yaml | 38 +++++++++++++++++++++++++++++++ deploy/kubernetes/deployment.yaml | 4 +++- docker-compose.yml | 2 ++ grader_support/entrypoint.py | 6 +++-- pyproject.toml | 1 + uv.lock | 2 ++ xqueue_watcher/containergrader.py | 35 ++++++++++++++++++++-------- xqueue_watcher/grader.py | 1 - xqueue_watcher/jailedgrader.py | 6 +++-- 12 files changed, 89 insertions(+), 25 deletions(-) diff --git a/Dockerfile b/Dockerfile index fb3da8d..b6243a4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,9 +2,8 @@ FROM python:3.11-slim AS base ENV PYTHONDONTWRITEBYTECODE=1 \ PYTHONUNBUFFERED=1 \ - LANG=en_US.UTF-8 \ - LANGUAGE=en_US:en \ - LC_ALL=en_US.UTF-8 + LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 RUN apt-get update && \ apt-get install -y --no-install-recommends git-core && \ diff --git a/Makefile b/Makefile index 9c4e7f7..b124348 100644 --- a/Makefile +++ b/Makefile @@ -11,19 +11,19 @@ help: @echo '' requirements: -uv sync + uv sync test: requirements -uv run pytest --cov=xqueue_watcher --cov-report=xml tests + uv run pytest --cov=xqueue_watcher --cov-report=xml tests docker-build: -docker build -t xqueue-watcher:local . -docker build -t grader-base:local -f grader_support/Dockerfile.base . + docker build -t xqueue-watcher:local . + docker build -t grader-base:local -f grader_support/Dockerfile.base . local-run: -docker compose up + docker compose up clean: -find . -name '*.pyc' -delete + find . 
-name '*.pyc' -delete .PHONY: help requirements test docker-build local-run clean diff --git a/conf.d/600.json b/conf.d/600.json index 8f0632e..3052446 100644 --- a/conf.d/600.json +++ b/conf.d/600.json @@ -7,7 +7,7 @@ { "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", "KWARGS": { - "grader_root": "../data/6.00x/graders/", + "grader_root": "/graders/", "image": "grader-base:local", "backend": "docker", "cpu_limit": "500m", diff --git a/deploy/kubernetes/configmap.yaml b/deploy/kubernetes/configmap.yaml index 5933eda..b910827 100644 --- a/deploy/kubernetes/configmap.yaml +++ b/deploy/kubernetes/configmap.yaml @@ -63,3 +63,41 @@ data: ] } } +--- +# Queue-specific configurations: one JSON file per course queue. +# Operators replace or extend this with real queue names, server URLs, +# and grader images. AUTH credentials should be injected from a Secret +# (e.g., via Vault Secrets Operator) rather than stored in this ConfigMap. +apiVersion: v1 +kind: ConfigMap +metadata: + name: xqueue-watcher-queue-configs + namespace: xqueue-watcher + labels: + app.kubernetes.io/name: xqueue-watcher + app.kubernetes.io/component: queue-config +data: + # Replace with your actual queue configs. Each key becomes a file in + # /etc/xqueue-watcher/conf.d/ and must end in .json to be picked up. 
+ example-queue.json: | + { + "my-course-queue": { + "SERVER": "http://xqueue:18040", + "CONNECTIONS": 2, + "AUTH": ["xqueue_user", "xqueue_pass"], + "HANDLERS": [ + { + "HANDLER": "xqueue_watcher.containergrader.ContainerGrader", + "KWARGS": { + "grader_root": "/graders/my-course/", + "image": "registry.example.com/my-course-grader:latest", + "backend": "kubernetes", + "namespace": "xqueue-watcher", + "cpu_limit": "500m", + "memory_limit": "256Mi", + "timeout": 20 + } + } + ] + } + } diff --git a/deploy/kubernetes/deployment.yaml b/deploy/kubernetes/deployment.yaml index e05854e..0159455 100644 --- a/deploy/kubernetes/deployment.yaml +++ b/deploy/kubernetes/deployment.yaml @@ -35,7 +35,7 @@ spec: - name: xqueue-watcher image: registry.example.com/xqueue-watcher:latest imagePullPolicy: Always - command: ["python", "-m", "xqueue_watcher", "-d", "/etc/xqueue-watcher"] + command: ["xqueue-watcher", "-d", "/etc/xqueue-watcher"] env: # NEW_RELIC_LICENSE_KEY injected from secret if using newrelic extra @@ -91,6 +91,8 @@ spec: - key: logging.json path: logging.json - name: queue-configs + # Queue-specific configs live in a separate ConfigMap so course teams + # can update them independently of the main watcher config. # Replace with a Secret if the configs contain credentials. # In practice, AUTH credentials should come from a Secret and be # mounted separately or passed as environment variables. diff --git a/docker-compose.yml b/docker-compose.yml index cb46851..23379f0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,6 +32,8 @@ services: GRADER_BACKEND: docker # Give xqueue-watcher access to the Docker socket so it can spawn grader containers. # Remove this if you don't need the docker backend locally. 
+ volumes: + - /var/run/docker.sock:/var/run/docker.sock extra_hosts: - "host.docker.internal:host-gateway" command: python -m xqueue_watcher -d /etc/xqueue-watcher diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py index c40385b..94cb519 100644 --- a/grader_support/entrypoint.py +++ b/grader_support/entrypoint.py @@ -28,8 +28,10 @@ def main(): grader_name, submission_name, seed = sys.argv[1], sys.argv[2], sys.argv[3] code = os.environ.get("SUBMISSION_CODE", "") - # Write the submission to a temp file in /tmp (always writable even with - # read_only_root_filesystem=true, because /tmp is a separate tmpfs). + # Write the submission to /tmp, which must be writable. When running under + # Kubernetes with read_only_root_filesystem=True, ensure an emptyDir volume + # is mounted at /tmp in the Job spec — the root filesystem is read-only but + # emptyDir mounts are not. For Docker (local dev), /tmp is writable by default. with tempfile.NamedTemporaryFile( mode="w", suffix=".py", diff --git a/pyproject.toml b/pyproject.toml index 316adce..5fa012a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ xqueue-watcher = "xqueue_watcher.manager:main" [dependency-groups] dev = [ "coverage", + "edx-codejail", "mock", "pytest-cov", ] diff --git a/uv.lock b/uv.lock index 40b77eb..43fa4e7 100644 --- a/uv.lock +++ b/uv.lock @@ -592,6 +592,7 @@ production = [ [package.dev-dependencies] dev = [ { name = "coverage" }, + { name = "edx-codejail" }, { name = "mock" }, { name = "pytest-cov" }, ] @@ -610,6 +611,7 @@ provides-extras = ["codejail", "production"] [package.metadata.requires-dev] dev = [ { name = "coverage" }, + { name = "edx-codejail" }, { name = "mock" }, { name = "pytest-cov" }, ] diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index d175b52..fa7e198 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -10,11 +10,9 @@ """ import importlib +import importlib.util import json 
-import logging -import os import random -import tempfile import time import uuid from pathlib import Path @@ -28,9 +26,6 @@ _BACKEND_DOCKER = "docker" _SUPPORTED_BACKENDS = (_BACKEND_KUBERNETES, _BACKEND_DOCKER) -# Label applied to all grading Jobs so they can be bulk-cleaned up if needed. -_JOB_LABEL = "app.kubernetes.io/component=xqueue-grader" - def _truncate(out): TOO_LONG = 5000 @@ -171,6 +166,10 @@ def _build_k8s_job(self, job_name, grader_path, code, seed): run_as_non_root=True, run_as_user=1000, ), + # Grader scripts are baked into the course-specific image + # (no volume mount required). The image extends + # grader_support/Dockerfile.base and includes the grader + # files at the path referenced by grader_abs. containers=[ k8s_client.V1Container( name="grader", @@ -198,8 +197,23 @@ def _build_k8s_job(self, job_name, grader_path, code, seed): read_only_root_filesystem=True, capabilities=k8s_client.V1Capabilities(drop=["ALL"]), ), + volume_mounts=[ + k8s_client.V1VolumeMount( + name="tmp", + mount_path="/tmp", + ), + ], ) ], + volumes=[ + # emptyDir at /tmp is required because read_only_root_filesystem=True + # prevents writes to the root FS; the entrypoint writes the student + # submission to /tmp/submission.py before executing it. + k8s_client.V1Volume( + name="tmp", + empty_dir=k8s_client.V1EmptyDirVolumeSource(), + ), + ], ) ), ), @@ -271,8 +285,10 @@ def _run_docker(self, grader_path, code, seed): try: exit_info = container.wait(timeout=self.timeout) if exit_info.get("StatusCode", 0) != 0: + stderr = container.logs(stdout=False, stderr=True) raise RuntimeError( - f"Grading container exited with non-zero status: {exit_info}" + f"Grading container exited with non-zero status: {exit_info}. " + f"stderr: {stderr[:2000] if stderr else ''}" ) result = container.logs(stdout=True, stderr=False) finally: @@ -318,8 +334,9 @@ def grade(self, grader_path, grader_config, submission): # Load the grader module to access its test definitions and preprocessors. 
# The module runs outside the sandbox (trusted code). - sf_loader = importlib.machinery.SourceFileLoader("grader_module", str(grader_path)) - grader_module = sf_loader.load_module() + spec = importlib.util.spec_from_file_location("grader_module", str(grader_path)) + grader_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(grader_module) grader = grader_module.grader errors = grader.input_errors(submission) diff --git a/xqueue_watcher/grader.py b/xqueue_watcher/grader.py index 41bf488..48035b1 100644 --- a/xqueue_watcher/grader.py +++ b/xqueue_watcher/grader.py @@ -2,7 +2,6 @@ Implementation of a grader compatible with XServer """ import html -import sys import time import json from pathlib import Path diff --git a/xqueue_watcher/jailedgrader.py b/xqueue_watcher/jailedgrader.py index c470a08..dc3f130 100644 --- a/xqueue_watcher/jailedgrader.py +++ b/xqueue_watcher/jailedgrader.py @@ -8,6 +8,7 @@ import os import sys import importlib +import importlib.util import json import random import gettext @@ -123,8 +124,9 @@ def grade(self, grader_path, grader_config, submission): # Import the grader, straight from the original file. (It probably isn't in # sys.path, and we may be in a long running gunicorn process, so we don't # want to add stuff to sys.path either.) 
- sf_loader = importlib.machinery.SourceFileLoader("grader_module", str(grader_path)) - grader_module = sf_loader.load_module() + spec = importlib.util.spec_from_file_location("grader_module", str(grader_path)) + grader_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(grader_module) grader = grader_module.grader # Preprocess for grader-specified errors From d28da94f1f01ea489aa00699fc58cdaf1107130d Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 11 Mar 2026 16:25:08 -0400 Subject: [PATCH 12/25] refactor: move full grading pipeline into container; add ContainerGrader unit tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Architecture change: grader scripts are baked into the course-specific Docker image, so the watcher pod has no need to access grader files locally. The grader_support entrypoint now runs the complete grading pipeline inside the container (load grader, preprocess, run answer + submission, compare, return JSON grade), and ContainerGrader.grade() is simplified to just launch the container and parse its JSON output. 
Changes: - grader_support/entrypoint.py: complete rewrite; now takes GRADER_FILE SEED (not GRADER_FILE submission.py SEED); runs full grade pipeline in container; reads GRADER_LANGUAGE and HIDE_OUTPUT env vars from ContainerGrader - xqueue_watcher/containergrader.py: - Remove grader-module loading, gettext, answer.py reading, and all test- comparison logic from grade() — the container handles this now - grade() now just calls _run() and parses the returned JSON - _run() accepts grader_config and forwards lang/hide_output as env vars - _build_k8s_job(): args are now [grader_abs, seed] (not 3 args), adds GRADER_LANGUAGE and HIDE_OUTPUT env vars, still mounts emptyDir at /tmp - _run_docker(): same arg change; passes GRADER_LANGUAGE and HIDE_OUTPUT - ReadTimeout from container.wait() caught and re-raised as clear RuntimeError - Remove unused _truncate, _prepend_coding, importlib.util - tests/test_container_grader.py: 36 new unit tests covering: - _parse_cpu_millis - ContainerGrader init / backend validation - _build_k8s_job: args, env vars, resource limits, emptyDir/tmp, security - _run_docker: success, non-zero exit (with stderr), timeout, missing SDK - grade(): skip_grader, successful result, container failure, size warning Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- grader_support/entrypoint.py | 202 ++++++++++++++------ tests/test_container_grader.py | 298 ++++++++++++++++++++++++++++++ xqueue_watcher/containergrader.py | 273 ++++++++++----------------- 3 files changed, 536 insertions(+), 237 deletions(-) create mode 100644 tests/test_container_grader.py diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py index 94cb519..926b074 100644 --- a/grader_support/entrypoint.py +++ b/grader_support/entrypoint.py @@ -1,83 +1,165 @@ """ -Entrypoint wrapper for running grader_support.run inside a container. +Entrypoint for running the complete grading pipeline inside a container. 
-Reads the student submission from the SUBMISSION_CODE environment variable -and writes it to a temporary file before invoking the standard runner. -This avoids having to pass code through argv (length limits) or requiring -a writable submission directory when the root filesystem is read-only. +The grader scripts (grader file, answer.py) are baked into this image. +This module reads SUBMISSION_CODE from the environment, runs both the staff +answer and the student submission through the grader, compares results, and +prints the final grade as JSON to stdout. Usage (set by Dockerfile ENTRYPOINT): - python -m grader_support.entrypoint GRADER_FILE submission.py SEED + python -m grader_support.entrypoint GRADER_FILE SEED """ +import importlib.util +import json import os import sys -import tempfile - -from . import run as _run_module def main(): - if len(sys.argv) != 4: + if len(sys.argv) != 3: print( - "Usage: python -m grader_support.entrypoint GRADER_FILE submission.py SEED", + "Usage: python -m grader_support.entrypoint GRADER_FILE SEED", file=sys.stderr, ) sys.exit(1) - grader_name, submission_name, seed = sys.argv[1], sys.argv[2], sys.argv[3] - code = os.environ.get("SUBMISSION_CODE", "") - - # Write the submission to /tmp, which must be writable. When running under - # Kubernetes with read_only_root_filesystem=True, ensure an emptyDir volume - # is mounted at /tmp in the Job spec — the root filesystem is read-only but - # emptyDir mounts are not. For Docker (local dev), /tmp is writable by default. - with tempfile.NamedTemporaryFile( - mode="w", - suffix=".py", - prefix="submission_", - dir="/tmp", - delete=False, - encoding="utf-8", - ) as f: - f.write(code) - tmp_path = f.name - - # Temporarily add /tmp and the working directory to sys.path so that - # grader_support.run can import the submission and grader by name. 
- cwd = os.getcwd() + grader_path = sys.argv[1] + seed = int(sys.argv[2]) + submission_code = os.environ.get("SUBMISSION_CODE", "") + + results = {"errors": [], "tests": [], "correct": False, "score": 0} + + # Load the grader module to access test definitions, preprocessors, and + # input validators. The grader script is baked into this image. + spec = importlib.util.spec_from_file_location("grader_module", grader_path) + grader_module_obj = importlib.util.module_from_spec(spec) + spec.loader.exec_module(grader_module_obj) + grader = grader_module_obj.grader + + # Validate submission format before doing any work. + errors = grader.input_errors(submission_code) + if errors: + results["errors"].extend(errors) + print(json.dumps(results)) + return + + # Locale support: course graders may translate error messages. + import gettext + lang = os.environ.get("GRADER_LANGUAGE", "en") + grader_dir = os.path.dirname(os.path.abspath(grader_path)) + locale_dir = os.path.join(grader_dir, "conf", "locale") + trans = gettext.translation( + "graders", localedir=locale_dir, fallback=True, languages=[lang] + ) + trans.install(names=None) + + # Preprocess both the staff answer and the student submission. + answer_path = os.path.join(grader_dir, "answer.py") + with open(answer_path, "rb") as f: + answer = f.read().decode("utf-8") + + processed_answer = "# coding: utf8\n" + grader.preprocess(answer) + processed_submission = "# coding: utf8\n" + grader.preprocess(submission_code) + + # Write to /tmp, which is backed by an emptyDir volume mount in Kubernetes + # (readOnlyRootFilesystem=True prevents writes to the root FS). + with open("/tmp/answer.py", "w", encoding="utf-8") as f: + f.write(processed_answer) + with open("/tmp/submission.py", "w", encoding="utf-8") as f: + f.write(processed_submission) + + # Make /tmp and the grader directory importable so run.py can find them. sys.path.insert(0, "/tmp") - sys.path.insert(0, cwd) - - # grader_name may be an absolute path (e.g. 
/graders/ps07/Robot/grade_Robot.py) - # when the containergrader passes the full in-container path. In that case, add - # the grader's directory to sys.path and use only the basename as the module name - # so that __import__() can find it. - if os.path.isabs(grader_name): - grader_dir = os.path.dirname(grader_name) - grader_module = os.path.basename(grader_name) - if grader_dir not in sys.path: - sys.path.insert(0, grader_dir) - else: - grader_module = grader_name - - # Rename so the module name matches what run.py expects (submission_name - # without the .py extension becomes the importable module name). - import shutil - - dest = os.path.join("/tmp", submission_name) - shutil.move(tmp_path, dest) - - seed_int = int(seed) - output = _run_module.run( - grader_module[:-3] if grader_module.endswith(".py") else grader_module, - submission_name[:-3] if submission_name.endswith(".py") else submission_name, - seed_int, + sys.path.insert(0, grader_dir) + + from . import run as run_module + from .gradelib import EndTest + + grader_name = os.path.splitext(os.path.basename(grader_path))[0] + + # Run the staff answer first to get expected outputs. + expected_output = run_module.run(grader_name, "answer", seed) + expected_ok = ( + not expected_output["exceptions"] + and expected_output["grader"]["status"] == "ok" + and expected_output["submission"]["status"] == "ok" ) + if not expected_ok: + results["errors"].append( + "There was a problem running the staff solution (Staff debug)." + ) + print(json.dumps(results)) + return - import json + # Run the student submission. + actual_output = run_module.run(grader_name, "submission", seed) + actual_ok = actual_output["grader"]["status"] == "ok" - print(json.dumps(output)) + if actual_output["submission"]["status"] != "ok": + shown_error = actual_output["submission"].get("exception") or ( + "There was an error thrown while running your solution." 
+ ) + results["errors"].append(shown_error) + actual_ok = False + + if not actual_ok: + results["errors"].append("We couldn't run your solution (Staff debug).") + print(json.dumps(results)) + return + + # Compare test results. + expected_results = expected_output["results"] + actual_results = actual_output["results"] + + if len(expected_results) != len(actual_results): + results["errors"].append( + "Something went wrong: different numbers of tests ran for " + "your code and for our reference code." + ) + print(json.dumps(results)) + return + + hide_output = os.environ.get("HIDE_OUTPUT", "").lower() in ("1", "true", "yes") + TOO_LONG = 5000 + corrects = [] + + for test, exp, act in zip(grader.tests(), expected_results, actual_results): + exp_short, exp_long, exp_out = exp + act_short, act_long, act_out = act + + if exp_short != act_short: + results["errors"].append("Something went wrong: tests don't match up.") + print(json.dumps(results)) + return + + if len(act_out) > TOO_LONG: + act_out = act_out[:TOO_LONG] + "...OUTPUT TRUNCATED" + + try: + correct = test.compare_results(exp_out, act_out) + except EndTest as e: + if e is not None: + act_out += f"\n*** ERROR: {e} ***" + correct = False + + corrects.append(correct) + if not hide_output: + results["tests"].append( + (exp_short, exp_long, correct, exp_out, act_out) + ) + + n = len(corrects) + results["correct"] = all(corrects) and n > 0 + results["score"] = float(sum(corrects)) / n if n > 0 else 0 + + if n == 0 and not results["errors"]: + results["errors"] = [ + "There was a problem while running your code (Staff debug). " + "Please contact the course staff for assistance." + ] + + print(json.dumps(results)) if __name__ == "__main__": diff --git a/tests/test_container_grader.py b/tests/test_container_grader.py new file mode 100644 index 0000000..f4c758c --- /dev/null +++ b/tests/test_container_grader.py @@ -0,0 +1,298 @@ +""" +Unit tests for ContainerGrader. 
+ +Uses mock objects for the Docker SDK and kubernetes client to test container +execution paths without requiring a live Docker daemon or cluster. +""" + +import json +from pathlib import Path +from unittest import mock + +import pytest + +from xqueue_watcher.containergrader import ContainerGrader, _parse_cpu_millis + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def make_grader(backend="docker", **kwargs): + defaults = dict(grader_root="/graders", image="course-grader:v1", backend=backend) + defaults.update(kwargs) + return ContainerGrader(**defaults) + + +# --------------------------------------------------------------------------- +# _parse_cpu_millis +# --------------------------------------------------------------------------- + +class TestParseCpuMillis: + def test_millicores(self): + assert _parse_cpu_millis("500m") == 500.0 + + def test_whole_cores(self): + assert _parse_cpu_millis("2") == 2000.0 + + def test_fractional_cores(self): + assert _parse_cpu_millis("0.5") == 500.0 + + +# --------------------------------------------------------------------------- +# ContainerGrader.__init__ +# --------------------------------------------------------------------------- + +class TestContainerGraderInit: + def test_valid_kubernetes_backend(self): + g = make_grader(backend="kubernetes") + assert g.backend == "kubernetes" + + def test_valid_docker_backend(self): + g = make_grader(backend="docker") + assert g.backend == "docker" + + def test_invalid_backend(self): + with pytest.raises(ValueError, match="Unsupported backend"): + make_grader(backend="podman") + + def test_defaults(self): + g = ContainerGrader(grader_root="/graders", image="img:latest") + assert g.backend == "kubernetes" + assert g.namespace == "default" + assert g.cpu_limit == "500m" + assert g.memory_limit == "256Mi" + assert g.timeout == 20 + + +# 
--------------------------------------------------------------------------- +# _build_k8s_job +# --------------------------------------------------------------------------- + +class TestBuildK8sJob: + def setup_method(self): + self.grader = make_grader( + backend="kubernetes", + namespace="test-ns", + cpu_limit="1000m", + memory_limit="512Mi", + timeout=30, + ) + + def _build(self, job_name="test-job", grader_path="/graders/grade.py", code="code", seed=42): + return self.grader._build_k8s_job(job_name, grader_path, code, seed) + + def test_job_name(self): + job = self._build(job_name="xqueue-grader-abc123") + assert job.metadata.name == "xqueue-grader-abc123" + + def test_image(self): + job = self._build() + assert job.spec.template.spec.containers[0].image == "course-grader:v1" + + def test_args_are_grader_and_seed(self): + # entrypoint takes GRADER_FILE SEED (no submission.py positional arg) + job = self._build(grader_path="/graders/ps07/grade.py", seed=99) + assert job.spec.template.spec.containers[0].args == ["/graders/ps07/grade.py", "99"] + + def test_submission_code_env(self): + job = self._build(code="x = 1") + env = {e.name: e.value for e in job.spec.template.spec.containers[0].env} + assert env["SUBMISSION_CODE"] == "x = 1" + + def test_grader_language_env_default(self): + job = self._build() + env = {e.name: e.value for e in job.spec.template.spec.containers[0].env} + assert env["GRADER_LANGUAGE"] == "en" + + def test_grader_language_from_config(self): + job = self.grader._build_k8s_job("job", "/g/grade.py", "code", 1, {"lang": "fr"}) + env = {e.name: e.value for e in job.spec.template.spec.containers[0].env} + assert env["GRADER_LANGUAGE"] == "fr" + + def test_hide_output_env_default_off(self): + job = self._build() + env = {e.name: e.value for e in job.spec.template.spec.containers[0].env} + assert env["HIDE_OUTPUT"] == "0" + + def test_hide_output_env_when_set(self): + job = self.grader._build_k8s_job("job", "/g/grade.py", "code", 1, {"hide_output": 
True}) + env = {e.name: e.value for e in job.spec.template.spec.containers[0].env} + assert env["HIDE_OUTPUT"] == "1" + + def test_resource_limits(self): + job = self._build() + limits = job.spec.template.spec.containers[0].resources.limits + assert limits["cpu"] == "1000m" + assert limits["memory"] == "512Mi" + + def test_tmp_empty_dir_volume_present(self): + job = self._build() + volumes = job.spec.template.spec.volumes + tmp_vol = next((v for v in volumes if v.name == "tmp"), None) + assert tmp_vol is not None, "emptyDir volume at /tmp is required" + assert tmp_vol.empty_dir is not None + + def test_tmp_volume_mounted_at_tmp(self): + job = self._build() + mounts = job.spec.template.spec.containers[0].volume_mounts + tmp_mount = next((m for m in mounts if m.name == "tmp"), None) + assert tmp_mount is not None + assert tmp_mount.mount_path == "/tmp" + + def test_read_only_root_filesystem(self): + job = self._build() + sc = job.spec.template.spec.containers[0].security_context + assert sc.read_only_root_filesystem is True + + def test_no_privilege_escalation(self): + job = self._build() + sc = job.spec.template.spec.containers[0].security_context + assert sc.allow_privilege_escalation is False + + def test_backoff_limit_zero(self): + job = self._build() + assert job.spec.backoff_limit == 0 + + def test_active_deadline_matches_timeout(self): + job = self._build() + assert job.spec.active_deadline_seconds == 30 + + +# --------------------------------------------------------------------------- +# _run_docker +# --------------------------------------------------------------------------- + +def _make_mock_client(exit_code=0, stdout_data=b'{"correct": true}', stderr_data=b""): + """Return a (client, container) pair pre-configured with given outputs.""" + container = mock.MagicMock() + container.wait.return_value = {"StatusCode": exit_code} + + def logs_side_effect(stdout=True, stderr=False): + if stderr and not stdout: + return stderr_data + return stdout_data + + 
container.logs.side_effect = logs_side_effect + client = mock.MagicMock() + client.containers.run.return_value = container + return client, container + + +class TestRunDocker: + def setup_method(self): + self.grader = make_grader(backend="docker", timeout=10) + + def _run(self, client, code="print('hi')", seed=42, grader_config=None): + with mock.patch("docker.from_env", return_value=client): + return self.grader._run_docker( + "/graders/ps07/grade.py", code, seed, grader_config or {} + ) + + def test_success_returns_stdout(self): + client, _ = _make_mock_client(stdout_data=b'{"correct": true}') + result = self._run(client) + assert result == b'{"correct": true}' + + def test_container_removed_on_success(self): + client, container = _make_mock_client() + self._run(client) + container.remove.assert_called_once_with(force=True) + + def test_container_removed_on_failure(self): + client, container = _make_mock_client(exit_code=1, stderr_data=b"Traceback...") + with pytest.raises(RuntimeError): + self._run(client) + container.remove.assert_called_once_with(force=True) + + def test_non_zero_exit_raises(self): + client, _ = _make_mock_client(exit_code=1, stderr_data=b"Error!") + with pytest.raises(RuntimeError, match="non-zero status"): + self._run(client) + + def test_stderr_included_in_error_message(self): + client, _ = _make_mock_client(exit_code=1, stderr_data=b"ImportError: missing module") + with pytest.raises(RuntimeError, match="ImportError"): + self._run(client) + + def test_timeout_raises_runtime_error(self): + client, container = _make_mock_client() + container.wait.side_effect = Exception("ReadTimeout") + with pytest.raises(RuntimeError, match="timed out"): + self._run(client) + + def test_missing_docker_sdk_raises(self): + with mock.patch.dict("sys.modules", {"docker": None}): + with pytest.raises(RuntimeError, match="'docker' package"): + self.grader._run_docker("/graders/grade.py", "code", 1, {}) + + def test_string_result_converted_to_bytes(self): + 
client, container = _make_mock_client() + container.logs.side_effect = None + container.logs.return_value = '{"correct": false}' + result = self._run(client) + assert isinstance(result, bytes) + + def test_entrypoint_args_are_grader_and_seed(self): + """Container command should be [grader_path, seed] — not 3 args.""" + client, _ = _make_mock_client() + self._run(client, seed=99) + call_kwargs = client.containers.run.call_args + command = call_kwargs.kwargs.get("command") or call_kwargs[1].get("command") + assert len(command) == 2 + assert command[1] == "99" + + def test_grader_language_passed_as_env(self): + client, _ = _make_mock_client() + self._run(client, grader_config={"lang": "es"}) + call_kwargs = client.containers.run.call_args + env = call_kwargs.kwargs.get("environment") or call_kwargs[1].get("environment") + assert env.get("GRADER_LANGUAGE") == "es" + + +# --------------------------------------------------------------------------- +# grade() public interface +# --------------------------------------------------------------------------- + +class TestGrade: + def setup_method(self): + self.grader = make_grader() + + def _grade(self, submission="x = 1", grader_config=None): + if grader_config is None: + grader_config = {} + return self.grader.grade( + grader_path=Path("/graders/ps07/grade.py"), + grader_config=grader_config, + submission=submission, + ) + + def test_skip_grader_returns_correct(self): + result = self._grade(grader_config={"skip_grader": True}) + assert result["correct"] is True + assert result["score"] == 1 + + def test_container_result_returned_directly(self): + grade_json = {"correct": True, "score": 1.0, "errors": [], "tests": []} + with mock.patch.object(self.grader, "_run", return_value=json.dumps(grade_json).encode()): + result = self._grade() + assert result["correct"] is True + assert result["score"] == 1.0 + + def test_container_failure_returns_error_dict(self): + with mock.patch.object(self.grader, "_run", 
side_effect=RuntimeError("container died")): + result = self._grade() + assert result["correct"] is False + assert result["errors"] + + def test_large_submission_logs_warning(self, caplog): + import logging + large_code = "x = 1\n" * 10_000 # ~70 KB + grade_json = {"correct": False, "score": 0.0, "errors": [], "tests": []} + # Mock the backend method so _run() still executes the size check. + with mock.patch.object( + self.grader, "_run_docker", return_value=json.dumps(grade_json).encode() + ): + with caplog.at_level(logging.WARNING): + self._grade(submission=large_code) + assert any("large" in r.message.lower() for r in caplog.records) diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index fa7e198..7107aad 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -9,8 +9,6 @@ No AppArmor or elevated host privileges are required. """ -import importlib -import importlib.util import json import random import time @@ -18,8 +16,6 @@ from pathlib import Path from .grader import Grader -from grader_support.gradelib import EndTest -from grader_support.graderutil import LANGUAGE _BACKEND_KUBERNETES = "kubernetes" @@ -27,28 +23,22 @@ _SUPPORTED_BACKENDS = (_BACKEND_KUBERNETES, _BACKEND_DOCKER) -def _truncate(out): - TOO_LONG = 5000 - if len(out) > TOO_LONG: - out = out[:TOO_LONG] + "...OUTPUT TRUNCATED" - return out - - -def _prepend_coding(code): - return "# coding: utf8\n" + code - - class ContainerGrader(Grader): """ Grades student submissions by running them inside an isolated container. + The grader scripts and staff answer are baked into the course-specific grader + image. The container runs the complete grading pipeline (preprocessing, running + both the staff answer and the student submission, comparing results) and returns + a JSON grade result. The watcher pod does not need local access to grader files. 
+ Configuration (passed as KWARGS in the conf.d JSON handler config): - grader_root - Path to the directory containing grader scripts. - Must be accessible inside the container at the same path, - either via a PVC (Kubernetes) or a bind mount (Docker). + grader_root - Path to the grader directory inside the container image. + For the Docker backend this is bind-mounted from the host; + for Kubernetes the scripts are baked into the image. image - Docker image to run. Should extend grader-base and include - all course-specific dependencies. + all course-specific grader scripts and dependencies. backend - "kubernetes" (default) or "docker". namespace - Kubernetes namespace to create Jobs in (default: "default"). cpu_limit - CPU limit for the grading container (default: "500m"). @@ -83,21 +73,39 @@ def __init__( # Internal: container execution # ------------------------------------------------------------------ - def _run(self, grader_path, code, seed): + def _run(self, grader_path, code, seed, grader_config=None): """ - Invoke the grader_support runner inside a container. + Run the complete grading pipeline inside a container. + + The container entrypoint (grader_support.entrypoint) handles: + - Loading the grader module (baked into the image) + - Preprocessing both staff answer and student submission + - Running both through grader_support.run + - Comparing results and returning the final grade JSON - Returns the raw stdout bytes from the container (JSON from run.py). + Returns the raw stdout bytes (JSON grade result). Raises RuntimeError on timeout or non-zero exit. """ + # Warn on large submissions: K8s env vars contribute to the Pod object + # stored in etcd (limit ~1.5 MB total); very large submissions may cause + # job creation to fail. Submissions > 32 KB are unusual for course work. + code_bytes = len(code.encode("utf-8")) + if code_bytes > 32 * 1024: + self.log.warning( + "Submission code is large (%d bytes). 
Very large submissions may " + "exceed Kubernetes API object size limits when passed via env var.", + code_bytes, + ) + if grader_config is None: + grader_config = {} if self.backend == _BACKEND_KUBERNETES: - return self._run_kubernetes(grader_path, code, seed) - return self._run_docker(grader_path, code, seed) + return self._run_kubernetes(grader_path, code, seed, grader_config) + return self._run_docker(grader_path, code, seed, grader_config) - def _run_kubernetes(self, grader_path, code, seed): + def _run_kubernetes(self, grader_path, code, seed, grader_config): """Create a Kubernetes Job, wait for it, collect stdout, delete it.""" try: - from kubernetes import client as k8s_client, config as k8s_config, watch as k8s_watch + from kubernetes import client as k8s_client, config as k8s_config except ImportError: raise RuntimeError( "The 'kubernetes' package is required for the kubernetes backend. " @@ -113,7 +121,7 @@ def _run_kubernetes(self, grader_path, code, seed): batch_v1 = k8s_client.BatchV1Api() core_v1 = k8s_client.CoreV1Api() - job_manifest = self._build_k8s_job(job_name, grader_path, code, seed) + job_manifest = self._build_k8s_job(job_name, grader_path, code, seed, grader_config) try: batch_v1.create_namespaced_job(namespace=self.namespace, body=job_manifest) @@ -133,16 +141,17 @@ def _run_kubernetes(self, grader_path, code, seed): except Exception: self.log.warning("Failed to delete Job %s", job_name, exc_info=True) - def _build_k8s_job(self, job_name, grader_path, code, seed): - """Return a kubernetes Job manifest dict for the given grading run.""" + def _build_k8s_job(self, job_name, grader_path, code, seed, grader_config=None): + """Return a kubernetes Job manifest for the given grading run.""" from kubernetes import client as k8s_client - # Pass the absolute in-container path to the grader file. The - # entrypoint handles absolute paths by adding the parent directory to - # sys.path before importing the module, so Python can find the file. 
- # working_dir must stay at /grader (the WORKDIR of the base image) - # so that `python -m grader_support.entrypoint` can locate the - # grader_support package. + if grader_config is None: + grader_config = {} + + # The entrypoint takes: GRADER_FILE SEED + # The grader scripts are baked into the course-specific image at grader_path. + # working_dir must stay at /grader (the WORKDIR of the base image) so that + # `python -m grader_support.entrypoint` can locate the grader_support package. grader_abs = str(grader_path) return k8s_client.V1Job( @@ -174,13 +183,22 @@ def _build_k8s_job(self, job_name, grader_path, code, seed): k8s_client.V1Container( name="grader", image=self.image, - args=[grader_abs, "submission.py", str(seed)], + # entrypoint signature: GRADER_FILE SEED + args=[grader_abs, str(seed)], working_dir="/grader", env=[ k8s_client.V1EnvVar( name="SUBMISSION_CODE", value=code, ), + k8s_client.V1EnvVar( + name="GRADER_LANGUAGE", + value=grader_config.get("lang", "en"), + ), + k8s_client.V1EnvVar( + name="HIDE_OUTPUT", + value="1" if grader_config.get("hide_output") else "0", + ), ], resources=k8s_client.V1ResourceRequirements( limits={ @@ -245,7 +263,7 @@ def _wait_and_collect_k8s(self, batch_v1, core_v1, job_name, timeout): log = core_v1.read_namespaced_pod_log(name=pod_name, namespace=self.namespace) return log.encode("utf-8") - def _run_docker(self, grader_path, code, seed): + def _run_docker(self, grader_path, code, seed, grader_config=None): """Run a local Docker container and return stdout bytes.""" try: import docker as docker_sdk @@ -255,14 +273,22 @@ def _run_docker(self, grader_path, code, seed): "Install it with: uv add docker" ) + if grader_config is None: + grader_config = {} + grader_dir = str(Path(grader_path).parent.resolve()) grader_rel = str(Path(grader_path).name) # Mount the problem directory at /graders/ (not /grader/ which would # overwrite the base image's grader_support package). 
Pass the grader - # as an absolute in-container path so the entrypoint can add its parent - # directory to sys.path before importing. + # as an absolute in-container path. container_grader_path = f"/graders/{grader_rel}" + env = { + "SUBMISSION_CODE": code, + "GRADER_LANGUAGE": grader_config.get("lang", "en"), + "HIDE_OUTPUT": "1" if grader_config.get("hide_output") else "0", + } + client = docker_sdk.from_env() try: # Run detached so we can enforce a wall-clock timeout via container.wait(). @@ -270,9 +296,10 @@ def _run_docker(self, grader_path, code, seed): # lets us call container.wait(timeout=...) to cap execution time. container = client.containers.run( image=self.image, - command=[container_grader_path, "submission.py", str(seed)], + # entrypoint signature: GRADER_FILE SEED + command=[container_grader_path, str(seed)], working_dir="/grader", - environment={"SUBMISSION_CODE": code}, + environment=env, volumes={grader_dir: {"bind": "/graders", "mode": "ro"}}, mem_limit=self.memory_limit, nano_cpus=int(_parse_cpu_millis(self.cpu_limit) * 1_000_000), @@ -291,6 +318,15 @@ def _run_docker(self, grader_path, code, seed): f"stderr: {stderr[:2000] if stderr else ''}" ) result = container.logs(stdout=True, stderr=False) + except Exception as exc: + # Catch ReadTimeout (requests.exceptions.ReadTimeout) from container.wait() + # and any other unexpected error, converting to a clear RuntimeError. + exc_name = type(exc).__name__ + if "Timeout" in exc_name or "timeout" in str(exc).lower(): + raise RuntimeError( + f"Grading container timed out after {self.timeout}s." 
+ ) from exc + raise finally: container.remove(force=True) except docker_sdk.errors.ContainerError as exc: @@ -301,13 +337,22 @@ def _run_docker(self, grader_path, code, seed): return result if isinstance(result, bytes) else result.encode("utf-8") # ------------------------------------------------------------------ - # Public grading interface (mirrors JailedGrader.grade) + # Public grading interface # ------------------------------------------------------------------ def grade(self, grader_path, grader_config, submission): - import gettext - import sys + """ + Grade a student submission by running the full pipeline inside a container. + The container (grader_support.entrypoint) handles all grading steps: + - Loading the grader module (baked into the image) + - Validating the submission format + - Preprocessing and running the staff answer and student submission + - Comparing results test-by-test + - Returning the final grade as JSON + + Returns a dict with keys: correct, score, errors, tests. + """ if not isinstance(submission, str): self.log.warning("Submission is NOT unicode") @@ -324,148 +369,22 @@ def grade(self, grader_path, grader_config, submission): self.log.debug("Skipping the grader.") return results - lang = grader_config.get("lang", LANGUAGE) - locale_dir = self.grader_root / "conf" / "locale" - if locale_dir.exists(): - trans = gettext.translation( - "graders", localedir=str(locale_dir), fallback=True, languages=[lang] - ) - trans.install(names=None) - - # Load the grader module to access its test definitions and preprocessors. - # The module runs outside the sandbox (trusted code). 
- spec = importlib.util.spec_from_file_location("grader_module", str(grader_path)) - grader_module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(grader_module) - grader = grader_module.grader - - errors = grader.input_errors(submission) - if errors: - results["errors"].extend(errors) - return results - - answer_path = Path(grader_path).parent / "answer.py" - with open(answer_path, "rb") as f: - answer = f.read().decode("utf-8") - - processed_answer = _prepend_coding(grader.preprocess(answer)) - processed_submission = _prepend_coding(grader.preprocess(submission)) - seed = str(random.randint(0, 20000)) - # Run the staff answer inside the container. - expected_ok = False - expected_outputs = None - expected_exc = None try: - expected_outputs = self._run(grader_path, processed_answer, seed) - if expected_outputs: - expected = json.loads(expected_outputs.decode("utf-8")) - expected_ok = True + output = self._run(grader_path, submission, seed, grader_config) + grade_result = json.loads(output.decode("utf-8")) + return grade_result except Exception: - expected_exc = sys.exc_info() - else: - if expected_ok: - if ( - expected["exceptions"] - or expected["grader"]["status"] != "ok" - or expected["submission"]["status"] != "ok" - ): - expected_ok = False - - if not expected_ok: - results["errors"].append( - "There was a problem running the staff solution (Staff debug)." - ) - self.log.error( - "Couldn't run staff solution. grader = %s, output: %r", - grader_path, - expected_outputs, - exc_info=expected_exc, + self.log.exception( + "Grading container failed. grader = %s", grader_path ) - return results - - # Run the student submission inside the container. 
- actual_ok = False - actual_outputs = None - actual_exc = None - try: - actual_outputs = self._run(grader_path, processed_submission, seed) - if actual_outputs: - actual = json.loads(actual_outputs.decode("utf-8")) - actual_ok = True - else: - results["errors"].append( - "There was a problem running your solution (Staff debug)." - ) - except Exception: - actual_exc = sys.exc_info() - else: - if actual_ok and actual["grader"]["status"] == "ok": - if actual["submission"]["status"] != "ok": - shown_error = actual["submission"]["exception"] or ( - "There was an error thrown while running your solution." - ) - results["errors"].append(shown_error) - else: - actual_ok = False - - if not actual_ok: results["errors"].append( - "We couldn't run your solution (Staff debug)." - ) - self.log.error( - "Couldn't run student solution. grader = %s, output: %r", - grader_path, - actual_outputs, - exc_info=actual_exc, + "There was a problem running your code (Staff debug). " + "Please contact the course staff for assistance." ) return results - corrects = [] - if not results["errors"]: - expected_results = expected["results"] - actual_results = actual["results"] - if len(expected_results) != len(actual_results): - results["errors"].append( - "Something went wrong: different numbers of tests ran for " - "your code and for our reference code." - ) - return results - - for test, exp, act in zip(grader.tests(), expected_results, actual_results): - exp_short_desc, exp_long_desc, exp_output = exp - act_short_desc, act_long_desc, act_output = act - if exp_short_desc != act_short_desc: - results["errors"].append( - "Something went wrong: tests don't match up." 
- ) - return results - act_output = _truncate(act_output) - try: - correct = test.compare_results(exp_output, act_output) - except EndTest as e: - if e is not None: - act_output += "\n" - act_output += f"*** ERROR: {e} ***" - correct = False - corrects.append(correct) - if not grader_config.get("hide_output", False): - results["tests"].append( - (exp_short_desc, exp_long_desc, correct, exp_output, act_output) - ) - - n = len(corrects) - results["correct"] = all(corrects) and n > 0 - results["score"] = float(sum(corrects)) / n if n > 0 else 0 - - if n == 0 and not results["errors"]: - results["errors"] = [ - "There was a problem while running your code (Staff debug). " - "Please contact the course staff for assistance." - ] - - return results def _parse_cpu_millis(cpu_str): From 2c3b82ff1838976297045ce6d8b174ebf658977d Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Tue, 17 Mar 2026 09:17:15 -0400 Subject: [PATCH 13/25] refactor: replace statsd/newrelic with OpenTelemetry; add 12-factor settings - Remove dogstatsd-python dependency; replace statsd instrumentation in grader.py with OpenTelemetry counters and a histogram - Add xqueue_watcher/metrics.py: configure_metrics() wires a MeterProvider with an OTLP HTTP exporter when OTEL_EXPORTER_OTLP_ENDPOINT is set; all four instruments (process_item, grader_payload_error, grading_time, replies) defined at module level against the global proxy meter - Call configure_metrics() from Manager.configure_from_directory() so the real provider is installed before any submissions are processed - Add xqueue_watcher/env_settings.py: get_manager_config_from_env() reads all manager config from XQWATCHER_* environment variables, compatible with 12-factor / Kubernetes deployment patterns - Remove newrelic from the production optional-dependency group and from the edx.org Dockerfile stage; the stage now runs xqueue-watcher directly - Add opentelemetry-api, opentelemetry-sdk, opentelemetry-exporter-otlp-proto-http to core dependencies; 
regenerate uv.lock - Add tests/test_env_settings.py and tests/test_metrics.py Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 4 +- pyproject.toml | 7 +- tests/test_env_settings.py | 83 +++++++++++++++ tests/test_metrics.py | 90 ++++++++++++++++ uv.lock | 181 ++++++++++++++++++++++++++------- xqueue_watcher/env_settings.py | 90 ++++++++++++++++ xqueue_watcher/grader.py | 13 ++- xqueue_watcher/manager.py | 3 + xqueue_watcher/metrics.py | 78 ++++++++++++++ 9 files changed, 501 insertions(+), 48 deletions(-) create mode 100644 tests/test_env_settings.py create mode 100644 tests/test_metrics.py create mode 100644 xqueue_watcher/env_settings.py create mode 100644 xqueue_watcher/metrics.py diff --git a/Dockerfile b/Dockerfile index b6243a4..c4f2008 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,7 +35,5 @@ USER app CMD ["xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] FROM base AS edx.org -USER root -RUN uv sync --frozen --extra production USER app -CMD ["newrelic-admin", "run-program", "xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] +CMD ["xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] diff --git a/pyproject.toml b/pyproject.toml index 5fa012a..5b19727 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,9 +10,11 @@ readme = "README.md" requires-python = ">=3.11" license = { file = "LICENSE.TXT" } dependencies = [ - "dogstatsd-python", "docker>=7.0.0", "kubernetes>=29.0.0", + "opentelemetry-api", + "opentelemetry-exporter-otlp-proto-http", + "opentelemetry-sdk", "requests", ] @@ -20,9 +22,6 @@ dependencies = [ codejail = [ "edx-codejail", ] -production = [ - "newrelic", -] [project.scripts] xqueue-watcher = "xqueue_watcher.manager:main" diff --git a/tests/test_env_settings.py b/tests/test_env_settings.py new file mode 100644 index 0000000..4386040 --- /dev/null +++ b/tests/test_env_settings.py @@ -0,0 +1,83 @@ +import unittest +from unittest.mock import patch + +from xqueue_watcher.env_settings import 
get_manager_config_from_env +from xqueue_watcher.settings import MANAGER_CONFIG_DEFAULTS + + +class TestGetManagerConfigFromEnv(unittest.TestCase): + def test_defaults_when_no_env_vars_set(self): + with patch.dict("os.environ", {}, clear=False): + config = get_manager_config_from_env() + self.assertEqual(config, MANAGER_CONFIG_DEFAULTS) + + def test_poll_time_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_POLL_TIME": "30"}): + config = get_manager_config_from_env() + self.assertEqual(config["POLL_TIME"], 30) + + def test_requests_timeout_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_REQUESTS_TIMEOUT": "5"}): + config = get_manager_config_from_env() + self.assertEqual(config["REQUESTS_TIMEOUT"], 5) + + def test_poll_interval_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_POLL_INTERVAL": "3"}): + config = get_manager_config_from_env() + self.assertEqual(config["POLL_INTERVAL"], 3) + + def test_login_poll_interval_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_LOGIN_POLL_INTERVAL": "15"}): + config = get_manager_config_from_env() + self.assertEqual(config["LOGIN_POLL_INTERVAL"], 15) + + def test_http_basic_auth_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_HTTP_BASIC_AUTH": "user:secret"}): + config = get_manager_config_from_env() + self.assertEqual(config["HTTP_BASIC_AUTH"], "user:secret") + + def test_http_basic_auth_empty_string_returns_none(self): + with patch.dict("os.environ", {"XQWATCHER_HTTP_BASIC_AUTH": ""}): + config = get_manager_config_from_env() + self.assertIsNone(config["HTTP_BASIC_AUTH"]) + + def test_follow_client_redirects_true_values(self): + for truthy in ("true", "True", "TRUE", "1", "yes", "YES"): + with self.subTest(value=truthy): + with patch.dict("os.environ", {"XQWATCHER_FOLLOW_CLIENT_REDIRECTS": truthy}): + config = get_manager_config_from_env() + self.assertTrue(config["FOLLOW_CLIENT_REDIRECTS"]) + + def test_follow_client_redirects_false_values(self): + for falsy in 
("false", "False", "FALSE", "0", "no", "NO"): + with self.subTest(value=falsy): + with patch.dict("os.environ", {"XQWATCHER_FOLLOW_CLIENT_REDIRECTS": falsy}): + config = get_manager_config_from_env() + self.assertFalse(config["FOLLOW_CLIENT_REDIRECTS"]) + + def test_follow_client_redirects_default_is_false(self): + with patch.dict("os.environ", {}, clear=False): + config = get_manager_config_from_env() + self.assertFalse(config["FOLLOW_CLIENT_REDIRECTS"]) + + def test_all_env_vars_together(self): + env = { + "XQWATCHER_HTTP_BASIC_AUTH": "admin:pass", + "XQWATCHER_POLL_TIME": "20", + "XQWATCHER_REQUESTS_TIMEOUT": "3", + "XQWATCHER_POLL_INTERVAL": "2", + "XQWATCHER_LOGIN_POLL_INTERVAL": "10", + "XQWATCHER_FOLLOW_CLIENT_REDIRECTS": "true", + } + with patch.dict("os.environ", env): + config = get_manager_config_from_env() + self.assertEqual(config["HTTP_BASIC_AUTH"], "admin:pass") + self.assertEqual(config["POLL_TIME"], 20) + self.assertEqual(config["REQUESTS_TIMEOUT"], 3) + self.assertEqual(config["POLL_INTERVAL"], 2) + self.assertEqual(config["LOGIN_POLL_INTERVAL"], 10) + self.assertTrue(config["FOLLOW_CLIENT_REDIRECTS"]) + + def test_returns_all_expected_keys(self): + config = get_manager_config_from_env() + self.assertEqual(set(config.keys()), set(MANAGER_CONFIG_DEFAULTS.keys())) diff --git a/tests/test_metrics.py b/tests/test_metrics.py new file mode 100644 index 0000000..765d680 --- /dev/null +++ b/tests/test_metrics.py @@ -0,0 +1,90 @@ +import unittest +from unittest.mock import patch, MagicMock + +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import InMemoryMetricReader + +from xqueue_watcher.metrics import ( + _build_meter_provider, + _METER_NAME, + _DEFAULT_SERVICE_NAME, +) + + +class TestBuildMeterProvider(unittest.TestCase): + def test_returns_meter_provider(self): + with patch.dict("os.environ", {}, clear=False): + provider = _build_meter_provider() + self.assertIsInstance(provider, MeterProvider) + + def 
test_no_otlp_endpoint_means_no_readers(self): + env = {"OTEL_EXPORTER_OTLP_ENDPOINT": ""} + with patch.dict("os.environ", env): + provider = _build_meter_provider() + # No PeriodicExportingMetricReader attached → internal reader list is empty. + self.assertEqual(provider._sdk_config.metric_readers, []) + + def test_otlp_endpoint_adds_reader(self): + env = {"OTEL_EXPORTER_OTLP_ENDPOINT": "http://otel-collector:4318"} + mock_exporter = MagicMock() + mock_reader = MagicMock() + with patch.dict("os.environ", env), \ + patch("opentelemetry.exporter.otlp.proto.http.metric_exporter.OTLPMetricExporter", + return_value=mock_exporter) as MockExporter, \ + patch("xqueue_watcher.metrics.PeriodicExportingMetricReader", + return_value=mock_reader) as MockReader: + provider = _build_meter_provider() + MockExporter.assert_called_once() + MockReader.assert_called_once_with(mock_exporter) + self.assertIn(mock_reader, provider._sdk_config.metric_readers) + + def test_default_service_name_applied(self): + # Empty OTEL_SERVICE_NAME should still fall back to the built-in default. 
+ with patch.dict("os.environ", {"OTEL_SERVICE_NAME": ""}): + provider = _build_meter_provider() + attrs = provider._sdk_config.resource.attributes + self.assertEqual(attrs.get("service.name"), _DEFAULT_SERVICE_NAME) + + def test_custom_service_name_applied(self): + env = {"OTEL_SERVICE_NAME": "my-grader"} + with patch.dict("os.environ", env): + provider = _build_meter_provider() + attrs = provider._sdk_config.resource.attributes + self.assertEqual(attrs.get("service.name"), "my-grader") + + +class TestInstruments(unittest.TestCase): + """Verify instruments record correctly against an in-memory provider.""" + + def setUp(self): + self.reader = InMemoryMetricReader() + self.provider = MeterProvider(metric_readers=[self.reader]) + self.meter = self.provider.get_meter(_METER_NAME) + + def _metric_names(self): + return {m.name for m in self.reader.get_metrics_data().resource_metrics[0].scope_metrics[0].metrics} + + def test_process_item_counter(self): + counter = self.meter.create_counter("xqueuewatcher.process_item") + counter.add(1) + counter.add(2) + names = self._metric_names() + self.assertIn("xqueuewatcher.process_item", names) + + def test_grader_payload_error_counter(self): + counter = self.meter.create_counter("xqueuewatcher.grader_payload_error") + counter.add(1) + names = self._metric_names() + self.assertIn("xqueuewatcher.grader_payload_error", names) + + def test_grading_time_histogram(self): + hist = self.meter.create_histogram("xqueuewatcher.grading_time", unit="s") + hist.record(0.42) + names = self._metric_names() + self.assertIn("xqueuewatcher.grading_time", names) + + def test_replies_counter(self): + counter = self.meter.create_counter("xqueuewatcher.replies") + counter.add(1) + names = self._metric_names() + self.assertIn("xqueuewatcher.replies", names) diff --git a/uv.lock b/uv.lock index 43fa4e7..747fd5b 100644 --- a/uv.lock +++ b/uv.lock @@ -211,12 +211,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, ] -[[package]] -name = "dogstatsd-python" -version = "0.5.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/1e/5214d0636f0c3ac0647c70b7c47b96a8b8c5e0202726b0a6cf332c8492ed/dogstatsd-python-0.5.6.tar.gz", hash = "sha256:e2ff2b817995d2ce01bc3d66572be0608b9371f29aae16aa93c0d6a3a58489de", size = 3272, upload-time = "2015-03-20T14:15:50.887Z" } - [[package]] name = "durationpy" version = "0.10" @@ -238,6 +232,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8f/82/8077069f7161d70257abd40ff9eddcbf3c1e4fbe66e23c933bf9a3c6ccb0/edx_codejail-4.1.0-py3-none-any.whl", hash = "sha256:0b2779131136117929bb8e58c302062dc3e98ee6526f3eb6936b1738daa7a14c", size = 25463, upload-time = "2025-11-07T15:30:14.747Z" }, ] +[[package]] +name = "googleapis-common-protos" +version = "1.73.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/96/a0205167fa0154f4a542fd6925bdc63d039d88dab3588b875078107e6f06/googleapis_common_protos-1.73.0.tar.gz", hash = "sha256:778d07cd4fbeff84c6f7c72102f0daf98fa2bfd3fa8bea426edc545588da0b5a", size = 147323, upload-time = "2026-03-06T21:53:09.727Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/28/23eea8acd65972bbfe295ce3666b28ac510dfcb115fac089d3edb0feb00a/googleapis_common_protos-1.73.0-py3-none-any.whl", hash = "sha256:dfdaaa2e860f242046be561e6d6cb5c5f1541ae02cfbcb034371aadb2942b4e8", size = 297578, upload-time = "2026-03-06T21:52:33.933Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -247,6 +253,18 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -286,36 +304,94 @@ wheels = [ ] [[package]] -name = "newrelic" -version = "11.5.0" +name = "oauthlib" +version = "3.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } -sdist = { url 
= "https://files.pythonhosted.org/packages/19/09/7031e28416bd79111e4dc2986f54bbfdbd707bf712210388ff649a2528f0/newrelic-11.5.0.tar.gz", hash = "sha256:39607be7a43d1bff59119039b0e9c145bf27debbc2075ec24549817a8faf5a5c", size = 1415240, upload-time = "2026-02-20T00:17:29.971Z" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/14/19628204ea57072ee70e1aab87896058b731ad32aa2c62d0b3f5257417d5/newrelic-11.5.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3adbe17625cb62495b108b3ed9ecb3f2ed248e024c1f07e6fce8dd86d8394d1", size = 921510, upload-time = "2026-02-20T00:16:43.125Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b7/ae54f75597b9c61fcde31745ca007f9a157db9ecef416451193bad323fe1/newrelic-11.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de40cb14a0dea7c29d9d0155c2a3aebd56c2fa12c9066692bf38a8df90f3d6a6", size = 923513, upload-time = "2026-02-20T00:16:45.082Z" }, - { url = "https://files.pythonhosted.org/packages/ca/93/b4d424bbc65cd208f0cc8aa468b1b95b718bf00c6880f9f5ea8d2b90f488/newrelic-11.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6616b785a2deccb74ae7313799dcba4929ee9c707fbf1f15bc7ef0364facf7f4", size = 921816, upload-time = "2026-02-20T00:16:47.288Z" }, - { url = "https://files.pythonhosted.org/packages/af/ba/a2d09b8dc0029626155d671ae8c78e0b29461a9af4cd5103ee7d4c99a484/newrelic-11.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:07b0dce0d4d55679a80f617e03d8292b1dc1e8b4391f5b76cf13dc0768ed5eeb", size = 921002, upload-time = 
"2026-02-20T00:16:49.185Z" }, - { url = "https://files.pythonhosted.org/packages/dd/1e/12979ec4cfc91ab44b27a7038b95d97f54ec47d9ecd1fc4518ded4c9a5cd/newrelic-11.5.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6d47ba87b86d49b977e96add31c0d41fa2cf7044622e49080d5fb1b0a5715799", size = 928435, upload-time = "2026-02-20T00:16:51.163Z" }, - { url = "https://files.pythonhosted.org/packages/0e/34/502ee15f0357f01a9430a371f5b01aec7f0be2038e94f46f650c9481c703/newrelic-11.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:710bc92d0a74ca429b19d39d04b3e619f789547f25f2c160da2844de8d5cc688", size = 929082, upload-time = "2026-02-20T00:16:53.027Z" }, - { url = "https://files.pythonhosted.org/packages/86/20/d5af4fec5c812d87965bcc538efb559a48d026799d535d8908166a53cde7/newrelic-11.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:445293bd72050f04eb15385327903a2d2e339e42b1d284c850cf0686c37eec4a", size = 927249, upload-time = "2026-02-20T00:16:54.973Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ce/6c4a535344a3675deea29cf8b9e12ae75cf74377976cdea44fdf6f650072/newrelic-11.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0c48016cb9ed11e5dbf9ffd41ed4b2e0e3223a94677574ab9f892d2c2010f181", size = 927830, upload-time = "2026-02-20T00:16:56.963Z" }, - { url = "https://files.pythonhosted.org/packages/d3/ff/efa664e6328127b64e1a225d5fc7680208db5cea9154d33e638759c029d9/newrelic-11.5.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b0dd6bc4518c40dd93b00d39fbb6fbe73db0baa7bc78c5ffab8fac645467f818", size = 928545, upload-time = "2026-02-20T00:16:59.049Z" }, - { url = "https://files.pythonhosted.org/packages/52/a6/040d2b15c1f93a6ca97507042cbe533c7493898b94d2ea0444de273ffe58/newrelic-11.5.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:e2ad2f2d5edb209c64ad68055ac41ae5e40a644a29729eb9fb3f564b8ee07974", size = 929206, upload-time = "2026-02-20T00:17:00.679Z" }, - { url = "https://files.pythonhosted.org/packages/09/f9/bcb9392e8ed966e907e7445d65316503c6b5948f135a3890164e91435bc5/newrelic-11.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:884c789ab4850dfae8d6cfc9441ff4ab0b82636a6e6564d04a30392ae6c23bdc", size = 927368, upload-time = "2026-02-20T00:17:02.997Z" }, - { url = "https://files.pythonhosted.org/packages/f0/8c/97c2bdc7d45161d601f839c737c0dd1b53351e3819e35b6c29945444c231/newrelic-11.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc8f15350f75eed04b64a4258038d7b33fdc568e3c4ef2dad4088302d20126ff", size = 927956, upload-time = "2026-02-20T00:17:04.655Z" }, - { url = "https://files.pythonhosted.org/packages/59/23/d626af55d35e46f822da657f5a0f117b4947b1e36b90371352d7e9b3c304/newrelic-11.5.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:051227445ab789128a490eb9cb3fe50af271488c9f15012e3eb937c46c4c5a1c", size = 927552, upload-time = "2026-02-20T00:17:06.283Z" }, - { url = "https://files.pythonhosted.org/packages/bc/59/2bb56313a48ce69ddbdca25cf49ea4deca76bd9922e30f2b07bf0279861c/newrelic-11.5.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e7085cbe9e2fd2c98fbcd67e82b245a9924522373033d80a712bb68b534fffa8", size = 928985, upload-time = "2026-02-20T00:17:08.846Z" }, - { url = "https://files.pythonhosted.org/packages/07/a6/0e5843797442620b8874ec91f5c242dddb49361c3edbf3b9879c683d8e48/newrelic-11.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:799c7c7b1cfa0e7a69982ef4f356eba6063f234d49c2f66e01205220768b89c8", size = 927158, upload-time = "2026-02-20T00:17:10.52Z" }, - { url = "https://files.pythonhosted.org/packages/e7/60/5727f3a6c64374e39f78ef1434fbb77a032c7a2d6d570c4fb9c81154f823/newrelic-11.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:c2d61a523a5fcdaee58c547081b81d61ae613dd64a1c4c781c715c6e4bf5a54a", size = 927059, upload-time = "2026-02-20T00:17:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, ] [[package]] -name = "oauthlib" -version = "3.3.1" +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.40.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/51/bc/1559d46557fe6eca0b46c88d4c2676285f1f3be2e8d06bb5d15fbffc814a/opentelemetry_exporter_otlp_proto_common-1.40.0.tar.gz", hash = "sha256:1cbee86a4064790b362a86601ee7934f368b81cd4cc2f2e163902a6e7818a0fa", size = 20416, upload-time = "2026-03-04T14:17:23.801Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ca/8f122055c97a932311a3f640273f084e738008933503d0c2563cd5d591fc/opentelemetry_exporter_otlp_proto_common-1.40.0-py3-none-any.whl", hash = "sha256:7081ff453835a82417bf38dccf122c827c3cbc94f2079b03bba02a3165f25149", size = 18369, upload-time = "2026-03-04T14:17:04.796Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.40.0" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/fa/73d50e2c15c56be4d000c98e24221d494674b0cc95524e2a8cb3856d95a4/opentelemetry_exporter_otlp_proto_http-1.40.0.tar.gz", hash = "sha256:db48f5e0f33217588bbc00274a31517ba830da576e59503507c839b38fa0869c", size = 17772, upload-time = "2026-03-04T14:17:25.324Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/3a/8865d6754e61c9fb170cdd530a124a53769ee5f740236064816eb0ca7301/opentelemetry_exporter_otlp_proto_http-1.40.0-py3-none-any.whl", hash = "sha256:a8d1dab28f504c5d96577d6509f80a8150e44e8f45f82cdbe0e34c99ab040069", size = 19960, upload-time = "2026-03-04T14:17:07.153Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/77/dd38991db037fdfce45849491cb61de5ab000f49824a00230afb112a4392/opentelemetry_proto-1.40.0.tar.gz", hash = "sha256:03f639ca129ba513f5819810f5b1f42bcb371391405d99c168fe6937c62febcd", size = 45667, upload-time = "2026-03-04T14:17:31.194Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/b2/189b2577dde745b15625b3214302605b1353436219d42b7912e77fa8dc24/opentelemetry_proto-1.40.0-py3-none-any.whl", hash = "sha256:266c4385d88923a23d63e353e9761af0f47a6ed0d486979777fe4de59dc9b25f", size = 72073, upload-time = "2026-03-04T14:17:16.673Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/fd/3c3125b20ba18ce2155ba9ea74acb0ae5d25f8cd39cfd37455601b7955cc/opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2", size = 184252, upload-time = "2026-03-04T14:17:31.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/c5/6a852903d8bfac758c6dc6e9a68b015d3c33f2f1be5e9591e0f4b69c7e0a/opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1", size = 141951, upload-time = "2026-03-04T14:17:17.961Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.61b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/c0/4ae7973f3c2cfd2b6e321f1675626f0dab0a97027cc7a297474c9c8f3d04/opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a", size = 145755, upload-time = "2026-03-04T14:17:32.664Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/37/cc6a55e448deaa9b27377d087da8615a3416d8ad523d5960b78dbeadd02a/opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2", size = 231621, upload-time = "2026-03-04T14:17:19.33Z" }, ] [[package]] @@ -336,6 +412,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, + { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash 
= "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, + { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -552,6 +643,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + [[package]] name = "urllib3" version = "2.6.3" @@ -576,8 +676,10 @@ version = "0.3" source = { editable = "." 
} dependencies = [ { name = "docker" }, - { name = "dogstatsd-python" }, { name = "kubernetes" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-sdk" }, { name = "requests" }, ] @@ -585,9 +687,6 @@ dependencies = [ codejail = [ { name = "edx-codejail" }, ] -production = [ - { name = "newrelic" }, -] [package.dev-dependencies] dev = [ @@ -600,13 +699,14 @@ dev = [ [package.metadata] requires-dist = [ { name = "docker", specifier = ">=7.0.0" }, - { name = "dogstatsd-python" }, { name = "edx-codejail", marker = "extra == 'codejail'" }, { name = "kubernetes", specifier = ">=29.0.0" }, - { name = "newrelic", marker = "extra == 'production'" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-sdk" }, { name = "requests" }, ] -provides-extras = ["codejail", "production"] +provides-extras = ["codejail"] [package.metadata.requires-dev] dev = [ @@ -615,3 +715,12 @@ dev = [ { name = "mock" }, { name = "pytest-cov" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] diff --git a/xqueue_watcher/env_settings.py b/xqueue_watcher/env_settings.py new file mode 100644 index 0000000..70379bd --- /dev/null +++ b/xqueue_watcher/env_settings.py @@ -0,0 +1,90 @@ +""" +12-factor / Kubernetes-compatible settings for 
xqueue-watcher. + +All manager configuration values can be supplied via environment variables +using the ``XQWATCHER_`` prefix. This module mirrors the keys defined in +:data:`xqueue_watcher.settings.MANAGER_CONFIG_DEFAULTS` so it can be used +as a drop-in source of configuration alongside or instead of the JSON file +read by :func:`xqueue_watcher.settings.get_manager_config_values`. + +Environment variables +--------------------- +XQWATCHER_HTTP_BASIC_AUTH + HTTP Basic Auth credentials as ``username:password``. Unset or empty + means no authentication (equivalent to ``None``). +XQWATCHER_POLL_TIME + Seconds between liveness checks of client threads (integer, default 10). +XQWATCHER_REQUESTS_TIMEOUT + Timeout in seconds for outbound HTTP requests (integer, default 1). +XQWATCHER_POLL_INTERVAL + Seconds between queue-polling attempts (integer, default 1). +XQWATCHER_LOGIN_POLL_INTERVAL + Seconds between login-retry attempts (integer, default 5). +XQWATCHER_FOLLOW_CLIENT_REDIRECTS + Follow HTTP redirects when set to ``1``, ``true``, or ``yes`` + (case-insensitive); other values use the default (boolean, default false). +""" + +import os + +from .settings import MANAGER_CONFIG_DEFAULTS + +_PREFIX = "XQWATCHER_" + + +def _get_bool(name: str, default: bool) -> bool: + raw = os.environ.get(name, "").strip().lower() + if raw in ("1", "true", "yes"): + return True + if raw in ("0", "false", "no"): + return False + return default + + +def _get_int(name: str, default: int) -> int: + raw = os.environ.get(name, "").strip() + if raw: + return int(raw) + return default + + +def _get_auth(name: str, default): + raw = os.environ.get(name, "").strip() + if raw: + return raw + return default + + +def get_manager_config_from_env() -> dict: + """ + Return manager configuration populated from environment variables. + + Values not present in the environment fall back to + :data:`~xqueue_watcher.settings.MANAGER_CONFIG_DEFAULTS`. 
+ """ + return { + "HTTP_BASIC_AUTH": _get_auth( + f"{_PREFIX}HTTP_BASIC_AUTH", + MANAGER_CONFIG_DEFAULTS["HTTP_BASIC_AUTH"], + ), + "POLL_TIME": _get_int( + f"{_PREFIX}POLL_TIME", + MANAGER_CONFIG_DEFAULTS["POLL_TIME"], + ), + "REQUESTS_TIMEOUT": _get_int( + f"{_PREFIX}REQUESTS_TIMEOUT", + MANAGER_CONFIG_DEFAULTS["REQUESTS_TIMEOUT"], + ), + "POLL_INTERVAL": _get_int( + f"{_PREFIX}POLL_INTERVAL", + MANAGER_CONFIG_DEFAULTS["POLL_INTERVAL"], + ), + "LOGIN_POLL_INTERVAL": _get_int( + f"{_PREFIX}LOGIN_POLL_INTERVAL", + MANAGER_CONFIG_DEFAULTS["LOGIN_POLL_INTERVAL"], + ), + "FOLLOW_CLIENT_REDIRECTS": _get_bool( + f"{_PREFIX}FOLLOW_CLIENT_REDIRECTS", + MANAGER_CONFIG_DEFAULTS["FOLLOW_CLIENT_REDIRECTS"], + ), + } diff --git a/xqueue_watcher/grader.py b/xqueue_watcher/grader.py index 48035b1..72e2e18 100644 --- a/xqueue_watcher/grader.py +++ b/xqueue_watcher/grader.py @@ -7,7 +7,8 @@ from pathlib import Path import logging import multiprocessing -from statsd import statsd + +from . import metrics as _metrics def format_errors(errors): @@ -109,7 +110,7 @@ def grade(self, grader_path, grader_config, student_response): def process_item(self, content, queue=None): try: - statsd.increment('xqueuewatcher.process-item') + _metrics.process_item_counter.add(1) body = content['xqueue_body'] files = content['xqueue_files'] @@ -122,7 +123,7 @@ def process_item(self, content, queue=None): except ValueError as err: # If parsing json fails, erroring is fine--something is wrong in the content. 
# However, for debugging, still want to see what the problem is - statsd.increment('xqueuewatcher.grader_payload_error') + _metrics.grader_payload_error_counter.add(1) self.log.debug(f"error parsing: '{payload}' -- {err}") raise @@ -141,14 +142,16 @@ def process_item(self, content, queue=None): start = time.time() results = self.grade(grader_path, grader_config, student_response) - statsd.histogram('xqueuewatcher.grading-time', time.time() - start) + elapsed = time.time() - start + _metrics.grading_time_histogram.record(elapsed) + self.log.info('grading-time seconds=%.3f', elapsed) # Make valid JSON message reply = {'correct': results['correct'], 'score': results['score'], 'msg': self.render_results(results)} - statsd.increment('xqueuewatcher.replies (non-exception)') + _metrics.replies_counter.add(1) except Exception as e: self.log.exception("process_item") if queue: diff --git a/xqueue_watcher/manager.py b/xqueue_watcher/manager.py index 01c5afa..af24d88 100644 --- a/xqueue_watcher/manager.py +++ b/xqueue_watcher/manager.py @@ -16,6 +16,7 @@ _codejail_jail_code = None from .settings import get_manager_config_values, MANAGER_CONFIG_DEFAULTS +from .metrics import configure_metrics class Manager: @@ -93,6 +94,8 @@ def configure_from_directory(self, directory): logging.basicConfig(level="DEBUG") self.log = logging.getLogger('xqueue_watcher.manager') + configure_metrics() + app_config_path = directory / 'xqwatcher.json' self.manager_config = get_manager_config_values(app_config_path) diff --git a/xqueue_watcher/metrics.py b/xqueue_watcher/metrics.py new file mode 100644 index 0000000..2cf0773 --- /dev/null +++ b/xqueue_watcher/metrics.py @@ -0,0 +1,78 @@ +""" +OpenTelemetry metrics for xqueue-watcher. + +Call :func:`configure_metrics` once at process startup (before the first +submission is processed). 
All configuration is read from the standard +OpenTelemetry environment variables so no application-level config files are +needed: + +``OTEL_EXPORTER_OTLP_ENDPOINT`` + OTLP collector endpoint, e.g. ``http://otel-collector:4318``. + When absent or empty, metrics are recorded in-process but not exported. +``OTEL_SERVICE_NAME`` + Service name attached to every metric (default: ``xqueue-watcher``). +``OTEL_RESOURCE_ATTRIBUTES`` + Additional resource attributes as ``key=value,...`` pairs. +``OTEL_METRIC_EXPORT_INTERVAL`` + Export interval in milliseconds (SDK default: 60 000). +""" + +import os + +from opentelemetry import metrics +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader +from opentelemetry.sdk.resources import Resource + +_METER_NAME = "xqueue_watcher" +_DEFAULT_SERVICE_NAME = "xqueue-watcher" + + +def _build_meter_provider() -> MeterProvider: + resource = Resource.create( + {"service.name": os.environ.get("OTEL_SERVICE_NAME", "").strip() or _DEFAULT_SERVICE_NAME} + ) + readers = [] + if os.environ.get("OTEL_EXPORTER_OTLP_ENDPOINT", "").strip(): + from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter + readers.append(PeriodicExportingMetricReader(OTLPMetricExporter())) + return MeterProvider(resource=resource, metric_readers=readers) + + +def configure_metrics() -> None: + """Configure the global OTel MeterProvider from environment variables.""" + metrics.set_meter_provider(_build_meter_provider()) + + +# --------------------------------------------------------------------------- +# Instruments +# +# Created at module level against the global proxy meter. The OTel proxy +# delegates transparently to whichever MeterProvider is active, so these +# instruments work correctly whether configure_metrics() has been called or +# not (unmeasured data simply goes to the no-op provider until the real +# provider is installed). 
+# --------------------------------------------------------------------------- + +_meter = metrics.get_meter(_METER_NAME) + +process_item_counter = _meter.create_counter( + "xqueuewatcher.process_item", + description="Number of grading submissions received.", +) + +grader_payload_error_counter = _meter.create_counter( + "xqueuewatcher.grader_payload_error", + description="Number of submissions whose grader_payload could not be parsed.", +) + +grading_time_histogram = _meter.create_histogram( + "xqueuewatcher.grading_time", + unit="s", + description="Wall-clock time in seconds spent grading a single submission.", +) + +replies_counter = _meter.create_counter( + "xqueuewatcher.replies", + description="Number of successful (non-exception) grading replies sent.", +) From 22d311c1c51463c31dfe25c64ea5dc2f153faa1f Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Tue, 17 Mar 2026 09:17:57 -0400 Subject: [PATCH 14/25] chore: remove planning doc from git tracking Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .gitignore | 1 + Automated code Graders With xqueue-watcher.md | 67 ------------------- 2 files changed, 1 insertion(+), 67 deletions(-) delete mode 100755 Automated code Graders With xqueue-watcher.md diff --git a/.gitignore b/.gitignore index dc97db5..d371a40 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,4 @@ reports/ # Kubernetes secrets — never commit real values deploy/kubernetes/secret.yaml +Automated code Graders With xqueue-watcher.md diff --git a/Automated code Graders With xqueue-watcher.md b/Automated code Graders With xqueue-watcher.md deleted file mode 100755 index af0baf1..0000000 --- a/Automated code Graders With xqueue-watcher.md +++ /dev/null @@ -1,67 +0,0 @@ -## Current Status - -Student-submitted code questions are part of the course material for the 6.00x series, 6.86, and 7.8QBWx courses. These are automatically evaluated and graded in the context of the edX interface. 
The current implementation of this feature relies on the combination of [xqueue](https://github.com/openedx/xqueue) and [xqueue-watcher](https://github.com/openedx/xqueue-watcher). The xqueue and xqueue-watcher applications are [deprecated](https://github.com/openedx/public-engineering/issues/214) and slated for removal from the Open edX ecosystem in October 2024\. - -The user experience for course teams and students is quite poor. Course teams have no reliable means of validating their grader tests without deploying them to a running environment, leading to longer cycle times and a lack of visibility into errors. This also contributes to a substantial support burden on engineering to provide debugging information and onboarding for deployment. Students get opaque responses when their submissions are invalid or incorrect. Instructors have limited visibility into the code that their students are submitting and no ability to provide in-context feedback to the students. - -As an alternative to the combination of xqueue and xqueue-watcher, there are several paid services that offer automatic grading of student code. These platforms also offer many additional features beyond the capabilities of xqueue/xqueue-watcher. Examples include in-browser editor support (e.g., Visual Studio Code), in-context feedback mechanisms for instructors to comment on student code, and workspace management for students and instructors to keep track of what code has been written. A non-exhaustive list of companies providing this service includes [Codio](https://www.codio.com/), [Vocareum](https://www.vocareum.com/), and [CodeGrade](https://www.codegrade.com/). - -The core capability of the xqueue-watcher code grading system is to facilitate a request/response interaction for evaluating student-submitted code in a stateless manner. This is a benefit in that it allows for scaling to a large number of students. 
If there is a desire to have a more stateful experience for the learners, then instructors are advised to take advantage of services as noted above. - -## Planned Improvements - -In order to provide continuity of service to course teams currently using code grading in their classes, the engineering team will continue to provide the xqueue/xqueue-watcher systems as a service for Open edX courses. This will require investing engineering resources to upgrade the current code to be compatible with newer versions of the edX platform, as well as updating the system dependencies. The deployment architecture will also require upgrades to be compatible with the Open Learning infrastructure platform. - -## Scope Of Work - -The focus of the improvements and upgrades to xqueue and xqueue-watcher will be on the local development experience for course instructors, and reducing the maintenance burden on infrastructure engineers. - -### Upgrade Service Deployment - -The initial scope of work will be focused on migrating the deployment of the xqueue-watcher service. This will include the ability to more easily scale capacity, allowing for the migration of current grader workloads hosted by edx.org/2U onto our infrastructure.This will require the dedicated attention of one member of the infrastructure engineering team for approximately 2 months to accomplish. - -### Enable Local Testing - -A significant source of frustration with the xqueue-watcher system is the inability to effectively test the functionality of the evaluation functions during their development. In addition, the course teams have no control over the runtime environment and system dependencies available to the student code. The current state of the service is that course teams are only able to use Python code and there is no means of specifying which version of the language to use. - -In order to make this easier, a new execution context that uses Docker images will be added to the xqueue-watcher service. 
This will allow course teams to develop their own images, specify the language and version that students can write their code in, and specify the dependencies available to them. Having a pre-built image to use for run-time execution also reduces the work required of the infrastructure team, allowing them to focus on the runtime of the core service rather than being responsible for the evaluation context of student code. - -For course teams that have existing grader logic, the engineering team will assist them in the initial creation of a Docker image that is compatible with their current code. This will not require substantial modifications to the grader code as it is presently written in order to function under the new execution context. - -The work to add and validate this functionality will require the attention of 2 engineers for approximately 4 months. - -### Provide Debugging Information To Course Teams - -The other major source of friction for both course instructors and platform operators is the inability to debug issues that students encounter. To allow course teams to diagnose and debug errors, the infrastructure team will collect log messages emitted by the grader into a system that allows for searching of that data. Course teams will be granted access to their logs for the purpose of supporting their students in the event that there are logical or resource-related failures due to student submissions and/or grader logic. This will require the focus of 1 infrastructure engineer for approximately 1 month to configure, integrate, and validate the functionality and permission scoping. If there is a desire to integrate this experience into the Open edX interface then it will require an additional developer for approximately 1 \- 2 months. - -### Ongoing Maintenance, Support, and Improvements - -To support the ongoing operation and support of the automated grader service will require the attention of 1 full time DevOps engineer. 
The responsibilities of this engineer include: - -* Helping course teams with deployment and debugging of their graders -* Build utilities to simplify the creation of graders by course teams -* Perform ongoing maintenance and upgrades of the xqueue and xqueue-watcher projects -* Manage deployment, monitoring, and scaling of the automated graders - -## Explicitly Out Of Scope - -There are a number of features that we are explicitly not going to offer to course teams. In particular, we have no intention of adding features around dedicated lab spaces for students, editor integrations, or support for other courseware systems. For any use cases that go beyond a simple request/response interchange for evaluating student code the course team is advised to engage with services such as Codio, CodeGrade, etc. as noted above. - -## Estimated Costs - -As noted in the scope of work, the ongoing support, maintenance, and upgrades of the service will require an additional dedicated DevOps engineer. - -The infrastructure costs for the current installation of xqueue-watchers is approximately $700/month. With increased scaling to account for the course load currently being managed on edx.org that number is anticipated to reach \~$1,500/month. - -The total estimated cost per learner is \~$5/learner/month. This is based on the above costs and an enrollment rate of \~3,700 verified learners per course term. 
- -## Comparison To Alternatives - -| Property | Xqueue/Xqueue-Watcher | Cod.io | -| :---- | :---- | :---- | -| Cost | \~$5/student/month | \~$12/student/month minimum | -| Compatibility | Open edX only | Open edX, Canvas, any LMS that supports LTI | -| Features | Automated evaluation of student code submissions | In-browser IDE, Automated code evaluation, Cloud lab environments, Content authoring | -| Instructor Support | Business hours by staff FTE | [Assistance with development and maintenance of problems and graders](https://docs.codio.com/index.html) | -| Student Support | 24/7 2U or MITx Online escalated to course team | [https://docs.codio.com/student.html](https://docs.codio.com/student.html) | - From d524236c58367a1e48921ce02de5089585466484 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Tue, 17 Mar 2026 14:56:50 -0400 Subject: [PATCH 15/25] chore: remove codecov upload from CI Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .github/workflows/ci.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dcf2daf..8f0f876 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,10 +29,4 @@ jobs: run: uv python install ${{ matrix.python-version }} - name: Run Tests - run: uv run --python ${{ matrix.python-version }} pytest --cov=xqueue_watcher --cov-report=xml tests - - - name: Run Coverage - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - fail_ci_if_error: true + run: uv run --python ${{ matrix.python-version }} pytest tests From 16e034b7794aa2690989a42e69b1779d7cf7768c Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 18 Mar 2026 12:16:20 -0400 Subject: [PATCH 16/25] fix: address PR #14 review feedback - docker-compose.yml: remove unused GRADER_BACKEND env var, fix duplicate volumes key by merging into one list, tag sample-grader with image: grader-base:local so conf.d/600.json reference resolves - 
Dockerfile: standardise CMD config path to /etc/xqueue-watcher to match docker-compose and Kubernetes manifests - metrics.py: remove OTEL_METRIC_EXPORT_INTERVAL from docstring since it is not wired up in _build_meter_provider() - containergrader.py: add pod template metadata labels so the NetworkPolicy podSelector (app.kubernetes.io/component=xqueue-grader) actually matches grading pods; set automount_service_account_token=False on the grading pod spec to reduce blast radius if the NetworkPolicy is misconfigured; add _parse_memory_bytes() helper and use it for the Docker backend mem_limit so Kubernetes-style strings like '256Mi' are converted to bytes rather than passed raw (which Docker does not accept) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 4 ++-- docker-compose.yml | 10 +++------- xqueue_watcher/containergrader.py | 33 ++++++++++++++++++++++++++++++- xqueue_watcher/metrics.py | 2 -- 4 files changed, 37 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index c4f2008..237f090 100644 --- a/Dockerfile +++ b/Dockerfile @@ -32,8 +32,8 @@ RUN uv sync --frozen --no-dev USER app -CMD ["xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] +CMD ["xqueue-watcher", "-d", "/etc/xqueue-watcher"] FROM base AS edx.org USER app -CMD ["xqueue-watcher", "-d", "/edx/etc/xqueue_watcher"] +CMD ["xqueue-watcher", "-d", "/etc/xqueue-watcher"] diff --git a/docker-compose.yml b/docker-compose.yml index 23379f0..ec3475a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,16 +27,11 @@ services: - ./conf.d:/etc/xqueue-watcher/conf.d:rw # Mount local grader scripts for rapid iteration. - ./data:/graders:rw - environment: - # Use docker backend so grading containers run locally via the Docker socket. - GRADER_BACKEND: docker - # Give xqueue-watcher access to the Docker socket so it can spawn grader containers. - # Remove this if you don't need the docker backend locally. 
- volumes: + # Give xqueue-watcher access to the Docker socket so it can spawn grader containers. - /var/run/docker.sock:/var/run/docker.sock extra_hosts: - "host.docker.internal:host-gateway" - command: python -m xqueue_watcher -d /etc/xqueue-watcher + command: xqueue-watcher -d /etc/xqueue-watcher # sample-grader: an example grader image for local testing. # Course teams replace this with their own image. @@ -44,6 +39,7 @@ services: build: context: . dockerfile: grader_support/Dockerfile.base + image: grader-base:local # This service is not started automatically — it exists so `docker compose build` # builds the base image that course grader images extend. profiles: ["build-only"] diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 7107aad..086f707 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -169,8 +169,15 @@ def _build_k8s_job(self, job_name, grader_path, code, seed, grader_config=None): active_deadline_seconds=self.timeout, ttl_seconds_after_finished=300, template=k8s_client.V1PodTemplateSpec( + metadata=k8s_client.V1ObjectMeta( + labels={ + "app.kubernetes.io/component": "xqueue-grader", + "app.kubernetes.io/managed-by": "xqueue-watcher", + } + ), spec=k8s_client.V1PodSpec( restart_policy="Never", + automount_service_account_token=False, security_context=k8s_client.V1PodSecurityContext( run_as_non_root=True, run_as_user=1000, @@ -301,7 +308,7 @@ def _run_docker(self, grader_path, code, seed, grader_config=None): working_dir="/grader", environment=env, volumes={grader_dir: {"bind": "/graders", "mode": "ro"}}, - mem_limit=self.memory_limit, + mem_limit=_parse_memory_bytes(self.memory_limit), nano_cpus=int(_parse_cpu_millis(self.cpu_limit) * 1_000_000), network_disabled=True, read_only=True, @@ -393,3 +400,27 @@ def _parse_cpu_millis(cpu_str): if cpu_str.endswith("m"): return float(cpu_str[:-1]) return float(cpu_str) * 1000 + + +def _parse_memory_bytes(memory_str): + """Convert a 
Kubernetes/Docker memory string to bytes for the Docker API. + + Handles IEC binary suffixes (Ki, Mi, Gi, Ti) and SI decimal suffixes + (K, M, G, T). Plain integers are returned unchanged. + + Examples: + "256Mi" -> 268435456 + "1Gi" -> 1073741824 + "512M" -> 512000000 + "1024" -> 1024 + """ + s = str(memory_str).strip() + iec = {"Ti": 1024**4, "Gi": 1024**3, "Mi": 1024**2, "Ki": 1024} + si = {"T": 1000**4, "G": 1000**3, "M": 1000**2, "K": 1000} + for suffix, factor in iec.items(): + if s.endswith(suffix): + return int(float(s[: -len(suffix)]) * factor) + for suffix, factor in si.items(): + if s.endswith(suffix): + return int(float(s[: -len(suffix)]) * factor) + return int(s) diff --git a/xqueue_watcher/metrics.py b/xqueue_watcher/metrics.py index 2cf0773..16ffb27 100644 --- a/xqueue_watcher/metrics.py +++ b/xqueue_watcher/metrics.py @@ -13,8 +13,6 @@ Service name attached to every metric (default: ``xqueue-watcher``). ``OTEL_RESOURCE_ATTRIBUTES`` Additional resource attributes as ``key=value,...`` pairs. -``OTEL_METRIC_EXPORT_INTERVAL`` - Export interval in milliseconds (SDK default: 60 000). """ import os From 0495e7584912af6df505e4d5eaa10a3148581f61 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 18 Mar 2026 16:11:46 -0400 Subject: [PATCH 17/25] fix: add venv bin to PATH so xqueue-watcher entrypoint resolves uv installs the console script into the project virtual environment at .venv/bin/xqueue-watcher. Without adding this directory to PATH the CMD cannot be found at container startup. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 237f090..0f0a8d7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,8 @@ FROM python:3.11-slim AS base ENV PYTHONDONTWRITEBYTECODE=1 \ PYTHONUNBUFFERED=1 \ LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 + LC_ALL=C.UTF-8 \ + PATH="/edx/app/xqueue_watcher/.venv/bin:$PATH" RUN apt-get update && \ apt-get install -y --no-install-recommends git-core && \ From c247b520ed45e7aaaf69af7377fc9458a16b50fd Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 18 Mar 2026 16:30:25 -0400 Subject: [PATCH 18/25] feat: add configure_logging() for 12-factor stdout logging When no logging.json file is present, manager.py now calls configure_logging() from env_settings instead of basicConfig(). configure_logging() sets up a single StreamHandler on stdout with a consistent timestamp/level/module format, honours XQWATCHER_LOG_LEVEL (default INFO), and suppresses noisy requests/urllib3 debug output. This removes the need for a logging.json file in Kubernetes deployments. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/test_env_settings.py | 41 +++++++++++- xqueue_watcher/env_settings.py | 114 +++++++++++++++++++++++++++++++++ xqueue_watcher/manager.py | 3 +- 3 files changed, 156 insertions(+), 2 deletions(-) diff --git a/tests/test_env_settings.py b/tests/test_env_settings.py index 4386040..412a5b6 100644 --- a/tests/test_env_settings.py +++ b/tests/test_env_settings.py @@ -1,10 +1,49 @@ +import logging import unittest from unittest.mock import patch -from xqueue_watcher.env_settings import get_manager_config_from_env +from xqueue_watcher.env_settings import configure_logging, get_manager_config_from_env from xqueue_watcher.settings import MANAGER_CONFIG_DEFAULTS +class TestConfigureLogging(unittest.TestCase): + def tearDown(self): + # Reset root logger after each test so handlers don't accumulate. + root = logging.getLogger() + root.handlers.clear() + root.setLevel(logging.WARNING) + + def test_default_level_is_info(self): + with patch.dict("os.environ", {}, clear=False): + configure_logging() + self.assertEqual(logging.getLogger().level, logging.INFO) + + def test_custom_level_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_LOG_LEVEL": "DEBUG"}): + configure_logging() + self.assertEqual(logging.getLogger().level, logging.DEBUG) + + def test_stdout_handler_installed(self): + import sys + with patch.dict("os.environ", {}, clear=False): + configure_logging() + handlers = logging.getLogger().handlers + self.assertEqual(len(handlers), 1) + self.assertIsInstance(handlers[0], logging.StreamHandler) + self.assertIs(handlers[0].stream, sys.stdout) + + def test_requests_logger_set_to_warning(self): + with patch.dict("os.environ", {"XQWATCHER_LOG_LEVEL": "DEBUG"}): + configure_logging() + self.assertEqual(logging.getLogger("requests").level, logging.WARNING) + self.assertEqual(logging.getLogger("urllib3").level, logging.WARNING) + + def test_invalid_level_raises(self): + with 
patch.dict("os.environ", {"XQWATCHER_LOG_LEVEL": "NOTLEVEL"}): + with self.assertRaises(ValueError): + configure_logging() + + class TestGetManagerConfigFromEnv(unittest.TestCase): def test_defaults_when_no_env_vars_set(self): with patch.dict("os.environ", {}, clear=False): diff --git a/xqueue_watcher/env_settings.py b/xqueue_watcher/env_settings.py index 70379bd..aaba7f6 100644 --- a/xqueue_watcher/env_settings.py +++ b/xqueue_watcher/env_settings.py @@ -7,8 +7,16 @@ as a drop-in source of configuration alongside or instead of the JSON file read by :func:`xqueue_watcher.settings.get_manager_config_values`. +It also provides :func:`configure_logging`, which initialises a structured +stdout logging configuration without requiring a ``logging.json`` file — +suitable for Kubernetes and any 12-factor environment where logs are consumed +from stdout by the container runtime. + Environment variables --------------------- +XQWATCHER_LOG_LEVEL + Root log level (default: ``INFO``). Accepts any standard Python level + name: ``DEBUG``, ``INFO``, ``WARNING``, ``ERROR``, ``CRITICAL``. XQWATCHER_HTTP_BASIC_AUTH HTTP Basic Auth credentials as ``username:password``. Unset or empty means no authentication (equivalent to ``None``). @@ -25,12 +33,118 @@ (boolean, default false). """ +import logging +import logging.config import os from .settings import MANAGER_CONFIG_DEFAULTS _PREFIX = "XQWATCHER_" +_LOG_FORMAT = "%(asctime)s %(levelname)s %(process)d [%(name)s] %(filename)s:%(lineno)d - %(message)s" + + +def configure_logging() -> None: + """ + Initialise logging to stdout using a level read from the environment. + + This is the 12-factor / Kubernetes alternative to supplying a + ``logging.json`` file. All log records are written to ``stdout`` so they + are captured by the container runtime and forwarded to whatever log + aggregation system is in use (e.g. Fluentd, Loki, CloudWatch). 
+ + The root log level defaults to ``INFO`` and can be overridden via the + ``XQWATCHER_LOG_LEVEL`` environment variable. The ``requests`` and + ``urllib3`` libraries are pinned to ``WARNING`` to suppress noisy + HTTP-level debug output. + """ + level = os.environ.get(f"{_PREFIX}LOG_LEVEL", "INFO").strip().upper() + + logging.config.dictConfig({ + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "standard": { + "format": _LOG_FORMAT, + }, + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "formatter": "standard", + "level": level, + }, + }, + "root": { + "handlers": ["stdout"], + "level": level, + }, + "loggers": { + "requests": {"level": "WARNING"}, + "urllib3": {"level": "WARNING"}, + }, + }) + + +def _get_bool(name: str, default: bool) -> bool: + raw = os.environ.get(name, "").strip().lower() + if raw in ("1", "true", "yes"): + return True + if raw in ("0", "false", "no"): + return False + return default + + +def _get_int(name: str, default: int) -> int: + raw = os.environ.get(name, "").strip() + if raw: + return int(raw) + return default + + +def _get_auth(name: str, default): + raw = os.environ.get(name, "").strip() + if raw: + return raw + return default + + +def get_manager_config_from_env() -> dict: + """ + Return manager configuration populated from environment variables. + + Values not present in the environment fall back to + :data:`~xqueue_watcher.settings.MANAGER_CONFIG_DEFAULTS`. 
+ """ + return { + "HTTP_BASIC_AUTH": _get_auth( + f"{_PREFIX}HTTP_BASIC_AUTH", + MANAGER_CONFIG_DEFAULTS["HTTP_BASIC_AUTH"], + ), + "POLL_TIME": _get_int( + f"{_PREFIX}POLL_TIME", + MANAGER_CONFIG_DEFAULTS["POLL_TIME"], + ), + "REQUESTS_TIMEOUT": _get_int( + f"{_PREFIX}REQUESTS_TIMEOUT", + MANAGER_CONFIG_DEFAULTS["REQUESTS_TIMEOUT"], + ), + "POLL_INTERVAL": _get_int( + f"{_PREFIX}POLL_INTERVAL", + MANAGER_CONFIG_DEFAULTS["POLL_INTERVAL"], + ), + "LOGIN_POLL_INTERVAL": _get_int( + f"{_PREFIX}LOGIN_POLL_INTERVAL", + MANAGER_CONFIG_DEFAULTS["LOGIN_POLL_INTERVAL"], + ), + "FOLLOW_CLIENT_REDIRECTS": _get_bool( + f"{_PREFIX}FOLLOW_CLIENT_REDIRECTS", + MANAGER_CONFIG_DEFAULTS["FOLLOW_CLIENT_REDIRECTS"], + ), + } + + def _get_bool(name: str, default: bool) -> bool: raw = os.environ.get(name, "").strip().lower() diff --git a/xqueue_watcher/manager.py b/xqueue_watcher/manager.py index af24d88..9209a12 100644 --- a/xqueue_watcher/manager.py +++ b/xqueue_watcher/manager.py @@ -17,6 +17,7 @@ from .settings import get_manager_config_values, MANAGER_CONFIG_DEFAULTS from .metrics import configure_metrics +from .env_settings import configure_logging class Manager: @@ -91,7 +92,7 @@ def configure_from_directory(self, directory): with open(log_config) as config: logging.config.dictConfig(json.load(config)) else: - logging.basicConfig(level="DEBUG") + configure_logging() self.log = logging.getLogger('xqueue_watcher.manager') configure_metrics() From 2e7ab2d6f10c92be0753e95bd365501b9fa73b00 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Wed, 18 Mar 2026 17:11:52 -0400 Subject: [PATCH 19/25] fix: symlink xqueue-watcher into /usr/local/bin for reliable resolution Using PATH via ENV is fragile -- container runtimes and security policies can reset or ignore it. Install a symlink at /usr/local/bin/xqueue-watcher (always in the standard system PATH) so the entrypoint resolves regardless of how the container is launched. 
Also remove the stale NEW_RELIC_LICENSE_KEY env entry from the Kubernetes deployment manifest. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- Dockerfile | 6 +++--- deploy/kubernetes/deployment.yaml | 9 --------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0f0a8d7..51c8824 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,8 +3,7 @@ FROM python:3.11-slim AS base ENV PYTHONDONTWRITEBYTECODE=1 \ PYTHONUNBUFFERED=1 \ LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 \ - PATH="/edx/app/xqueue_watcher/.venv/bin:$PATH" + LC_ALL=C.UTF-8 RUN apt-get update && \ apt-get install -y --no-install-recommends git-core && \ @@ -20,7 +19,8 @@ COPY pyproject.toml uv.lock ./ RUN uv sync --frozen --no-dev --no-install-project COPY . /edx/app/xqueue_watcher -RUN uv sync --frozen --no-dev +RUN uv sync --frozen --no-dev && \ + ln -s /edx/app/xqueue_watcher/.venv/bin/xqueue-watcher /usr/local/bin/xqueue-watcher # Note: the `codejail` optional extra (edx-codejail) is intentionally omitted # from this image. 
In the Kubernetes deployment, student code runs inside an # isolated container (ContainerGrader) — the container boundary provides the diff --git a/deploy/kubernetes/deployment.yaml b/deploy/kubernetes/deployment.yaml index 0159455..df736ac 100644 --- a/deploy/kubernetes/deployment.yaml +++ b/deploy/kubernetes/deployment.yaml @@ -37,15 +37,6 @@ spec: imagePullPolicy: Always command: ["xqueue-watcher", "-d", "/etc/xqueue-watcher"] - env: - # NEW_RELIC_LICENSE_KEY injected from secret if using newrelic extra - - name: NEW_RELIC_LICENSE_KEY - valueFrom: - secretKeyRef: - name: xqueue-watcher-secrets - key: new-relic-license-key - optional: true - resources: requests: cpu: "100m" From 5502d68f0b30ab851ae71f7d87163577eb22afd2 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 11:29:04 -0400 Subject: [PATCH 20/25] feat: add env-based defaults for ContainerGrader configuration Add get_container_grader_defaults() to env_settings, reading five new XQWATCHER_GRADER_* env vars: XQWATCHER_GRADER_BACKEND (default: kubernetes) XQWATCHER_GRADER_NAMESPACE (default: default) XQWATCHER_GRADER_CPU_LIMIT (default: 500m) XQWATCHER_GRADER_MEMORY_LIMIT (default: 256Mi) XQWATCHER_GRADER_TIMEOUT (default: 20) ContainerGrader.__init__ now uses None sentinels for these params so that any value omitted from a conf.d KWARGS block falls back to the env-derived default rather than a hardcoded constant. Values supplied explicitly in conf.d always take precedence, preserving backwards compatibility. Also fixes duplicate function definitions that had crept into env_settings.py. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/test_container_grader.py | 53 +++++++++++- tests/test_env_settings.py | 54 +++++++++++- xqueue_watcher/containergrader.py | 42 +++++---- xqueue_watcher/env_settings.py | 138 ++++++++++++++---------------- 4 files changed, 192 insertions(+), 95 deletions(-) diff --git a/tests/test_container_grader.py b/tests/test_container_grader.py index f4c758c..87a26df 100644 --- a/tests/test_container_grader.py +++ b/tests/test_container_grader.py @@ -8,10 +8,11 @@ import json from pathlib import Path from unittest import mock +from unittest.mock import patch import pytest -from xqueue_watcher.containergrader import ContainerGrader, _parse_cpu_millis +from xqueue_watcher.containergrader import ContainerGrader, _parse_cpu_millis, _parse_memory_bytes # --------------------------------------------------------------------------- @@ -56,7 +57,8 @@ def test_invalid_backend(self): with pytest.raises(ValueError, match="Unsupported backend"): make_grader(backend="podman") - def test_defaults(self): + def test_defaults_from_env_when_no_kwargs(self): + """With no kwargs, values come from env defaults (all at baseline).""" g = ContainerGrader(grader_root="/graders", image="img:latest") assert g.backend == "kubernetes" assert g.namespace == "default" @@ -64,6 +66,53 @@ def test_defaults(self): assert g.memory_limit == "256Mi" assert g.timeout == 20 + def test_kwargs_override_env_defaults(self): + """Explicit kwargs always win over env defaults.""" + env = { + "XQWATCHER_GRADER_BACKEND": "docker", + "XQWATCHER_GRADER_NAMESPACE": "env-ns", + "XQWATCHER_GRADER_CPU_LIMIT": "250m", + "XQWATCHER_GRADER_MEMORY_LIMIT": "128Mi", + "XQWATCHER_GRADER_TIMEOUT": "5", + } + with patch.dict("os.environ", env): + g = ContainerGrader( + grader_root="/graders", + image="img:latest", + backend="kubernetes", + namespace="kwarg-ns", + cpu_limit="1000m", + memory_limit="512Mi", + timeout=99, + ) + assert g.backend == "kubernetes" + 
assert g.namespace == "kwarg-ns" + assert g.cpu_limit == "1000m" + assert g.memory_limit == "512Mi" + assert g.timeout == 99 + + def test_env_defaults_applied_when_no_kwargs(self): + """Env vars are used when the corresponding kwarg is absent.""" + env = { + "XQWATCHER_GRADER_BACKEND": "docker", + "XQWATCHER_GRADER_NAMESPACE": "grading", + "XQWATCHER_GRADER_CPU_LIMIT": "750m", + "XQWATCHER_GRADER_MEMORY_LIMIT": "512Mi", + "XQWATCHER_GRADER_TIMEOUT": "30", + } + with patch.dict("os.environ", env): + g = ContainerGrader(grader_root="/graders", image="img:latest") + assert g.backend == "docker" + assert g.namespace == "grading" + assert g.cpu_limit == "750m" + assert g.memory_limit == "512Mi" + assert g.timeout == 30 + + def test_invalid_backend_from_env_raises(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_BACKEND": "podman"}): + with pytest.raises(ValueError, match="Unsupported backend"): + ContainerGrader(grader_root="/graders", image="img:latest") + # --------------------------------------------------------------------------- # _build_k8s_job diff --git a/tests/test_env_settings.py b/tests/test_env_settings.py index 412a5b6..fa46951 100644 --- a/tests/test_env_settings.py +++ b/tests/test_env_settings.py @@ -2,7 +2,7 @@ import unittest from unittest.mock import patch -from xqueue_watcher.env_settings import configure_logging, get_manager_config_from_env +from xqueue_watcher.env_settings import configure_logging, get_container_grader_defaults, get_manager_config_from_env from xqueue_watcher.settings import MANAGER_CONFIG_DEFAULTS @@ -120,3 +120,55 @@ def test_all_env_vars_together(self): def test_returns_all_expected_keys(self): config = get_manager_config_from_env() self.assertEqual(set(config.keys()), set(MANAGER_CONFIG_DEFAULTS.keys())) + + +class TestGetContainerGraderDefaults(unittest.TestCase): + def test_built_in_defaults_when_no_env(self): + with patch.dict("os.environ", {}, clear=False): + d = get_container_grader_defaults() + 
self.assertEqual(d["backend"], "kubernetes") + self.assertEqual(d["namespace"], "default") + self.assertEqual(d["cpu_limit"], "500m") + self.assertEqual(d["memory_limit"], "256Mi") + self.assertEqual(d["timeout"], 20) + + def test_backend_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_BACKEND": "docker"}): + d = get_container_grader_defaults() + self.assertEqual(d["backend"], "docker") + + def test_namespace_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_NAMESPACE": "grading"}): + d = get_container_grader_defaults() + self.assertEqual(d["namespace"], "grading") + + def test_cpu_limit_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_CPU_LIMIT": "1"}): + d = get_container_grader_defaults() + self.assertEqual(d["cpu_limit"], "1") + + def test_memory_limit_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_MEMORY_LIMIT": "1Gi"}): + d = get_container_grader_defaults() + self.assertEqual(d["memory_limit"], "1Gi") + + def test_timeout_from_env(self): + with patch.dict("os.environ", {"XQWATCHER_GRADER_TIMEOUT": "60"}): + d = get_container_grader_defaults() + self.assertEqual(d["timeout"], 60) + + def test_all_grader_env_vars_together(self): + env = { + "XQWATCHER_GRADER_BACKEND": "docker", + "XQWATCHER_GRADER_NAMESPACE": "ci", + "XQWATCHER_GRADER_CPU_LIMIT": "250m", + "XQWATCHER_GRADER_MEMORY_LIMIT": "128Mi", + "XQWATCHER_GRADER_TIMEOUT": "10", + } + with patch.dict("os.environ", env): + d = get_container_grader_defaults() + self.assertEqual(d["backend"], "docker") + self.assertEqual(d["namespace"], "ci") + self.assertEqual(d["cpu_limit"], "250m") + self.assertEqual(d["memory_limit"], "128Mi") + self.assertEqual(d["timeout"], 10) diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 086f707..9ac1d37 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -16,6 +16,7 @@ from pathlib import Path from .grader import Grader +from 
.env_settings import get_container_grader_defaults _BACKEND_KUBERNETES = "kubernetes" @@ -39,35 +40,42 @@ class ContainerGrader(Grader): for Kubernetes the scripts are baked into the image. image - Docker image to run. Should extend grader-base and include all course-specific grader scripts and dependencies. - backend - "kubernetes" (default) or "docker". - namespace - Kubernetes namespace to create Jobs in (default: "default"). - cpu_limit - CPU limit for the grading container (default: "500m"). - memory_limit - Memory limit for the grading container (default: "256Mi"). - timeout - Maximum wall-clock seconds a grading job may run (default: 20). + backend - "kubernetes" or "docker". Defaults to + XQWATCHER_GRADER_BACKEND env var, or "kubernetes". + namespace - Kubernetes namespace to create Jobs in. Defaults to + XQWATCHER_GRADER_NAMESPACE env var, or "default". + cpu_limit - CPU limit for the grading container. Defaults to + XQWATCHER_GRADER_CPU_LIMIT env var, or "500m". + memory_limit - Memory limit for the grading container. Defaults to + XQWATCHER_GRADER_MEMORY_LIMIT env var, or "256Mi". + timeout - Maximum wall-clock seconds a grading job may run. Defaults + to XQWATCHER_GRADER_TIMEOUT env var, or 20. """ def __init__( self, grader_root, image, - backend=_BACKEND_KUBERNETES, - namespace="default", - cpu_limit="500m", - memory_limit="256Mi", - timeout=20, + backend=None, + namespace=None, + cpu_limit=None, + memory_limit=None, + timeout=None, **kwargs, ): - if backend not in _SUPPORTED_BACKENDS: + env_defaults = get_container_grader_defaults() + resolved_backend = backend if backend is not None else env_defaults["backend"] + if resolved_backend not in _SUPPORTED_BACKENDS: raise ValueError( - f"Unsupported backend {backend!r}. Choose from {_SUPPORTED_BACKENDS}." + f"Unsupported backend {resolved_backend!r}. Choose from {_SUPPORTED_BACKENDS}." 
) super().__init__(grader_root=grader_root, fork_per_item=False, **kwargs) self.image = image - self.backend = backend - self.namespace = namespace - self.cpu_limit = cpu_limit - self.memory_limit = memory_limit - self.timeout = timeout + self.backend = resolved_backend + self.namespace = namespace if namespace is not None else env_defaults["namespace"] + self.cpu_limit = cpu_limit if cpu_limit is not None else env_defaults["cpu_limit"] + self.memory_limit = memory_limit if memory_limit is not None else env_defaults["memory_limit"] + self.timeout = timeout if timeout is not None else env_defaults["timeout"] # ------------------------------------------------------------------ # Internal: container execution diff --git a/xqueue_watcher/env_settings.py b/xqueue_watcher/env_settings.py index aaba7f6..38cb312 100644 --- a/xqueue_watcher/env_settings.py +++ b/xqueue_watcher/env_settings.py @@ -31,6 +31,25 @@ XQWATCHER_FOLLOW_CLIENT_REDIRECTS Follow HTTP redirects when ``true`` or ``1``, ignore otherwise (boolean, default false). + +ContainerGrader defaults +~~~~~~~~~~~~~~~~~~~~~~~~ +These allow operators to set deployment-wide grader defaults without repeating +them in every conf.d queue JSON file. Individual queue configs may still +override any of these values in their ``KWARGS`` block. + +XQWATCHER_GRADER_BACKEND + Container backend: ``kubernetes`` (default) or ``docker``. +XQWATCHER_GRADER_NAMESPACE + Kubernetes namespace in which grading Jobs are created (default: + ``default``). Ignored by the Docker backend. +XQWATCHER_GRADER_CPU_LIMIT + CPU limit for grading containers in Kubernetes / Docker notation + (default: ``500m``). +XQWATCHER_GRADER_MEMORY_LIMIT + Memory limit for grading containers, e.g. ``256Mi`` (default: ``256Mi``). +XQWATCHER_GRADER_TIMEOUT + Maximum wall-clock seconds a grading job may run (integer, default 20). 
""" import logging @@ -44,6 +63,40 @@ _LOG_FORMAT = "%(asctime)s %(levelname)s %(process)d [%(name)s] %(filename)s:%(lineno)d - %(message)s" +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + +def _get_bool(name: str, default: bool) -> bool: + raw = os.environ.get(name, "").strip().lower() + if raw in ("1", "true", "yes"): + return True + if raw in ("0", "false", "no"): + return False + return default + + +def _get_int(name: str, default: int) -> int: + raw = os.environ.get(name, "").strip() + if raw: + return int(raw) + return default + + +def _get_str(name: str, default: str) -> str: + raw = os.environ.get(name, "").strip() + return raw if raw else default + + +def _get_auth(name: str, default): + raw = os.environ.get(name, "").strip() + return raw if raw else default + + +# --------------------------------------------------------------------------- +# Public API +# --------------------------------------------------------------------------- + def configure_logging() -> None: """ Initialise logging to stdout using a level read from the environment. @@ -87,29 +140,6 @@ def configure_logging() -> None: }) -def _get_bool(name: str, default: bool) -> bool: - raw = os.environ.get(name, "").strip().lower() - if raw in ("1", "true", "yes"): - return True - if raw in ("0", "false", "no"): - return False - return default - - -def _get_int(name: str, default: int) -> int: - raw = os.environ.get(name, "").strip() - if raw: - return int(raw) - return default - - -def _get_auth(name: str, default): - raw = os.environ.get(name, "").strip() - if raw: - return raw - return default - - def get_manager_config_from_env() -> dict: """ Return manager configuration populated from environment variables. 
@@ -145,60 +175,18 @@ def get_manager_config_from_env() -> dict: } - -def _get_bool(name: str, default: bool) -> bool: - raw = os.environ.get(name, "").strip().lower() - if raw in ("1", "true", "yes"): - return True - if raw in ("0", "false", "no"): - return False - return default - - -def _get_int(name: str, default: int) -> int: - raw = os.environ.get(name, "").strip() - if raw: - return int(raw) - return default - - -def _get_auth(name: str, default): - raw = os.environ.get(name, "").strip() - if raw: - return raw - return default - - -def get_manager_config_from_env() -> dict: +def get_container_grader_defaults() -> dict: """ - Return manager configuration populated from environment variables. + Return deployment-wide ContainerGrader defaults from environment variables. - Values not present in the environment fall back to - :data:`~xqueue_watcher.settings.MANAGER_CONFIG_DEFAULTS`. + These values are used when a ``ContainerGrader`` is constructed without + an explicit value for the corresponding parameter. Any value supplied + directly in the conf.d ``KWARGS`` block takes precedence. 
""" return { - "HTTP_BASIC_AUTH": _get_auth( - f"{_PREFIX}HTTP_BASIC_AUTH", - MANAGER_CONFIG_DEFAULTS["HTTP_BASIC_AUTH"], - ), - "POLL_TIME": _get_int( - f"{_PREFIX}POLL_TIME", - MANAGER_CONFIG_DEFAULTS["POLL_TIME"], - ), - "REQUESTS_TIMEOUT": _get_int( - f"{_PREFIX}REQUESTS_TIMEOUT", - MANAGER_CONFIG_DEFAULTS["REQUESTS_TIMEOUT"], - ), - "POLL_INTERVAL": _get_int( - f"{_PREFIX}POLL_INTERVAL", - MANAGER_CONFIG_DEFAULTS["POLL_INTERVAL"], - ), - "LOGIN_POLL_INTERVAL": _get_int( - f"{_PREFIX}LOGIN_POLL_INTERVAL", - MANAGER_CONFIG_DEFAULTS["LOGIN_POLL_INTERVAL"], - ), - "FOLLOW_CLIENT_REDIRECTS": _get_bool( - f"{_PREFIX}FOLLOW_CLIENT_REDIRECTS", - MANAGER_CONFIG_DEFAULTS["FOLLOW_CLIENT_REDIRECTS"], - ), + "backend": _get_str(f"{_PREFIX}GRADER_BACKEND", "kubernetes"), + "namespace": _get_str(f"{_PREFIX}GRADER_NAMESPACE", "default"), + "cpu_limit": _get_str(f"{_PREFIX}GRADER_CPU_LIMIT", "500m"), + "memory_limit": _get_str(f"{_PREFIX}GRADER_MEMORY_LIMIT", "256Mi"), + "timeout": _get_int(f"{_PREFIX}GRADER_TIMEOUT", 20), } From 91d6ab14296b0ffd15251a117663b97315368e77 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 14:30:56 -0400 Subject: [PATCH 21/25] feat(containergrader): add ImageDigestPoller and image pull policy support MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add ImageDigestPoller class: background daemon thread that periodically resolves a tag-based image reference to its current digest via docker.APIClient.inspect_distribution(). Thread-safe; falls back to the original reference if resolution fails. - Add image_pull_policy param to ContainerGrader (auto-detect: IfNotPresent for digest refs, Always for tag-based refs; can be overridden explicitly). - Add poll_image_digest and digest_poll_interval params to activate the poller. 
When enabled, Kubernetes Jobs use the most recently resolved repo@sha256:… reference via _effective_image(), ensuring nodes always run the latest pushed image without relying on imagePullPolicy: Always for every pod. - Add .github/workflows/publish-grader-base-image.yml to build and push grader_support/Dockerfile.base to ghcr.io/mitodl/xqueue-watcher-grader-base on push to master (grader_support/** paths), weekly schedule, and workflow_dispatch. Multi-platform linux/amd64,linux/arm64. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../workflows/publish-grader-base-image.yml | 69 ++++++++ xqueue_watcher/containergrader.py | 147 ++++++++++++++++-- 2 files changed, 199 insertions(+), 17 deletions(-) create mode 100644 .github/workflows/publish-grader-base-image.yml diff --git a/.github/workflows/publish-grader-base-image.yml b/.github/workflows/publish-grader-base-image.yml new file mode 100644 index 0000000..97c5c4e --- /dev/null +++ b/.github/workflows/publish-grader-base-image.yml @@ -0,0 +1,69 @@ +name: Publish grader base image + +# Builds grader_support/Dockerfile.base and pushes +# ghcr.io/mitodl/xqueue-watcher-grader-base to GHCR. +# +# Downstream grader images (e.g. graders-mit-600x) extend this base; keeping +# it in GHCR lets the Concourse grader-image pipeline pull it as a trigger +# without requiring DockerHub credentials. 
+ +on: + push: + branches: + - master + paths: + - "grader_support/**" + schedule: + # Weekly rebuild to pick up base Python/OS security patches (Sunday 00:00 UTC) + - cron: "0 0 * * 0" + workflow_dispatch: + +env: + REGISTRY: ghcr.io + IMAGE_NAME: mitodl/xqueue-watcher-grader-base + +jobs: + build-and-push: + name: Build and push grader base image + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Log in to GHCR + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up QEMU (for multi-platform builds) + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Extract image metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=raw,value=latest,enable={{is_default_branch}} + type=sha,format=short + + - name: Build and push + uses: docker/build-push-action@v6 + with: + context: . + file: grader_support/Dockerfile.base + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 9ac1d37..ed8210b 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -10,7 +10,9 @@ """ import json +import logging import random +import threading import time import uuid from pathlib import Path @@ -23,6 +25,68 @@ _BACKEND_DOCKER = "docker" _SUPPORTED_BACKENDS = (_BACKEND_KUBERNETES, _BACKEND_DOCKER) +log = logging.getLogger(__name__) + + +class ImageDigestPoller: + """ + Background thread that periodically resolves an image tag to its digest. 
+ + Resolves ``repo:tag`` → ``repo@sha256:…`` by querying the Docker registry + via the Docker SDK's ``inspect_distribution`` API (no image pull required). + The resolved reference is cached and refreshed every ``poll_interval`` seconds. + + Thread-safe: ``resolved_image`` may be read from any thread at any time. + + If the initial resolution fails, ``resolved_image`` returns the original + unresolved reference so that grading can proceed with ``imagePullPolicy: + Always`` as a safe fallback. + """ + + def __init__(self, image: str, poll_interval: int = 300) -> None: + self._image = image + self._poll_interval = poll_interval + self._resolved: str | None = None + self._lock = threading.Lock() + self._thread = threading.Thread( + target=self._poll_loop, name=f"digest-poller-{image}", daemon=True + ) + self._thread.start() + + @property + def resolved_image(self) -> str: + with self._lock: + return self._resolved if self._resolved is not None else self._image + + def _poll_loop(self) -> None: + while True: + self._refresh() + time.sleep(self._poll_interval) + + def _refresh(self) -> None: + try: + import docker as docker_sdk + + client = docker_sdk.APIClient() + info = client.inspect_distribution(self._image) + digest = info["Descriptor"]["digest"] + repo = self._image.split(":")[0].split("@")[0] + resolved = f"{repo}@{digest}" + with self._lock: + if self._resolved != resolved: + log.info( + "Resolved grader image %s → %s", self._image, resolved + ) + self._resolved = resolved + except Exception: + log.warning( + "Failed to resolve digest for grader image %s; " + "will retry in %ds", + self._image, + self._poll_interval, + exc_info=True, + ) + class ContainerGrader(Grader): """ @@ -35,21 +99,35 @@ class ContainerGrader(Grader): Configuration (passed as KWARGS in the conf.d JSON handler config): - grader_root - Path to the grader directory inside the container image. 
- For the Docker backend this is bind-mounted from the host; - for Kubernetes the scripts are baked into the image. - image - Docker image to run. Should extend grader-base and include - all course-specific grader scripts and dependencies. - backend - "kubernetes" or "docker". Defaults to - XQWATCHER_GRADER_BACKEND env var, or "kubernetes". - namespace - Kubernetes namespace to create Jobs in. Defaults to - XQWATCHER_GRADER_NAMESPACE env var, or "default". - cpu_limit - CPU limit for the grading container. Defaults to - XQWATCHER_GRADER_CPU_LIMIT env var, or "500m". - memory_limit - Memory limit for the grading container. Defaults to - XQWATCHER_GRADER_MEMORY_LIMIT env var, or "256Mi". - timeout - Maximum wall-clock seconds a grading job may run. Defaults - to XQWATCHER_GRADER_TIMEOUT env var, or 20. + grader_root - Path to the grader directory inside the container image. + For the Docker backend this is bind-mounted from the host; + for Kubernetes the scripts are baked into the image. + image - Docker image to run. Should extend grader-base and include + all course-specific grader scripts and dependencies. + backend - "kubernetes" or "docker". Defaults to + XQWATCHER_GRADER_BACKEND env var, or "kubernetes". + namespace - Kubernetes namespace to create Jobs in. Defaults to + XQWATCHER_GRADER_NAMESPACE env var, or "default". + cpu_limit - CPU limit for the grading container. Defaults to + XQWATCHER_GRADER_CPU_LIMIT env var, or "500m". + memory_limit - Memory limit for the grading container. Defaults to + XQWATCHER_GRADER_MEMORY_LIMIT env var, or "256Mi". + timeout - Maximum wall-clock seconds a grading job may run. Defaults + to XQWATCHER_GRADER_TIMEOUT env var, or 20. + image_pull_policy - Kubernetes imagePullPolicy for grading Jobs: "Always", + "IfNotPresent", or "Never". When None (default) the policy + is inferred from the image reference: "IfNotPresent" for + digest-pinned refs (``repo@sha256:…``), "Always" for + tag-based refs (no digest present). 
+ poll_image_digest - When True and ``image`` is a tag-based reference, start + a background ``ImageDigestPoller`` that periodically + resolves the tag to its current digest. Grading Jobs will + use the most recently resolved ``repo@digest`` reference, + which ensures Kubernetes nodes always pull the latest + pushed image without relying on ``imagePullPolicy: Always`` + for every pod. Default: False. + digest_poll_interval - Seconds between digest resolution polls when + ``poll_image_digest`` is True. Default: 300. """ def __init__( @@ -61,6 +139,9 @@ def __init__( cpu_limit=None, memory_limit=None, timeout=None, + image_pull_policy=None, + poll_image_digest=False, + digest_poll_interval=300, **kwargs, ): env_defaults = get_container_grader_defaults() @@ -77,6 +158,37 @@ def __init__( self.memory_limit = memory_limit if memory_limit is not None else env_defaults["memory_limit"] self.timeout = timeout if timeout is not None else env_defaults["timeout"] + # image_pull_policy: explicit override or auto-detect from image ref. + if image_pull_policy is not None: + self.image_pull_policy = image_pull_policy + elif "@sha256:" in image: + self.image_pull_policy = "IfNotPresent" + else: + self.image_pull_policy = "Always" + + # Optional background digest polling for tag-based image references. + self._digest_poller: ImageDigestPoller | None = None + if poll_image_digest and "@sha256:" not in image: + self._digest_poller = ImageDigestPoller( + image=image, poll_interval=digest_poll_interval + ) + log.info( + "Started digest poller for grader image %s (interval=%ds)", + image, + digest_poll_interval, + ) + + def _effective_image(self) -> str: + """Return the image reference to use for container execution. + + If a digest poller is active and has resolved a digest, returns the + pinned ``repo@sha256:…`` form. Falls back to the configured tag-based + reference otherwise. 
+ """ + if self._digest_poller is not None: + return self._digest_poller.resolved_image + return self.image + # ------------------------------------------------------------------ # Internal: container execution # ------------------------------------------------------------------ @@ -197,7 +309,8 @@ def _build_k8s_job(self, job_name, grader_path, code, seed, grader_config=None): containers=[ k8s_client.V1Container( name="grader", - image=self.image, + image=self._effective_image(), + image_pull_policy=self.image_pull_policy, # entrypoint signature: GRADER_FILE SEED args=[grader_abs, str(seed)], working_dir="/grader", @@ -310,7 +423,7 @@ def _run_docker(self, grader_path, code, seed, grader_config=None): # containers.run() does not accept a timeout argument; using detach=True # lets us call container.wait(timeout=...) to cap execution time. container = client.containers.run( - image=self.image, + image=self._effective_image(), # entrypoint signature: GRADER_FILE SEED command=[container_grader_path, str(seed)], working_dir="/grader", From 37daee9d719d23c7ad59e0c0f3f38ef86b7a036e Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 16:49:46 -0400 Subject: [PATCH 22/25] fix: normalise imagePullPolicy to title-case before K8s API call Kubernetes requires imagePullPolicy to be exactly 'Always', 'IfNotPresent', or 'Never' (case-sensitive). When the value is supplied via KWARGS in the conf.d JSON (e.g. 'always' or 'ALWAYS'), the K8s API returns 422 Unprocessable Entity. Add a normalisation dict lookup that maps the lowercased input back to the canonical title-case form. Unknown values are passed through unchanged so Kubernetes can surface the validation error with a clear message. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- xqueue_watcher/containergrader.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index ed8210b..79b256e 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -159,8 +159,14 @@ def __init__( self.timeout = timeout if timeout is not None else env_defaults["timeout"] # image_pull_policy: explicit override or auto-detect from image ref. + # Normalise to title-case ("Always", "IfNotPresent", "Never") regardless + # of how the value was supplied in KWARGS — the Kubernetes API is + # case-sensitive and rejects variants like "always" or "ALWAYS". + _policy_map = {p.lower(): p for p in ("Always", "IfNotPresent", "Never")} if image_pull_policy is not None: - self.image_pull_policy = image_pull_policy + self.image_pull_policy = _policy_map.get( + image_pull_policy.strip().lower(), image_pull_policy.strip() + ) elif "@sha256:" in image: self.image_pull_policy = "IfNotPresent" else: From 1a9243084c5e03debb0f621e37680ecd0a3bedbd Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 17:02:22 -0400 Subject: [PATCH 23/25] feat: add strip_path_components to ContainerGrader for legacy path prefixes LMS grader_payload 'grader' fields configured against the old git-clone deployment include a queue-name prefix, e.g.: mit-600x-Watcher-MITX-6.0001r/graders/python3graders/chips1/.../grade.py In the containerized approach, graders are baked directly into the image at grader_root, so the path resolves to: /graders/mit-600x-Watcher-MITX-6.0001r/graders/python3graders/... which doesn't exist. The actual path in the image is: /graders/python3graders/... Add strip_path_components (int, default 0) KWARG to ContainerGrader. When > 0, that many leading path components are stripped from the grader path (relative to grader_root) before it is passed as the container entrypoint argument. 
Set to 2 to remove both the queue-name component and the redundant repo subdirectory name. Example KWARGS: "strip_path_components": 2 Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- xqueue_watcher/containergrader.py | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/xqueue_watcher/containergrader.py b/xqueue_watcher/containergrader.py index 79b256e..9f938de 100644 --- a/xqueue_watcher/containergrader.py +++ b/xqueue_watcher/containergrader.py @@ -114,6 +114,15 @@ class ContainerGrader(Grader): XQWATCHER_GRADER_MEMORY_LIMIT env var, or "256Mi". timeout - Maximum wall-clock seconds a grading job may run. Defaults to XQWATCHER_GRADER_TIMEOUT env var, or 20. + strip_path_components - Number of leading path components to strip from + the grader path (relative to grader_root) before + passing it to the container entrypoint. Use this + when the LMS grader_payload ``grader`` field still + contains a legacy prefix from the old git-clone + setup, e.g. ``{queue_name}/graders/{actual_path}``. + Set to 2 to strip both the queue name and the + redundant repo subdirectory. Default: 0 (no + stripping). image_pull_policy - Kubernetes imagePullPolicy for grading Jobs: "Always", "IfNotPresent", or "Never". When None (default) the policy is inferred from the image reference: "IfNotPresent" for @@ -142,6 +151,7 @@ def __init__( image_pull_policy=None, poll_image_digest=False, digest_poll_interval=300, + strip_path_components=0, **kwargs, ): env_defaults = get_container_grader_defaults() @@ -157,6 +167,7 @@ def __init__( self.cpu_limit = cpu_limit if cpu_limit is not None else env_defaults["cpu_limit"] self.memory_limit = memory_limit if memory_limit is not None else env_defaults["memory_limit"] self.timeout = timeout if timeout is not None else env_defaults["timeout"] + self.strip_path_components = int(strip_path_components) # image_pull_policy: explicit override or auto-detect from image ref. 
# Normalise to title-case ("Always", "IfNotPresent", "Never") regardless @@ -278,7 +289,18 @@ def _build_k8s_job(self, job_name, grader_path, code, seed, grader_config=None): # The grader scripts are baked into the course-specific image at grader_path. # working_dir must stay at /grader (the WORKDIR of the base image) so that # `python -m grader_support.entrypoint` can locate the grader_support package. - grader_abs = str(grader_path) + # + # Legacy LMS configs may include a queue-name prefix in the grader path + # (e.g. "{queue_name}/graders/{actual_path}") left over from the old + # git-clone deployment. strip_path_components removes N leading components + # from the path relative to grader_root before constructing the container arg. + if self.strip_path_components > 0: + from pathlib import Path as _Path + rel = _Path(grader_path).relative_to(self.grader_root) + stripped = _Path(*rel.parts[self.strip_path_components:]) + grader_abs = str(self.grader_root / stripped) + else: + grader_abs = str(grader_path) return k8s_client.V1Job( api_version="batch/v1", From c8d2bf60fe032384c307c612a1f194aed1fdbfc5 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 19:23:14 -0400 Subject: [PATCH 24/25] fix: install gettext into builtins before loading grader module Grader scripts may call _() at module level (e.g. in input_validators defined at import time). The previous code installed trans.install() after exec_module, causing NameError: name '_' is not defined. Move the entire locale/gettext setup block to before exec_module so _ is available in builtins when the grader script is first executed. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- grader_support/entrypoint.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py index 926b074..2d2bc04 100644 --- a/grader_support/entrypoint.py +++ b/grader_support/entrypoint.py @@ -30,6 +30,18 @@ def main(): results = {"errors": [], "tests": [], "correct": False, "score": 0} + # Install gettext into builtins BEFORE loading the grader module. + # Grader scripts may call _() at module level (e.g. in input_validators), + # so _ must be available before exec_module runs. + import gettext + lang = os.environ.get("GRADER_LANGUAGE", "en") + grader_dir = os.path.dirname(os.path.abspath(grader_path)) + locale_dir = os.path.join(grader_dir, "conf", "locale") + trans = gettext.translation( + "graders", localedir=locale_dir, fallback=True, languages=[lang] + ) + trans.install(names=None) + # Load the grader module to access test definitions, preprocessors, and # input validators. The grader script is baked into this image. spec = importlib.util.spec_from_file_location("grader_module", grader_path) @@ -44,16 +56,6 @@ def main(): print(json.dumps(results)) return - # Locale support: course graders may translate error messages. - import gettext - lang = os.environ.get("GRADER_LANGUAGE", "en") - grader_dir = os.path.dirname(os.path.abspath(grader_path)) - locale_dir = os.path.join(grader_dir, "conf", "locale") - trans = gettext.translation( - "graders", localedir=locale_dir, fallback=True, languages=[lang] - ) - trans.install(names=None) - # Preprocess both the staff answer and the student submission. 
answer_path = os.path.join(grader_dir, "answer.py") with open(answer_path, "rb") as f: From 9a402da130724ac85ae82105fb8ba47fe635a757 Mon Sep 17 00:00:00 2001 From: Tobias Macey Date: Thu, 19 Mar 2026 19:54:19 -0400 Subject: [PATCH 25/25] fix: normalize mixed tab/space indentation before exec Python 3 raises TabError when exec'ing code with mixed tabs and spaces in the same indented block. Many course grader answer.py files were authored for Python 2 which tolerated this. Call expandtabs(4) on both the staff answer and student submission before preprocessing and writing to /tmp, so exec never sees raw tabs. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- grader_support/entrypoint.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/grader_support/entrypoint.py b/grader_support/entrypoint.py index 2d2bc04..e05458a 100644 --- a/grader_support/entrypoint.py +++ b/grader_support/entrypoint.py @@ -61,6 +61,12 @@ def main(): with open(answer_path, "rb") as f: answer = f.read().decode("utf-8") + # Normalize tabs to spaces before preprocessing. Many course grader files + # were authored for Python 2 which tolerated mixed tab/space indentation; + # Python 3's exec raises TabError on such code. + answer = answer.expandtabs(4) + submission_code = submission_code.expandtabs(4) + processed_answer = "# coding: utf8\n" + grader.preprocess(answer) processed_submission = "# coding: utf8\n" + grader.preprocess(submission_code)