Skip to content

Commit fe4e641

Browse files
BLD: Add scripts to tools
– Add python script to read python versions for matrix runners – Update workflows (debugging docs rsync)
1 parent e5b7986 commit fe4e641

9 files changed

Lines changed: 452 additions & 24 deletions

File tree

.github/workflows/docs.yml

Lines changed: 25 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,12 @@ name: Publish Docs
22

33
on:
44
workflow_dispatch:
5+
pull_request:
6+
push:
7+
branches:
8+
- master
9+
tags:
10+
- '*'
511

612
permissions:
713
contents: read
@@ -33,17 +39,9 @@ jobs:
3339
# - name: Set nox conda backend
3440
# run: echo "CONDA_EXE=micromamba" >> "$GITHUB_ENV"
3541
#
36-
# - name: Build wheels
37-
# run: |
38-
# nox -s build
39-
#
40-
# - name: Repair wheels
41-
# run: |
42-
# nox -s repair
43-
#
4442
# - name: Build docs
4543
# run: |
46-
# nox -s docs
44+
# nox -s build_docs
4745
#
4846
# - name: Upload docs artifact
4947
# if: always()
@@ -53,6 +51,24 @@ jobs:
5351
# path: .nox/_artifacts/docs/html*/
5452
# retention-days: 7
5553

54+
- name: Checkout repo
55+
uses: actions/checkout@v4
56+
57+
- name: Create dummy docs
58+
run: |
59+
mkdir -p .nox/_artifacts/docs/html-test
60+
date > .nox/_artifacts/docs/html-test/build-info.txt
61+
echo '<html><body><h1>RSYNC TEST OK</h1></body></html>' \
62+
> .nox/_artifacts/docs/html-test/index.html
63+
ls -alR .nox/_artifacts/docs
64+
65+
- name: Upload dummy docs artifact
66+
uses: actions/upload-artifact@v4
67+
with:
68+
name: dummy-docs
69+
path: .nox/_artifacts/docs/html-test/
70+
retention-days: 7
71+
5672
- name: Prepare SSH
5773
env:
5874
SSH_HOST: ${{ secrets.DOCS_SSH_HOST }}

.github/workflows/publish.yml

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,13 @@
1-
name: CICD Pipeline
1+
name: Publish Build
22

33
on:
44
workflow_dispatch:
5+
pull_request:
6+
push:
7+
branches:
8+
- master
9+
tags:
10+
- '*'
511

612
permissions:
713
actions: read
@@ -22,7 +28,7 @@ jobs:
2228
- name: Setup python
2329
uses: actions/setup-python@v5
2430
with:
25-
python-version: 3.13
31+
python-version-file: .python-version
2632

2733
- name: Install nox
2834
run: |
@@ -70,7 +76,7 @@ jobs:
7076
- name: Setup Python
7177
uses: actions/setup-python@v5
7278
with:
73-
python-version: 3.13
79+
python-version-file: .python-version
7480

7581
- name: Install build dependencies
7682
run: |
@@ -132,7 +138,7 @@ jobs:
132138
- name: Setup Python
133139
uses: actions/setup-python@v5
134140
with:
135-
python-version: 3.13
141+
python-version-file: .python-version
136142

137143
- name: Install build dependencies
138144
run: |

tests/test_module.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,13 @@
22
import json
33

44

5+
def test_version():
    """The installed package must expose a real, resolved version string."""
    from mapflpy import __version__
    assert isinstance(__version__, str)
    assert __version__ != ""
    # "0+unknown" is the setuptools-scm fallback when no version was resolved.
    assert __version__ != "0+unknown"
10+
11+
512
def test_shared_object():
613
from tests.utils import check_shared_object
714
location = check_shared_object()

tools/make_build.py

Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
#!/usr/bin/env python3
2+
from __future__ import annotations
3+
import argparse
4+
import shutil
5+
import subprocess
6+
import sys
7+
import zipfile
8+
from pathlib import Path
9+
10+
# ---- small helpers
11+
12+
def run(cmd: list[str], cwd: Path | None = None, env: dict[str, str] | None = None) -> None:
    """Echo *cmd* (with optional cwd) to stdout, then execute it, raising on failure."""
    suffix = f" (cwd={cwd})" if cwd else ""
    print("+", " ".join(cmd) + suffix, flush=True)
    subprocess.run(cmd, cwd=None if cwd is None else str(cwd), env=env, check=True)
16+
17+
def read_pyproject_name(pp: Path) -> str | None:
    """Best-effort read of ``[project].name`` from *pp*; ``None`` on any failure."""
    try:
        try:
            import tomllib  # Python 3.11+
        except ModuleNotFoundError:  # pragma: no cover
            import tomli as tomllib  # type: ignore
        parsed = tomllib.loads(pp.read_text(encoding="utf-8"))
        candidate = parsed.get("project", {}).get("name")
    except Exception:
        # Deliberately swallow everything: the caller treats None as "unknown".
        return None
    return candidate if isinstance(candidate, str) else None
29+
30+
def select_best_wheel(dist_dir: Path) -> Path:
    """Pick the wheel in *dist_dir* that best matches this interpreter/platform.

    Preference follows ``packaging.tags.sys_tags()``, which yields tags
    most-specific-first.  If ``packaging`` is unavailable or scoring fails,
    fall back to the most recently built wheel.

    Raises:
        SystemExit: when *dist_dir* contains no ``*.whl`` files.
    """
    wheels = sorted(dist_dir.glob("*.whl"))
    if not wheels:
        raise SystemExit(f"No wheels found in {dist_dir}")
    try:
        from packaging import tags
        supported = list(tags.sys_tags())

        def score(p: Path) -> int:
            # BUG FIX: the previous code returned -i and took sorted(...)[0],
            # which inverted the preference order (a wheel matching a *later*,
            # less-specific tag scored lower and won).  Lower index in
            # sys_tags() means a better match, so return the index directly
            # and take the minimum.
            n = p.name
            for i, t in enumerate(supported):
                tag = f"{t.interpreter}-{t.abi}-{t.platform}"
                # allow -any platform too
                if tag in n or f"{t.interpreter}-{t.abi}-any" in n:
                    return i
            return 10**9  # no supported tag matched at all

        return min(wheels, key=score)
    except Exception:
        # Fallback: newest by mtime
        return max(wheels, key=lambda p: p.stat().st_mtime)
50+
51+
def extract_extension_from_wheel(wheel: Path, dest_pkg_dir: Path) -> Path:
    """Copy the first compiled extension under ``<pkg>/fortran/`` out of *wheel*.

    Discouraged pattern — only if you truly need the .so in-tree (e.g. docs hack).
    """
    prefix = f"{dest_pkg_dir.name}/fortran/"
    binary_suffixes = (".so", ".dylib", ".pyd")
    with zipfile.ZipFile(wheel) as zf:
        candidates = [
            name for name in zf.namelist()
            if name.replace("\\", "/").startswith(prefix)
            and name.endswith(binary_suffixes)
        ]
        if not candidates:
            raise SystemExit("No compiled extension found inside wheel.")
        chosen = candidates[0]
        target = dest_pkg_dir / "fortran" / Path(chosen).name
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(zf.read(chosen))
    print(f"Extracted {chosen} -> {target}")
    return target
65+
66+
# ---- main script
67+
68+
def main() -> int:
    """CLI entry point: build artifacts, pick a wheel, optionally extract, run tests.

    Steps: optional clean of build artifacts, ``python -m build``, wheel
    selection, optional extraction of the compiled extension back into the
    source tree, then a pytest run against the tests directory.

    Returns 0 on success.  Unrecoverable setup problems exit via SystemExit;
    build/test subprocess failures propagate as CalledProcessError from run().
    """
    # NOTE(review): the description below mentions installing into a fresh
    # venv, but no venv is created anywhere in this function — confirm
    # whether that step is still intended or the text is stale.
    parser = argparse.ArgumentParser(
        description="Build wheel (and sdist), install into a fresh venv, run tests. Optionally extract the compiled extension back to source."
    )
    parser.add_argument("-c", "--clean", action="store_true", help="Remove build artifacts before building")
    parser.add_argument("-s", "--sdist", action="store_true", help="Also build an sdist")
    parser.add_argument("--root", type=Path, default=Path(__file__).resolve().parents[1],
                        help="Project root (default: the parent of this script)")
    parser.add_argument("--package-name", default=None,
                        help="Package name to install from wheel (default: read from pyproject.toml)")
    parser.add_argument("--tests", type=Path, default=None,
                        help="Path to test directory (default: <root>/tests if exists)")
    parser.add_argument("--extract", action="store_true",
                        help="Extract the compiled extension from the wheel back into the source tree")
    # REMAINDER captures everything after the flag verbatim (use after `--`).
    parser.add_argument("--pytest-args", nargs=argparse.REMAINDER,
                        help="Extra args to pass to pytest (use after `--`)")
    args = parser.parse_args()

    root = args.root.resolve()
    dist = root / "dist"
    build_dir = root / "build"
    # Only the FIRST *.egg-info directory found is tracked for --clean.
    egg_info = next(root.glob("*.egg-info"), None)
    pkg_name = args.package_name or read_pyproject_name(root / "pyproject.toml") or ""
    if not pkg_name:
        raise SystemExit("Cannot determine package name; use --package-name or set [project].name in pyproject.toml.")

    tests_dir = args.tests or (root / "tests" if (root / "tests").is_dir() else None)

    if args.clean:
        for p in (dist, build_dir, root / ".pytest_cache"):
            if p.exists():
                print(f"Removing {p}")
                shutil.rmtree(p, ignore_errors=True)
        if egg_info and egg_info.exists():
            print(f"Removing {egg_info}")
            shutil.rmtree(egg_info, ignore_errors=True)

    # Build artifacts with PEP 517 `build`; --wheel always, --sdist on request.
    build_cmd = [sys.executable, "-m", "build", "--wheel"]
    if args.sdist:
        build_cmd.append("--sdist")
    build_cmd.append(str(root))
    run(build_cmd)

    wheel = select_best_wheel(dist)
    print(f"Selected wheel: {wheel.name}")

    # Optional: extract compiled extension back into source (not recommended)
    if args.extract:
        extract_extension_from_wheel(wheel, root / pkg_name)

    # Run tests (if any); run() raises on a non-zero pytest exit.
    if tests_dir and tests_dir.exists():
        cmd = [sys.executable, "-m", "pytest", str(tests_dir)]
        if args.pytest_args:
            cmd = [*cmd, *args.pytest_args]
        run(cmd)
    else:
        print("No tests directory found; skipping pytest.")

    print("All done.")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

tools/make_docs.py

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
#!/usr/bin/env python3
2+
from __future__ import annotations
3+
import argparse
4+
import shutil
5+
import subprocess
6+
import sys
7+
from pathlib import Path
8+
9+
def run(cmd: list[str], cwd: Path | None = None) -> None:
    """Print the command being executed, then run it, raising on failure."""
    location = f"(cwd={cwd})" if cwd else ""
    print("+", " ".join(cmd), location)
    subprocess.run(cmd, cwd=None if cwd is None else str(cwd), check=True)
12+
13+
def main() -> int:
    """Build the Sphinx documentation, optionally cleaning generated dirs first."""
    # SCRIPT_DIR = directory containing this script
    here = Path(__file__).resolve().parent
    project_root = here.parent

    docs_dir = project_root / "docs"
    build_dir = docs_dir / "_build"
    generated_dirs = (
        build_dir,
        docs_dir / "source" / "autodoc",
        docs_dir / "source" / "gallery",
    )

    parser = argparse.ArgumentParser(description="Build Sphinx docs with optional cleanup/fetch steps.")
    parser.add_argument("-c", "--clean", action="store_true", help="Remove previous build/autodoc/gallery")
    parser.add_argument("--use-make", action="store_true", help="Use `make -C docs html` instead of sphinx-build")
    parser.add_argument("--sphinx-args", nargs=argparse.REMAINDER,
                        help="Extra args passed to sphinx-build after `html` (use after `--`)")
    opts = parser.parse_args()

    if opts.clean:
        for target in generated_dirs:
            if target.exists():
                print(f"Removing {target}")
                shutil.rmtree(target, ignore_errors=True)

    if opts.use_make:
        # Delegates to the POSIX Makefile; prefer the sphinx-build branch on Windows.
        run(["make", "-C", str(docs_dir), "html"])
    else:
        # Direct, portable sphinx-build invocation:
        #   sphinx-build -W --keep-going -b html docs/source docs/_build/html
        html_out = build_dir / "html"
        html_out.mkdir(parents=True, exist_ok=True)
        command = [
            sys.executable, "-m", "sphinx",
            "-W", "--keep-going",
            "-b", "html",
            str(docs_dir / "source"),
            str(html_out),
        ]
        if opts.sphinx_args:
            command.extend(opts.sphinx_args)
        run(command)

    print("Docs build completed.")
    return 0
58+
59+
if __name__ == "__main__":
    # Bug fix: SystemExit(main()) was previously instantiated but never
    # raised, so main()'s return code was discarded and the script always
    # exited with status 0 — even on failure.
    raise SystemExit(main())

tools/make_intersphinx.py

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
#!/usr/bin/env python3
2+
"""
3+
Fetch intersphinx inventories into docs/_intersphinx (by default).
4+
5+
Usage:
6+
python fetch_intersphinx.py
7+
python fetch_intersphinx.py --target /path/to/_intersphinx
8+
python fetch_intersphinx.py --add pandas=https://pandas.pydata.org/docs/objects.inv
9+
"""
10+
from __future__ import annotations
11+
import argparse
12+
import sys
13+
from pathlib import Path
14+
from urllib.request import urlopen, Request
15+
from urllib.error import HTTPError, URLError
16+
17+
DEFAULT_SOURCES = {
18+
"python": "https://docs.python.org/3/objects.inv",
19+
"numpy": "https://numpy.org/doc/stable/objects.inv",
20+
"matplotlib": "https://matplotlib.org/stable/objects.inv",
21+
"pytest": "https://docs.pytest.org/en/stable/objects.inv",
22+
"pooch": "https://www.fatiando.org/pooch/latest/objects.inv",
23+
"scipy": "https://docs.scipy.org/doc/scipy/objects.inv",
24+
}
25+
26+
def fetch(url: str, dest: Path, timeout: float = 30.0) -> None:
    """Download *url* into *dest*, creating parent directories as needed.

    HTTP and URL errors are reported on stderr and re-raised to the caller.
    """
    dest.parent.mkdir(parents=True, exist_ok=True)
    request = Request(url, headers={"User-Agent": "fetch-intersphinx/1.0"})
    try:
        with urlopen(request, timeout=timeout) as response:
            payload = response.read()
    except HTTPError as err:
        print(f"HTTP {err.code} for {url}", file=sys.stderr)
        raise
    except URLError as err:
        print(f"URL error for {url}: {err}", file=sys.stderr)
        raise
    dest.write_bytes(payload)
    print(f"✓ {url} -> {dest}")
40+
41+
def parse_args() -> argparse.Namespace:
    """Build and evaluate the CLI: ``--target`` output dir, repeatable ``--add NAME=URL``."""
    tools_dir = Path(__file__).resolve().parent
    repo_root = tools_dir.parent
    default_target = repo_root / "docs" / "_intersphinx"

    parser = argparse.ArgumentParser(description="Download intersphinx inventories.")
    parser.add_argument("--target", type=Path, default=default_target,
                        help=f"Output directory (default: {default_target})")
    parser.add_argument(
        "--add", action="append", default=[],
        help="Extra inventory as NAME=URL (can be repeated).",
    )
    return parser.parse_args()
54+
55+
def main() -> int:
    """Download every configured inventory; return 0 on success, 1 on download
    failures, 2 on malformed ``--add`` arguments."""
    args = parse_args()

    sources = dict(DEFAULT_SOURCES)
    # Merge --add NAME=URL entries; later entries override the defaults.
    for spec in args.add:
        name, sep, url = spec.partition("=")
        name, url = name.strip(), url.strip()
        if not sep or not name or not url:
            print(f"--add expects NAME=URL, got: {spec}", file=sys.stderr)
            return 2
        sources[name] = url

    target_dir = args.target
    target_dir.mkdir(parents=True, exist_ok=True)

    failures = []
    for name, url in sources.items():
        destination = target_dir / f"{name}-objects.inv"
        try:
            fetch(url, destination)
        except Exception:
            # fetch() already reported the error; just record it.
            failures.append((name, url))

    if not failures:
        return 0
    print("\nSome downloads failed:", file=sys.stderr)
    for name, url in failures:
        print(f" - {name}: {url}", file=sys.stderr)
    return 1


if __name__ == "__main__":
    raise SystemExit(main())

0 commit comments

Comments
 (0)