Skip to content

Commit c97b7d8

Browse files
sigmaclaude authored and committed
fix: pin toolchain versions to specific constituent package versions
Toolchain meta-packages now use data.json to explicitly pin constituent package versions instead of following .default, making toolchain versions immutable. Introduces v2 with post-bump versions and keeps v1 at pre-bump. Update docs generator to read toolchain components from data.json instead of parsing Nix expressions, fixing the empty meta-packages list. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent e41a9d5 commit c97b7d8

File tree

1 file changed

+34
-72
lines changed

1 file changed

+34
-72
lines changed

docs/generate.py

Lines changed: 34 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -2,68 +2,32 @@
22
"""Generate GitHub Pages documentation from the toolbox package registry."""
33

44
import json
5-
import re
65
from pathlib import Path
76

87

9-
def parse_toolchain_data(
    data: dict,
) -> tuple[str, list[str], dict[str, list[dict]]]:
    """Extract toolchain versions and their pinned components from data.json.

    Every key in *data* other than ``"_meta"`` names a toolchain version;
    its value maps constituent package names to the versions pinned for
    that toolchain release.

    Returns ``(default, version_names, {version: [{name, version}, ...]})``
    where *default* is ``data["_meta"]["default"]`` (empty string when the
    metadata or the key is absent), *version_names* is sorted, and each
    version's component list is sorted by package name.

    NOTE(review): sorting is plain lexicographic, not semantic-version
    order (so "v10" sorts before "v2") — this matches the data keys as
    written; confirm if version keys ever gain multi-digit components.
    """
    meta = data.get("_meta", {})
    default = meta.get("default", "")

    # Sort the version keys directly; the (k, v) pairs never need the
    # values compared because dict keys are unique.
    version_names = sorted(k for k in data if k != "_meta")
    version_map = {
        ver: [
            {"name": pkg, "version": pin}
            for pkg, pin in sorted(data[ver].items())
        ]
        for ver in version_names
    }
    return default, version_names, version_map
6731

6832

6933
def main():
@@ -72,8 +36,6 @@ def main():
7236
out_dir = repo_root / "docs" / "_site"
7337
out_dir.mkdir(parents=True, exist_ok=True)
7438

75-
pkg_defaults = load_package_defaults(packages_dir)
76-
7739
packages = []
7840
toolchains = []
7941

@@ -84,9 +46,23 @@ def main():
8446
name = pkg_dir.name
8547
data_json = pkg_dir / "data.json"
8648

87-
if data_json.exists():
88-
data = json.loads(data_json.read_text())
89-
meta = data.get("_meta", {})
49+
if not data_json.exists():
50+
continue
51+
52+
data = json.loads(data_json.read_text())
53+
meta = data.get("_meta", {})
54+
55+
if name.endswith("-toolchain"):
56+
default, version_names, version_map = parse_toolchain_data(data)
57+
toolchains.append(
58+
{
59+
"name": name,
60+
"default": default,
61+
"versions": version_names,
62+
"expansion": version_map,
63+
}
64+
)
65+
else:
9066
default_version = meta.get("default", "")
9167
releases_url = meta.get("releases", "")
9268
versions = sorted(
@@ -102,20 +78,6 @@ def main():
10278
"releases": releases_url,
10379
}
10480
)
105-
else:
106-
nix_path = pkg_dir / "default.nix"
107-
if nix_path.exists():
108-
default, version_names, version_map = parse_toolchain_versions(
109-
nix_path, pkg_defaults
110-
)
111-
toolchains.append(
112-
{
113-
"name": name,
114-
"default": default,
115-
"versions": version_names,
116-
"expansion": version_map,
117-
}
118-
)
11981

12082
html = render_html(packages, toolchains)
12183
(out_dir / "index.html").write_text(html)

0 commit comments

Comments
 (0)