Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified .DS_Store
Binary file not shown.
33 changes: 19 additions & 14 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -1,26 +1,31 @@
[run]
branch = True
source =
nextlevelapex
Tests
nextlevelapex
Tests
omit =
*/__init__.py
*/tests/*
*/migrations/*
.venv/*
setup.py
*/site-packages/*
*/__init__.py
*/migrations/*
*/site-packages/*
*/tests/*
.venv/*
nextlevelapex/core/logger.py
nextlevelapex/main.py
nextlevelapex/tasks/*
setup.py


[report]
# Improve human readability in CLI output
show_missing = True
skip_covered = True
exclude_lines =
pragma: no cover
def __repr__
if self\.debug
raise NotImplementedError
if __name__ == .__main__.:
def __repr__
if TYPE_CHECKING:
if __name__ == .__main__.:
if self\.debug
pragma: no cover
raise NotImplementedError


[html]
directory = htmlcov
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -133,3 +133,6 @@ cython_debug/
# macOS Files
.DS_Store
etc-pihole/tls.pem

# editor/backup files
*.bak
55 changes: 21 additions & 34 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,22 +1,16 @@
--- # ← document start required by yamllint
# ---------------------------------------------------------------------------
# Global pre‑commit settings
# ---------------------------------------------------------------------------
--- # ← document start (keeps yamllint happy)
minimum_pre_commit_version: "3.6.0"

default_language_version:
python: python3.13 # project’s baseline interpreter
python: python3.11 # match your Poetry env (3.11.x)

ci:
autofix: true # rewrite files, then fail so diff is visible
autofix: true # rewrite files, then fail so diff is visible
fail_fast: true
default_stages: [pre-commit, pre-push]
default_stages: [pre-commit, pre-push] # was [commit, push]

# ---------------------------------------------------------------------------
# Repositories & hooks
# ---------------------------------------------------------------------------
repos:
# ---------------------------------------------------- Housekeeping hooks
# ---------------------------------------------------- House-keeping hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
Expand Down Expand Up @@ -47,46 +41,50 @@ repos:
hooks:
- id: shfmt
args: ["-i", "2", "-sr", "-ci"]
files: "^scripts/.*\\.sh$" # only format our scripts/
exclude: "^docker/" # avoid parsing docker/orchestrate.sh for now

- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
hooks:
- id: shellcheck
args: ["--severity", "warning"]
files: "^scripts/.*\\.sh$"
exclude: "^docker/"

# ----------------------------------- Python formatters & linters stack
- repo: https://github.com/psf/black
rev: 25.1.0
hooks:
- id: black
language_version: python3.13
language_version: python3.11 # <-- was python3.13

- repo: https://github.com/PyCQA/isort
rev: 6.0.1
hooks:
- id: isort
args: ["--profile", "black"]
language_version: python3.13
language_version: python3.11 # <-- was python3.13

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.8 # bundles Ruff 0.11.8 binary
rev: v0.11.9 # keep in sync with your lockfile/ruff version
hooks:
# 1 Formatter (runs first)
# - id: ruff-format
# stages: [pre-commit]
# exclude: "build/|dist/|\\.venv/|\\.eggs/|\\.mypy_cache/|\\.ruff_cache/"
# If you want the formatter, uncomment:
# - id: ruff-format
# stages: [commit]

# Linter + autofix on commit
# Linter + auto-fix on commit
- id: ruff
name: ruff-lint-fix
args: ["--fix", "--exit-non-zero-on-fix", "--show-fixes", "--unsafe-fixes"]
stages: [pre-commit]
args:
["--fix", "--exit-non-zero-on-fix", "--show-fixes", "--unsafe-fixes"]
stages: [pre-commit] # <-- was [commit]

# Strict linter on push/CI (no fixes)
# Strict linter on push/CI (no fixes)
- id: ruff
name: ruff-lint-ci
args: ["--show-source"]
stages: [pre-push]
stages: [pre-push] # <-- was [push]

- repo: local
hooks:
Expand All @@ -98,14 +96,3 @@ repos:
files: "\\.py$"
pass_filenames: false
always_run: true


# ---------------------------------------------------------------------------
# Optional – MyPy strict typing (uncomment when ready)
# ---------------------------------------------------------------------------
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.10.0
# hooks:
# - id: mypy
# additional_dependencies: ["types-requests"]
# args: ["--strict"]
111 changes: 111 additions & 0 deletions docker/orchestrate.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
#!/usr/bin/env bash

set -euo pipefail

### 🔧 ApexKit DNS Stack Orchestrator
# Modular, self-healing, idempotent stack manager for:
#   - cloudflared
#   - unbound
#   - pihole
# Supports dry-run, full rebuilds, diagnostics

# Constants
STACK_NAME="dns_stack"  # name used for both the docker network and the stack
# Absolute path of the directory containing this script (cd/pwd resolves it).
DIR="$(cd -- "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"

CLOUDFLARED_IMAGE="cloudflared:with-dig"  # local tag built by ensure_image
UNBOUND_IMAGE="apexkit-unbound:latest"    # local tag built by ensure_image

# Runtime flags — flipped by CLI options parsed in main().
DRY_RUN=false      # --dry-run: echo commands instead of executing them
RESET_NET=false    # --reset-net: tear down and recreate the docker network
REBUILD_ALL=false  # --rebuild: force --no-cache image rebuilds
SERVICES=(cloudflared unbound pihole)  # NOTE(review): not referenced anywhere below — confirm intent

# Helpers
# print MSG... — emit a tagged status line (echo -e enables escape sequences).
print() { echo -e "[💡] $*"; }
# run CMD-STRING — execute CMD-STRING via eval, or only echo it when
# DRY_RUN=true. NOTE: eval re-parses the string, so callers must pass
# trusted, pre-quoted command text only.
run() { $DRY_RUN && echo "[DRY-RUN] $*" || eval "$*"; }

# Validate required tools
# Abort with exit 1 if any CLI dependency (docker, dig) is missing.
require_tools() {
  for tool in docker dig; do
    command -v "$tool" >/dev/null || {
      echo "❌ Required tool missing: $tool"
      exit 1
    } # BUG FIX: the || group must be closed with '}' — a stray 'done' here was a syntax error
  done
}

# Docker network setup
# Ensure the stack's bridge network exists; with --reset-net, remove it
# first so it is recreated with the fixed subnet/gateway below.
ensure_network() {
  if docker network inspect "$STACK_NAME" &>/dev/null; then
    # Network already present: keep it unless a reset was requested.
    $RESET_NET || return 0
    print "Resetting docker network: $STACK_NAME"
    # Preserves original semantics: if removal fails, bail out quietly
    # instead of attempting to create a duplicate network.
    run "docker network rm $STACK_NAME" || return 0
  fi
  print "Creating docker network: $STACK_NAME"
  run "docker network create \
    --driver bridge \
    --subnet=172.19.0.0/24 \
    --gateway=172.19.0.1 \
    $STACK_NAME"
}

# Build image if missing
# ensure_image IMAGE DOCKERFILE — build IMAGE from DOCKERFILE (context $DIR)
# when it does not exist locally, or rebuild it with --no-cache when
# --rebuild was given. Returns 0 in every no-op path.
ensure_image() {
  local image=$1 dockerfile=$2
  if ! docker image inspect "$image" &>/dev/null; then
    print "Building image: $image"
    run "docker build -t $image -f $dockerfile $DIR"
  elif $REBUILD_ALL; then
    # BUG FIX: was '$REBUILD_ALL && { ... }', which made the function
    # return 1 when the image existed and --rebuild was NOT set — under
    # 'set -e' that aborted the whole script at the call site in main().
    print "Rebuilding image: $image"
    run "docker build --no-cache -t $image -f $dockerfile $DIR"
  fi
}

# Bring up the stack
# Launch (or reconcile) all services defined in the compose file, detached.
bring_up_stack() {
  local compose_file="$DIR/docker-compose.yml"
  print "Running docker-compose stack"
  run "docker-compose -f $compose_file up -d"
}

# Show container IPs
# Print each running container's name and its network IP addresses.
# NOTE(review): inspects *all* running containers, not just this stack's,
# and executes directly (does not honor --dry-run) — confirm intended.
show_ips() {
  print "Active container IPs:"
  # $(docker ps -q) is intentionally unquoted so each container ID expands
  # to a separate argument of docker inspect.
  docker inspect -f '{{.Name}} → {{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $(docker ps -q) | sed 's/^/ /'
}

# Sanity script run
# Mark the bundled sanity-check script executable, then execute it.
# Both steps go through run(), so --dry-run only echoes them.
run_tests() {
  local sanity="$DIR/tests/stack-sanity.sh"
  print "Running stack sanity checks..."
  run "chmod +x $sanity"
  run "$sanity"
}

# Main
# Parse CLI flags, then build images, create the network, launch the
# compose stack, report IPs, and run sanity checks.
main() {
  require_tools

  # Flags
  while [[ ${1:-} =~ ^- ]]; do
    case $1 in
      --dry-run) DRY_RUN=true ;;
      --rebuild) REBUILD_ALL=true ;;
      --reset-net) RESET_NET=true ;;
      --help)
        echo "Usage: $0 [--dry-run] [--rebuild] [--reset-net]"
        exit 0
        ;;
      *)
        # BUG FIX: unknown options were silently ignored; fail loudly so
        # typos like --rebulid do not go unnoticed.
        echo "Unknown option: $1" >&2
        echo "Usage: $0 [--dry-run] [--rebuild] [--reset-net]" >&2
        exit 1
        ;;
    esac
    shift
  done

  ensure_network
  # NOTE(review): DIR is already this script's directory; if the script
  # lives under docker/, these resolve to docker/docker/... — confirm the
  # Dockerfile locations before relying on them.
  ensure_image "$CLOUDFLARED_IMAGE" "$DIR/docker/cloudflared/Dockerfile"
  ensure_image "$UNBOUND_IMAGE" "$DIR/docker/unbound/Dockerfile"
  bring_up_stack
  show_ips
  run_tests
  print "✅ DNS stack setup complete."
}

main "$@"
Binary file modified nextlevelapex/.DS_Store
Binary file not shown.
12 changes: 4 additions & 8 deletions nextlevelapex/core/command.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
# ~/Projects/NextLevelApex/nextlevelapex/core/command.py

import logging
import shlex
import subprocess
from typing import Optional

from nextlevelapex.core.logger import LoggerProxy

Expand All @@ -20,7 +18,7 @@
self.stderr = stderr
self.success = success # True if returncode is 0 (or if check=False)

def __bool__(self):
def __bool__(self) -> bool:
"""Allows treating the result object as boolean for success."""
return self.success

Expand All @@ -31,8 +29,8 @@
check: bool = True, # If True, non-zero exit code is considered failure
capture: bool = True, # Capture stdout/stderr
text: bool = True, # Decode output as text
cwd: Optional[str] = None, # Working directory
env: Optional[dict] = None, # Environment variables
cwd: str | None = None, # Working directory
env: dict[str, str] | None = None, # Environment variables
) -> CommandResult:
"""
Runs an external command using subprocess.
Expand All @@ -55,7 +53,7 @@
if dry_run:
print(f"DRYRUN: Would execute: {cmd_str}")
# For dry run, assume success unless we add more complex checks later
return CommandResult(returncode=0, stdout="", stderr="", success=True)

Check failure

Code scanning / CodeQL

Clear-text logging of sensitive information High

This expression logs
sensitive data (password)
as clear text.

try:
process = subprocess.run(
Expand Down Expand Up @@ -98,7 +96,5 @@
success=False,
)
except Exception as e:
log.error(
f"An unexpected error occurred running command: {cmd_str}", exc_info=True
)
log.error(f"An unexpected error occurred running command: {cmd_str}", exc_info=True)
return CommandResult(returncode=-1, stdout="", stderr=str(e), success=False)
20 changes: 13 additions & 7 deletions nextlevelapex/core/config.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
# ~/Projects/NextLevelApex/nextlevelapex/core/config.py

import json
import logging
from importlib import resources
from pathlib import Path
from typing import Any, Dict
from typing import Any

import jsonschema
from jsonschema import Draft7Validator
Expand Down Expand Up @@ -75,11 +74,18 @@ def _set_defaults(validator, properties, instance, schema):
if "default" in subschema:
instance.setdefault(prop, subschema["default"])

for error in _default_properties(validator, properties, instance, schema):
yield error

# after

def _deep_update(base: dict, updates: dict):

def _validate_properties(validator, properties, instance, schema):
    """Run the stock jsonschema ``properties`` validator unchanged.

    Thin pass-through wrapper: every validation error produced by the
    default ``properties`` validator is re-yielded as-is, with no
    default-value injection.
    """
    for error in _default_properties(validator, properties, instance, schema):
        yield error


def _deep_update(base: dict[str, Any], updates: dict[str, Any]) -> None:
"""
Recursively update base with updates (mutates base).
"""
Expand All @@ -90,15 +96,15 @@ def _deep_update(base: dict, updates: dict):
base[k] = v


def load_config(config_path: Path = DEFAULT_CONFIG_PATH) -> Dict[str, Any]:
def load_config(config_path: Path = DEFAULT_CONFIG_PATH) -> dict[str, Any]:
"""
Loads and validates configuration against our JSON Schema.
Fills in any missing properties with the schema’s own default values.
"""
log.info(f"Attempting to load configuration from: {config_path}")

# 1) Start with an empty dict
config: Dict[str, Any] = {}
config: dict[str, Any] = {}

# 2) Build two validators:
# - inject_validator: uses _set_defaults to populate defaults
Expand Down
Loading
Loading