diff --git a/.forgejo/workflows/ci.yml b/.forgejo/workflows/ci.yml index 54f3952..6a33561 100644 --- a/.forgejo/workflows/ci.yml +++ b/.forgejo/workflows/ci.yml @@ -33,7 +33,9 @@ jobs: run: cargo build - name: Run tests - run: cargo test + run: | + cargo test + node --test tests/*.test.js - name: Verify package contents run: ./scripts/verify-package.sh diff --git a/.forgejo/workflows/release.yml b/.forgejo/workflows/release.yml index e3c7558..5264ffd 100644 --- a/.forgejo/workflows/release.yml +++ b/.forgejo/workflows/release.yml @@ -29,7 +29,9 @@ jobs: run: cargo build --release - name: Run tests - run: cargo test + run: | + cargo test + node --test tests/*.test.js create-release: name: Create Release diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 42f057e..b94ed5b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,7 +47,9 @@ jobs: run: cargo build - name: Run tests - run: cargo test + run: | + cargo test + node --test tests/*.test.js - name: Verify package contents run: ./scripts/verify-package.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 656079f..b1ac0c7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -48,7 +48,9 @@ jobs: run: cargo build --release - name: Run tests - run: cargo test + run: | + cargo test + node --test tests/*.test.js create-release: name: Create GitHub Release diff --git a/Cargo.toml b/Cargo.toml index 8c3c2d3..2a6a4b9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,4 +24,4 @@ include_dir = "0.7" [dev-dependencies] tokio = { version = "1", features = ["macros", "rt-multi-thread"] } -tower = "0.5" +tower = { version = "0.5", features = ["util"] } diff --git a/Makefile b/Makefile index b40bd1f..169c6d5 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ VERSIONED_CSS := static/sf/sf.$(VERSION).css VERSIONED_JS := static/sf/sf.$(VERSION).js # ============== Phony Targets ============== -.PHONY: banner help assets build 
build-release test test-quick test-doc test-unit test-one \ +.PHONY: banner help assets build build-release test test-quick test-doc test-unit test-frontend test-one \ lint fmt fmt-check clippy ci-local pre-release version package-verify \ bump-patch bump-minor bump-major bump-dry demo-serve \ publish-dry publish clean watch @@ -84,6 +84,7 @@ test: banner @printf "$(CYAN)$(BOLD)╚══════════════════════════════════════╝$(RESET)\n\n" @printf "$(ARROW) $(BOLD)Running all tests...$(RESET)\n" @cargo test && \ + node --test tests/*.test.js && \ printf "\n$(GREEN)$(CHECK) All tests passed$(RESET)\n\n" || \ (printf "\n$(RED)$(CROSS) Tests failed$(RESET)\n\n" && exit 1) @@ -99,6 +100,10 @@ test-quick: banner @cargo test --lib --quiet && \ printf "$(GREEN)$(CHECK) Unit tests passed$(RESET)\n\n" || \ (printf "$(RED)$(CROSS) Unit tests failed$(RESET)\n\n" && exit 1) + @printf "$(PROGRESS) Running frontend tests...\n" + @node --test tests/*.test.js && \ + printf "$(GREEN)$(CHECK) Frontend tests passed$(RESET)\n\n" || \ + (printf "$(RED)$(CROSS) Frontend tests failed$(RESET)\n\n" && exit 1) test-doc: @printf "$(PROGRESS) Running doctests...\n" @@ -112,6 +117,12 @@ test-unit: printf "$(GREEN)$(CHECK) Unit tests passed$(RESET)\n" || \ (printf "$(RED)$(CROSS) Unit tests failed$(RESET)\n" && exit 1) +test-frontend: + @printf "$(PROGRESS) Running frontend tests...\n" + @node --test tests/*.test.js && \ + printf "$(GREEN)$(CHECK) Frontend tests passed$(RESET)\n" || \ + (printf "$(RED)$(CROSS) Frontend tests failed$(RESET)\n" && exit 1) + test-one: @printf "$(PROGRESS) Running test: $(YELLOW)$(TEST)$(RESET)\n" @RUST_LOG=info cargo test $(TEST) -- --nocapture @@ -155,8 +166,11 @@ ci-local: banner @$(MAKE) clippy --no-print-directory - @printf "$(PROGRESS) Step 5/6: Doctests...\n" + @printf "$(PROGRESS) Step 5/7: Doctests...\n" @cargo test --doc --quiet && printf "$(GREEN)$(CHECK) Doctests passed$(RESET)\n" - @printf "$(PROGRESS) Step 6/6: Unit tests...\n" + @printf "$(PROGRESS) Step 6/7: Unit tests...\n" @cargo test --lib --quiet && 
printf "$(GREEN)$(CHECK) Unit tests passed$(RESET)\n" + @printf "$(PROGRESS) Step 7/7: Frontend tests...\n" + @node --test tests/*.test.js && printf "$(GREEN)$(CHECK) Frontend tests passed$(RESET)\n" @printf "\n$(GREEN)$(BOLD)╔══════════════════════════════════════════════════════════╗$(RESET)\n" @printf "$(GREEN)$(BOLD)║ $(CHECK) CI SIMULATION PASSED ║$(RESET)\n" @printf "$(GREEN)$(BOLD)╚══════════════════════════════════════════════════════════╝$(RESET)\n\n" @@ -196,7 +209,7 @@ pre-release: banner @$(MAKE) fmt-check --no-print-directory @$(MAKE) clippy --no-print-directory @printf "$(PROGRESS) Running full test suite...\n" - @cargo test --quiet && printf "$(GREEN)$(CHECK) All tests passed$(RESET)\n" + @cargo test --quiet && node --test tests/*.test.js && printf "$(GREEN)$(CHECK) All tests passed$(RESET)\n" @printf "$(PROGRESS) Dry-run publish...\n" @cargo publish --dry-run 2>&1 | tail -1 @printf "$(PROGRESS) Verifying packaged contents...\n" @@ -273,6 +286,7 @@ help: banner @/bin/echo -e " $(GREEN)make test-quick$(RESET) - Run doctests + unit tests (fast)" @/bin/echo -e " $(GREEN)make test-doc$(RESET) - Run doctests only" @/bin/echo -e " $(GREEN)make test-unit$(RESET) - Run unit tests only" + @/bin/echo -e " $(GREEN)make test-frontend$(RESET) - Run frontend Node tests" @/bin/echo -e " $(GREEN)make test-one TEST=name$(RESET) - Run specific test with output" @/bin/echo -e "" @/bin/echo -e "$(CYAN)$(BOLD)Lint & Format:$(RESET)" diff --git a/README.md b/README.md index 354fb58..dc40a98 100644 --- a/README.md +++ b/README.md @@ -53,6 +53,14 @@ Every backend element has a corresponding UI element. The library grows alongside the solver. When you scaffold a new SolverForge project with `solverforge new`, it's already wired in. +## Testing + +Repository coverage now includes the embedded Rust asset routes plus Node-based +frontend tests for backend adapters, solver lifecycle transitions, and core +component rendering. 
Use `make test` for the full suite, `make test-quick` for +Rust doctests and unit tests plus frontend coverage, or `make test-frontend` +when you only want the JavaScript suite. + ## Quick Start ```html diff --git a/src/lib.rs b/src/lib.rs index 008f959..f61a9d1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -192,4 +192,93 @@ mod tests { assert_eq!(missing_resp.status(), StatusCode::NOT_FOUND); } + + #[tokio::test] + async fn serves_top_level_assets_with_short_cache_and_expected_mime() { + let response = routes() + .oneshot( + Request::builder() + .uri("/sf/sf.css") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get(header::CONTENT_TYPE).unwrap(), + "text/css; charset=utf-8" + ); + assert_eq!( + response.headers().get(header::CACHE_CONTROL).unwrap(), + "public, max-age=3600" + ); + + let body = to_bytes(response.into_body(), usize::MAX).await.unwrap(); + let css = String::from_utf8(body.to_vec()).unwrap(); + assert!(css.contains("--sf-emerald-50")); + assert!(css.contains(".sf-gantt-split")); + } + + #[tokio::test] + async fn serves_immutable_assets_with_long_cache_and_expected_mime() { + let image = routes() + .oneshot( + Request::builder() + .method(Method::GET) + .uri("/sf/img/ouroboros.svg") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(image.status(), StatusCode::OK); + assert_eq!( + image.headers().get(header::CONTENT_TYPE).unwrap(), + "image/svg+xml" + ); + assert_eq!( + image.headers().get(header::CACHE_CONTROL).unwrap(), + "public, max-age=31536000, immutable" + ); + + let vendor = routes() + .oneshot( + Request::builder() + .method(Method::GET) + .uri("/sf/vendor/frappe-gantt/frappe-gantt.min.js") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(vendor.status(), StatusCode::OK); + assert_eq!( + vendor.headers().get(header::CONTENT_TYPE).unwrap(), + "application/javascript; charset=utf-8" + ); + 
assert_eq!( + vendor.headers().get(header::CACHE_CONTROL).unwrap(), + "public, max-age=31536000, immutable" + ); + } + + #[tokio::test] + async fn returns_not_found_for_missing_assets() { + let response = routes() + .oneshot( + Request::builder() + .method(Method::GET) + .uri("/sf/does-not-exist.js") + .body(Body::empty()) + .unwrap(), + ) + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::NOT_FOUND); + } } diff --git a/tests/frontend.test.js b/tests/frontend.test.js new file mode 100644 index 0000000..386f7f7 --- /dev/null +++ b/tests/frontend.test.js @@ -0,0 +1,266 @@ +const assert = require('node:assert/strict'); +const fs = require('node:fs'); +const path = require('node:path'); +const test = require('node:test'); +const vm = require('node:vm'); + +const { createDom } = require('./support/fake-dom'); + +const ROOT = path.resolve(__dirname, '..'); + +function loadSf(files, overrides = {}) { + const { document, window, Node } = createDom(); + const context = vm.createContext({ + console, + document, + window, + Node, + setTimeout, + clearTimeout, + Promise, + ...overrides, + }); + + files.forEach((file) => { + const source = fs.readFileSync(path.join(ROOT, file), 'utf8'); + vm.runInContext(source, context, { filename: file }); + }); + + return { SF: context.window.SF, context, document }; +} + +async function flush() { + await Promise.resolve(); + await Promise.resolve(); +} + +test('HTTP backend uses configured paths, headers, and event stream parsing', async () => { + const requests = []; + const streams = []; + class FakeEventSource { + constructor(url) { + this.url = url; + this.closed = false; + this.onmessage = null; + streams.push(this); + } + + close() { + this.closed = true; + } + } + + const fetch = async (url, options) => { + requests.push({ url, options }); + if (url === '/api/jobs') { + return { + ok: true, + status: 200, + statusText: 'OK', + headers: { + get(name) { + return name === 'content-type' ? 
'application/json' : null; + }, + }, + json: async () => ({ id: 'job-7' }), + text: async () => 'ignored', + }; + } + return { + ok: true, + status: 200, + statusText: 'OK', + headers: { + get(name) { + return name === 'content-type' ? 'application/json' : null; + }, + }, + json: async () => ({ ok: true, url }), + text: async () => 'ignored', + }; + }; + + const { SF } = loadSf(['js-src/00-core.js', 'js-src/10-backend.js'], { + EventSource: FakeEventSource, + fetch, + }); + + const backend = SF.createBackend({ + type: 'axum', + baseUrl: '/api', + schedulesPath: '/jobs', + demoDataPath: '/fixtures', + headers: { Authorization: 'Bearer token' }, + }); + + const created = await backend.createSchedule({ plan: 1 }); + const listed = await backend.listDemoData(); + + assert.equal(created, 'job-7'); + assert.deepEqual(listed, { ok: true, url: '/api/fixtures' }); + assert.equal(requests[0].options.method, 'POST'); + assert.equal(requests[0].options.headers['Content-Type'], 'application/json'); + assert.equal(requests[0].options.headers.Authorization, 'Bearer token'); + assert.equal(requests[0].options.body, JSON.stringify({ plan: 1 })); + assert.equal(requests[1].options.method, 'GET'); + + let streamed = null; + const close = backend.streamEvents('job-7', (payload) => { + streamed = payload; + }); + assert.equal(typeof close, 'function'); + assert.equal(streams[0].url, '/api/jobs/job-7/events'); + streams[0].onmessage({ data: JSON.stringify({ score: '0hard/-1soft' }) }); + assert.equal(streamed.score, '0hard/-1soft'); + close(); + assert.equal(streams[0].closed, true); +}); + +test('solver lifecycle updates status callbacks during start, streaming, completion, and stop', async () => { + const { SF } = loadSf(['js-src/00-core.js', 'js-src/10-backend.js', 'js-src/11-solver.js']); + const calls = []; + const statusBar = { + setSolving(value) { + calls.push(['setSolving', value]); + }, + updateMoves(value) { + calls.push(['updateMoves', value]); + }, + updateScore(value) { + 
calls.push(['updateScore', value]); + }, + colorDotsFromAnalysis(value) { + calls.push(['colorDotsFromAnalysis', value]); + }, + }; + + let onMessage; + let streamClosed = false; + const backend = { + createSchedule: async () => 'job-42', + streamEvents(id, callback) { + calls.push(['streamEvents', id]); + onMessage = callback; + return () => { + streamClosed = true; + }; + }, + getSchedule: async () => ({ id: 'job-42', score: '0hard/-1soft' }), + analyze: async () => ({ constraints: [{ name: 'hard-1', type: 'hard', score: '-1hard' }] }), + deleteSchedule: async (id) => { + calls.push(['deleteSchedule', id]); + }, + }; + + const updates = []; + const completions = []; + const analyses = []; + const solver = SF.createSolver({ + backend, + statusBar, + onUpdate(schedule) { + updates.push(schedule); + }, + onComplete(schedule) { + completions.push(schedule); + }, + onAnalysis(analysis) { + analyses.push(analysis); + }, + }); + + solver.start({ demand: 5 }); + await flush(); + + assert.equal(solver.isRunning(), true); + assert.equal(solver.getJobId(), 'job-42'); + assert.deepEqual(calls.slice(0, 3), [ + ['setSolving', true], + ['updateMoves', null], + ['streamEvents', 'job-42'], + ]); + + onMessage({ score: '0hard/-2soft', movesPerSecond: 12 }); + await flush(); + assert.equal(updates.length, 1); + assert.equal(updates[0].score, '0hard/-2soft'); + assert.equal(updates[0].movesPerSecond, 12); + + onMessage({ solverStatus: 'NOT_SOLVING' }); + await flush(); + assert.equal(streamClosed, true); + assert.equal(solver.isRunning(), false); + assert.equal(solver.getJobId(), null); + assert.equal(completions.length, 1); + assert.deepEqual(completions[0], { id: 'job-42', score: '0hard/-1soft' }); + + solver.start({}); + await flush(); + solver.stop(); + await flush(); + + assert.equal(solver.isRunning(), false); + assert.equal(analyses.length, 1); + assert.equal(analyses[0].constraints.length, 1); + assert.deepEqual(calls.slice(-4), [ + ['setSolving', false], + ['updateMoves', 
null], + ['colorDotsFromAnalysis', [{ name: 'hard-1', type: 'hard', score: '-1hard' }]], + ['deleteSchedule', 'job-42'], + ]); +}); + +test('button, table, and tabs render and respond to basic interactions', () => { + const { SF, document } = loadSf([ + 'js-src/00-core.js', + 'js-src/03-buttons.js', + 'js-src/07-tabs.js', + 'js-src/08-table.js', + ]); + + let clicked = 0; + const button = SF.createButton({ + text: 'Solve', + variant: 'success', + size: 'small', + id: 'solve-btn', + dataset: { action: 'solve' }, + onClick() { + clicked += 1; + }, + }); + button.click(); + assert.equal(button.classList.contains('sf-btn--success'), true); + assert.equal(button.classList.contains('sf-btn--sm'), true); + assert.equal(button.id, 'solve-btn'); + assert.equal(button.dataset.action, 'solve'); + assert.equal(button.textContent, 'Solve'); + assert.equal(clicked, 1); + + let selectedRow = null; + const table = SF.createTable({ + columns: [{ label: 'Job', className: 'job-col' }, { label: 'Status', align: 'right' }], + rows: [['A-1', 'Ready']], + onRowClick(index, row) { + selectedRow = { index, row }; + }, + }); + document.body.appendChild(table); + const row = table.querySelectorAll('tr')[1]; + row.click(); + assert.deepEqual(selectedRow, { index: 0, row: ['A-1', 'Ready'] }); + assert.equal(table.querySelectorAll('th').length, 2); + assert.equal(table.querySelectorAll('td')[0].textContent, 'A-1'); + assert.equal(table.querySelectorAll('td')[1].style.textAlign, 'right'); + + const tabs = SF.createTabs({ + tabs: [ + { id: 'plan', active: true, content: 'Plan' }, + { id: 'gantt', content: 'Gantt' }, + ], + }); + document.body.appendChild(tabs.el); + SF.showTab('gantt'); + assert.equal(tabs.el.querySelector('[data-tab-id="plan"]').classList.contains('active'), false); + assert.equal(tabs.el.querySelector('[data-tab-id="gantt"]').classList.contains('active'), true); +}); diff --git a/tests/instance-local-dom.test.js b/tests/instance-local-dom.test.js index 5abbb5f..a419229 100644 
--- a/tests/instance-local-dom.test.js +++ b/tests/instance-local-dom.test.js @@ -265,7 +265,14 @@ test('gantt initSplit keeps accepting scalar splitMinSize values', () => { }); test('gantt sortable columns render and reorder grid rows without throwing', () => { - const { SF } = loadSf(['js-src/00-core.js', 'js-src/14-gantt.js']); + const { SF } = loadSf(['js-src/00-core.js', 'js-src/14-gantt.js'], { + Gantt: function () { + return { + change_view_mode() {}, + refresh() {}, + }; + }, + }); const gantt = SF.gantt.create({ columns: [ diff --git a/tests/support/fake-dom.js b/tests/support/fake-dom.js index 23b4b0d..6c0b5e2 100644 --- a/tests/support/fake-dom.js +++ b/tests/support/fake-dom.js @@ -4,6 +4,8 @@ class FakeTextNode extends FakeNode { constructor(text) { super(); this.parentNode = null; + this.ownerDocument = null; + this.nodeType = 3; this._text = String(text); } @@ -23,7 +25,9 @@ class FakeClassList { } add(...tokens) { - tokens.forEach((token) => token && this.values.add(token)); + tokens.forEach((token) => { + if (token) this.values.add(token); + }); this.owner._syncClassName(); } @@ -115,6 +119,18 @@ class FakeElement extends FakeNode { this.childNodes = []; } + get children() { + return this.childNodes.filter((child) => child instanceof FakeElement); + } + + get firstChild() { + return this.childNodes[0] || null; + } + + get lastChild() { + return this.childNodes[this.childNodes.length - 1] || null; + } + get textContent() { if (this.childNodes.length === 0) return ''; return this.childNodes.map((child) => child.textContent).join(''); @@ -127,9 +143,9 @@ class FakeElement extends FakeNode { } appendChild(child) { - if (!child) return child; + if (child == null) return child; child.parentNode = this; - if (child instanceof FakeElement) child.ownerDocument = this.ownerDocument; + child.ownerDocument = this.ownerDocument; this.childNodes.push(child); this._innerHTML = ''; return child; @@ -197,13 +213,13 @@ class FakeDocument { } createElement(tagName) { 
- var element = new FakeElement(tagName); + const element = new FakeElement(tagName); element.ownerDocument = this; return element; } createElementNS(namespaceURI, tagName) { - var element = new FakeElement(tagName, namespaceURI); + const element = new FakeElement(tagName, namespaceURI); element.ownerDocument = this; return element; } @@ -226,7 +242,7 @@ class FakeDocument { } function walk(node, visit) { - node.childNodes.forEach(function (child) { + node.childNodes.forEach((child) => { visit(child); if (child instanceof FakeElement) walk(child, visit); }); @@ -237,8 +253,8 @@ function matchesSelector(node, selector) { if (dataMatch) { return node.dataset[toCamel(dataMatch[1])] === dataMatch[2]; } - if (selector.charAt(0) === '.') return node.classList.contains(selector.slice(1)); - if (selector.charAt(0) === '#') return node.id === selector.slice(1); + if (selector.startsWith('.')) return node.classList.contains(selector.slice(1)); + if (selector.startsWith('#')) return node.id === selector.slice(1); return node.tagName.toLowerCase() === selector.toLowerCase(); } @@ -249,8 +265,8 @@ function toCamel(value) { } function createDom() { - var document = new FakeDocument(); - return { document: document, window: { document: document }, Node: FakeNode }; + const document = new FakeDocument(); + return { document, window: { document }, Node: FakeNode }; } -module.exports = { createDom, FakeElement, FakeNode }; +module.exports = { createDom, FakeElement, FakeNode, FakeTextNode };