diff --git a/.github/workflows/check-types.yml b/.github/workflows/check-types.yml index 2ee0f1d..8aca13e 100644 --- a/.github/workflows/check-types.yml +++ b/.github/workflows/check-types.yml @@ -10,9 +10,9 @@ jobs: check-types: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: pnpm/action-setup@v4 - - uses: actions/setup-node@v4 + - uses: actions/checkout@v6 + - uses: pnpm/action-setup@v5 + - uses: actions/setup-node@v6 with: node-version: 24 cache: 'pnpm' diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml new file mode 100644 index 0000000..d456aa8 --- /dev/null +++ b/.github/workflows/e2e.yml @@ -0,0 +1,39 @@ +name: E2E Tests + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + e2e: + runs-on: ubuntu-latest + timeout-minutes: 15 + defaults: + run: + working-directory: demo + + steps: + - uses: actions/checkout@v6 + - uses: pnpm/action-setup@v5 + - uses: actions/setup-node@v6 + with: + node-version: 24 + cache: 'pnpm' + + - run: pnpm install + working-directory: . 
+ + - name: Install Playwright browsers + run: pnpm exec playwright install --with-deps chromium + + - name: Run e2e tests + run: pnpm test:e2e + + - uses: actions/upload-artifact@v7 + if: failure() + with: + name: playwright-report + path: demo/playwright-report/ + retention-days: 30 diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 80ed72d..2fc1fb7 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -10,9 +10,9 @@ jobs: format: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: pnpm/action-setup@v4 - - uses: actions/setup-node@v4 + - uses: actions/checkout@v6 + - uses: pnpm/action-setup@v5 + - uses: actions/setup-node@v6 with: node-version: 24 cache: 'pnpm' diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 283ce61..e0ea070 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -10,10 +10,9 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: pnpm/action-setup@v4 - - - uses: actions/setup-node@v4 + - uses: actions/checkout@v6 + - uses: pnpm/action-setup@v5 + - uses: actions/setup-node@v6 with: node-version: 24 cache: 'pnpm' diff --git a/.gitignore b/.gitignore index de648b4..430c94d 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ dist/ *.db-wal2 rocicorp-zero-virtual-*.tgz docs +.last-run.json diff --git a/demo/.env b/demo/.env index c7ed767..b9e6165 100644 --- a/demo/.env +++ b/demo/.env @@ -2,3 +2,5 @@ AUTH_SECRET="abcd" ZERO_UPSTREAM_DB="postgresql://user:password@127.0.0.1:5430/postgres" ZERO_QUERY_URL="http://localhost:*/api/zero/query" ZERO_MUTATE_URL="http://localhost:*/api/zero/mutate" +VITE_PUBLIC_CACHE_PORT=5858 + diff --git a/demo/e2e/global-setup.ts b/demo/e2e/global-setup.ts new file mode 100644 index 0000000..b0d22a0 --- /dev/null +++ b/demo/e2e/global-setup.ts @@ -0,0 +1,180 @@ +import {spawn} from 'node:child_process'; +import { + existsSync, + mkdirSync, + readFileSync, + rmSync, + 
writeFileSync, +} from 'node:fs'; +import * as net from 'node:net'; +import {join} from 'node:path'; +import {fileURLToPath} from 'node:url'; +import pg from 'pg'; +import {seedTestDb} from './seed-test.ts'; + +const DEMO_DIR = fileURLToPath(new URL('..', import.meta.url)); + +// Replica dir is wiped on each run so zero-cache starts with clean data. +const REPLICA_DIR = '/tmp/zero-playwright-replica'; +export const REPLICA_FILE = join(REPLICA_DIR, 'replica'); + +// PID file lets globalTeardown kill the zero-cache process. +export const PID_FILE = '/tmp/zero-playwright.pid'; + +export default async function globalSetup(): Promise { + console.log('\n[setup] Starting postgres...'); + await startPostgres(); + + console.log('[setup] Waiting for postgres...'); + await waitForPort(5430); + await waitForPostgres(); + + console.log('[setup] Seeding test data...'); + await seedTestDb(process.env['ZERO_UPSTREAM_DB']!); + + console.log('[setup] Clearing zero-cache replica...'); + killExistingZeroCache(); + if (existsSync(REPLICA_DIR)) { + rmSync(REPLICA_DIR, {recursive: true, force: true}); + } + mkdirSync(REPLICA_DIR, {recursive: true}); + + const port = Number(process.env['VITE_PUBLIC_CACHE_PORT'] ?? 
5858); + console.log('[setup] Starting zero-cache...'); + const zeroCacheProc = spawnZeroCache(port); + writeFileSync(PID_FILE, String(zeroCacheProc.pid)); + + console.log(`[setup] Waiting for zero-cache on port ${port}...`); + await waitForPort(port, 60_000); + console.log('[setup] Ready.\n'); +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +async function startPostgres(): Promise { + await new Promise((resolve, reject) => { + const proc = spawn( + 'docker', + [ + 'compose', + '--env-file', + '.env', + '-f', + './docker/docker-compose.yml', + 'up', + '-d', + ], + {cwd: DEMO_DIR, stdio: 'inherit'}, + ); + proc.on('exit', code => { + if (code === 0) resolve(); + else reject(new Error(`docker compose up exited with code ${code}`)); + }); + proc.on('error', reject); + }); +} + +async function waitForPostgres(timeoutMs = 30_000): Promise { + const connStr = process.env['ZERO_UPSTREAM_DB']!; + const deadline = Date.now() + timeoutMs; + while (Date.now() < deadline) { + try { + const pool = new pg.Pool({connectionString: connStr, max: 1}); + const client = await pool.connect(); + client.release(); + await pool.end(); + return; + } catch { + await sleep(500); + } + } + throw new Error('Postgres not ready within timeout'); +} + +function waitForPort(port: number, timeoutMs = 30_000): Promise { + return new Promise((resolve, reject) => { + const deadline = Date.now() + timeoutMs; + + function tryConnect() { + const socket = new net.Socket(); + socket.setTimeout(1_000); + + socket.on('connect', () => { + socket.destroy(); + resolve(); + }); + + const retry = () => { + socket.destroy(); + if (Date.now() >= deadline) { + reject(new Error(`Port ${port} not available after ${timeoutMs}ms`)); + return; + } + setTimeout(tryConnect, 500); + }; + + socket.on('timeout', retry); + socket.on('error', retry); + socket.connect(port, '127.0.0.1'); + } + + 
tryConnect(); + }); +} + +function spawnZeroCache(port: number) { + const binDir = join(DEMO_DIR, 'node_modules', '.bin'); + const env: NodeJS.ProcessEnv = { + ...process.env, + // Ensure node_modules/.bin is on PATH so zero-cache-dev can find zero-cache. + PATH: `${binDir}:${process.env['PATH'] ?? ''}`, + ZERO_REPLICA_FILE: REPLICA_FILE, + ZERO_LOG_LEVEL: 'error', + }; + + // Prefer the local bin so we use the exact version pinned in demo/package.json. + const bin = join(DEMO_DIR, 'node_modules', '.bin', 'zero-cache-dev'); + const command = existsSync(bin) ? bin : 'zero-cache-dev'; + + const proc = spawn(command, ['--port', String(port)], { + cwd: DEMO_DIR, + env, + stdio: ['ignore', 'pipe', 'pipe'], + detached: true, + }); + + proc.stdout?.on('data', (d: Buffer) => + process.stdout.write(`[zero-cache] ${d}`), + ); + proc.stderr?.on('data', (d: Buffer) => + process.stderr.write(`[zero-cache] ${d}`), + ); + + proc.on('exit', code => { + if (code !== null && code !== 0) { + console.error(`[zero-cache] exited with code ${code}`); + } + }); + + proc.unref(); + return proc; +} + +function killExistingZeroCache(): void { + if (!existsSync(PID_FILE)) return; + const pid = parseInt(readFileSync(PID_FILE, 'utf-8').trim(), 10); + if (!isNaN(pid)) { + try { + process.kill(pid, 'SIGTERM'); + } catch { + // Process may already be gone — that's fine. 
+ } + } + rmSync(PID_FILE, {force: true}); +} + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); +} diff --git a/demo/e2e/global-teardown.ts b/demo/e2e/global-teardown.ts new file mode 100644 index 0000000..c363b95 --- /dev/null +++ b/demo/e2e/global-teardown.ts @@ -0,0 +1,17 @@ +import {existsSync, readFileSync, rmSync} from 'node:fs'; +import {PID_FILE} from './global-setup.ts'; + +export default async function globalTeardown(): Promise { + if (!existsSync(PID_FILE)) return; + + const pid = parseInt(readFileSync(PID_FILE, 'utf-8').trim(), 10); + if (!isNaN(pid)) { + try { + process.kill(pid, 'SIGTERM'); + console.log(`[teardown] Stopped zero-cache (pid ${pid})`); + } catch { + // Already gone — that's fine. + } + } + rmSync(PID_FILE, {force: true}); +} diff --git a/demo/e2e/seed-test.ts b/demo/e2e/seed-test.ts new file mode 100644 index 0000000..f010b9f --- /dev/null +++ b/demo/e2e/seed-test.ts @@ -0,0 +1,167 @@ +import pg from 'pg'; + +// Fixed base timestamp: 2023-11-14T22:13:20.000Z +const BASE = 1_700_000_000_000; +const H = 3_600_000; // 1 hour in ms + +export type TestItem = { + id: string; + title: string; + description: string; + created: number; + modified: number; +}; + +// Named items (1–10): Alpha through Kappa. +// +// Within this group created and modified are inverses of each other. +// Within the extra group they are also inverses. +// This means all four sort orderings produce a distinct first row: +// +// modified DESC (default) → Alpha Item (modified = BASE+10H, highest) +// modified ASC → Test Item 200 (modified = BASE−190H, lowest) +// created DESC → Kappa Item (created = BASE+10H, highest) +// created ASC → Test Item 011 (created = BASE−190H, lowest) +// +// The 200 items also give the virtualizer enough rows to exercise paging +// (the min page size in the demo is 100). 
+const NAMED: TestItem[] = [ + { + id: 'tstitem001', + title: 'Alpha Item', + description: 'Alpha test item.', + created: BASE + 1 * H, + modified: BASE + 10 * H, + }, + { + id: 'tstitem002', + title: 'Beta Item', + description: 'Beta test item.', + created: BASE + 2 * H, + modified: BASE + 9 * H, + }, + { + id: 'tstitem003', + title: 'Gamma Item', + description: 'Gamma test item.', + created: BASE + 3 * H, + modified: BASE + 8 * H, + }, + { + id: 'tstitem004', + title: 'Delta Item', + description: 'Delta test item.', + created: BASE + 4 * H, + modified: BASE + 7 * H, + }, + { + id: 'tstitem005', + title: 'Epsilon Item', + description: 'Epsilon test item.', + created: BASE + 5 * H, + modified: BASE + 6 * H, + }, + { + id: 'tstitem006', + title: 'Zeta Item', + description: 'Zeta test item.', + created: BASE + 6 * H, + modified: BASE + 5 * H, + }, + { + id: 'tstitem007', + title: 'Eta Item', + description: 'Eta test item.', + created: BASE + 7 * H, + modified: BASE + 4 * H, + }, + { + id: 'tstitem008', + title: 'Theta Item', + description: 'Theta test item.', + created: BASE + 8 * H, + modified: BASE + 3 * H, + }, + { + id: 'tstitem009', + title: 'Iota Item', + description: 'Iota test item.', + created: BASE + 9 * H, + modified: BASE + 2 * H, + }, + { + id: 'tstitem010', + title: 'Kappa Item', + description: 'Kappa test item.', + created: BASE + 10 * H, + modified: BASE + 1 * H, + }, +]; + +// Extra items (11–200): programmatically generated with inverted +// created/modified so the extremes are Test Item 011 and Test Item 200. 
+// +// i=11: created = BASE−190H (lowest created), modified = BASE−1H +// i=200: created = BASE−1H, modified = BASE−190H (lowest modified) +const EXTRA: TestItem[] = Array.from({length: 190}, (_, k) => { + const i = k + 11; // 11..200 + const n = String(i).padStart(3, '0'); + return { + id: `tstitem${n}`, + title: `Test Item ${n}`, + description: `Test item ${n} description.`, + created: BASE - (201 - i) * H, + modified: BASE - (i - 10) * H, + }; +}); + +export const TEST_ITEMS: TestItem[] = [...NAMED, ...EXTRA]; + +export async function seedTestDb(connectionString: string): Promise { + const pool = new pg.Pool({connectionString}); + const client = await pool.connect(); + try { + await client.query('BEGIN'); + + // Drop stale logical replication slots so zero-cache can start fresh. + // Terminate any backends using the slots first, then drop them. + await client.query(` + SELECT pg_terminate_backend(active_pid) + FROM pg_replication_slots + WHERE slot_type = 'logical' AND active + `); + await client.query(` + SELECT pg_drop_replication_slot(slot_name) + FROM pg_replication_slots + WHERE slot_type = 'logical' + `); + + await client.query('DROP TABLE IF EXISTS item CASCADE'); + await client.query(` + CREATE TABLE item ( + id VARCHAR PRIMARY KEY, + title VARCHAR NOT NULL, + description VARCHAR NOT NULL, + created FLOAT8 NOT NULL, + modified FLOAT8 NOT NULL + ) + `); + + for (const item of TEST_ITEMS) { + await client.query( + `INSERT INTO item (id, title, description, created, modified) + VALUES ($1, $2, $3, $4, $5)`, + [item.id, item.title, item.description, item.created, item.modified], + ); + } + + await client.query('COMMIT'); + console.log(`Seeded ${TEST_ITEMS.length} test items`); + } catch (e) { + await client.query('ROLLBACK'); + throw e; + } finally { + client.release(); + await pool.end(); + } +} diff --git a/demo/e2e/tests/app.spec.ts b/demo/e2e/tests/app.spec.ts new file mode 100644 index 0000000..603ae44 --- /dev/null +++ b/demo/e2e/tests/app.spec.ts @@ 
-0,0 +1,37 @@ +import {expect, test} from '@playwright/test'; + +test.describe('App', () => { + test.beforeEach(async ({page}) => { + await page.goto('/'); + }); + + test('shows the page heading', async ({page}) => { + await expect(page.getByRole('heading', {level: 1})).toContainText( + 'Zero Virtual Demo', + ); + }); + + test('shows the correct item count', async ({page}) => { + // The virtualizer lazy-loads pages, so the initial count may be an + // estimate of the first page only (e.g. "(~100)"). Just verify that + // some item count is displayed in the heading. + await expect(page.getByText(/\(~?\d+\)/)).toBeVisible({timeout: 15_000}); + }); + + test('renders list rows', async ({page}) => { + // Wait for the first real row (an element, not a placeholder
). + await expect( + page.locator('[class*="viewport"] a[href^="#"]').first(), + ).toBeVisible({ + timeout: 15_000, + }); + }); + + test('default sort is modified descending — Alpha Item is first', async ({ + page, + }) => { + // Alpha Item has the highest modified timestamp so it should be first. + const firstRow = page.getByRole('link', {name: 'Alpha Item'}); + await expect(firstRow).toBeVisible({timeout: 15_000}); + }); +}); diff --git a/demo/e2e/tests/item-detail.spec.ts b/demo/e2e/tests/item-detail.spec.ts new file mode 100644 index 0000000..9a87920 --- /dev/null +++ b/demo/e2e/tests/item-detail.spec.ts @@ -0,0 +1,113 @@ +import {expect, test} from '@playwright/test'; +import {TEST_ITEMS} from '../seed-test.ts'; + +const TIMEOUT = 15_000; + +// In the default sort (modified desc) Alpha Item is at index 0. +const ALPHA = TEST_ITEMS.find(i => i.title === 'Alpha Item')!; + +test.describe('Item detail panel', () => { + test.beforeEach(async ({page}) => { + await page.goto('/'); + // Wait for the list to have real rows loaded. + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + }); + + test('clicking a row opens the detail panel', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + // The panel should appear and show the item title in an
<h2>
. + await expect(page.getByRole('heading', {level: 2})).toContainText( + 'Alpha Item', + {timeout: TIMEOUT}, + ); + }); + + test('detail panel shows the item description', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + await expect(page.getByText(ALPHA.description)).toBeVisible({ + timeout: TIMEOUT, + }); + }); + + test('detail panel shows the item ID', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + await expect(page.getByText(ALPHA.id)).toBeVisible({timeout: TIMEOUT}); + }); + + test('clicking a row sets the URL hash to the item ID', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + await expect(page).toHaveURL(`/#${ALPHA.id}`, {timeout: TIMEOUT}); + }); + + test('the selected row gets aria-selected="true"', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + // After clicking, the row should carry aria-selected. + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toHaveAttribute( + 'aria-selected', + 'true', + {timeout: TIMEOUT}, + ); + }); + + test('close button hides the detail panel', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + + // Confirm panel opened. + await expect(page.getByRole('heading', {level: 2})).toBeVisible({ + timeout: TIMEOUT, + }); + + // Click the close button (aria-label="Close"). + await page.getByRole('button', {name: 'Close'}).click(); + + // Panel should no longer be visible. + await expect(page.getByRole('heading', {level: 2})).not.toBeVisible(); + }); + + test('closing the panel clears the URL hash', async ({page}) => { + await page.locator(`a[href="#${ALPHA.id}"]`).click(); + await expect(page).toHaveURL(`/#${ALPHA.id}`, {timeout: TIMEOUT}); + + await page.getByRole('button', {name: 'Close'}).click(); + + // Hash should be cleared (URL ends with just /). 
+ await expect(page).toHaveURL(/\/#?$/, {timeout: TIMEOUT}); + }); + + test('navigating directly to a permalink shows the detail panel', async ({ + page, + }) => { + await page.goto(`/#${ALPHA.id}`); + + await expect(page.getByRole('heading', {level: 2})).toContainText( + 'Alpha Item', + {timeout: TIMEOUT}, + ); + }); + + test('permalink to an item far down the list shows loading then resolves', async ({ + page, + }) => { + // Test Item 150 is near index 149 (second page) and is not loaded + // initially. The detail panel should show "Loading…" while the data + // is fetched, then resolve to the item. + const farItem = TEST_ITEMS.find(i => i.title === 'Test Item 150')!; + await page.goto(`/#${farItem.id}`); + + // Initially the detail panel shows a loading indicator. + await expect(page.getByText('Loading…')).toBeVisible({timeout: TIMEOUT}); + + // Eventually the item title appears in the panel heading. + await expect(page.getByRole('heading', {level: 2})).toContainText( + 'Test Item 150', + {timeout: TIMEOUT}, + ); + }); +}); diff --git a/demo/e2e/tests/scroll.spec.ts b/demo/e2e/tests/scroll.spec.ts new file mode 100644 index 0000000..b553f96 --- /dev/null +++ b/demo/e2e/tests/scroll.spec.ts @@ -0,0 +1,375 @@ +import {expect, test, type Page} from '@playwright/test'; +import {TEST_ITEMS} from '../seed-test.ts'; + +const TIMEOUT = 20_000; + +// In default sort (modified DESC), Alpha Item is first. +const ALPHA = TEST_ITEMS.find(i => i.title === 'Alpha Item')!; + +/** + * Wait for the virtualizer's scroll state to be persisted into the + * Navigation API's current entry state. The virtualizer debounces + * `onScrollStateChange` at 100ms, so after the initial data render + * the state is not immediately available. In-page hash navigations + * (navigation.navigate) only trigger re-anchoring when the persisted + * scroll state changes between the old and new history entries, so we + * must wait for it before navigating away. 
+ */ +async function waitForScrollStatePersisted(page: Page) { + await expect(async () => { + const persisted = await page.evaluate( + () => + (navigation.currentEntry?.getState() as Record) + ?.scrollState != null, + ); + expect(persisted).toBe(true); + }).toPass({timeout: TIMEOUT}); +} + +// The virtual list renders only the visible rows. Scrolling causes new pages +// to be fetched and new rows to be inserted into the DOM. With 200 items and a +// min page size of 100, there are at least 2 pages to load. + +test.describe('Scroll / paging', () => { + test.beforeEach(async ({page}) => { + await page.goto('/'); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + }); + + test('initial render shows the correct item count', async ({page}) => { + // The virtualizer lazy-loads pages, so the initial count is an estimate + // based on the first page only (e.g. "(~100)"). Verify a count appears. + await expect(page.getByText(/\(~?\d+\)/)).toBeVisible({timeout: TIMEOUT}); + }); + + test('scrolling to the bottom loads items from the second page', async ({ + page, + }) => { + const viewportEl = await page + .locator('[class*="viewport"]') + .elementHandle(); + + // The virtualizer lazy-loads pages, so we need to scroll to the bottom + // repeatedly — each scroll triggers loading the next page, which extends + // the scrollable area. + await expect(async () => { + await viewportEl!.evaluate(el => { + el.scrollTop = el.scrollHeight; + }); + await expect( + page.locator(`a[href="#${TEST_ITEMS[TEST_ITEMS.length - 1].id}"]`), + ).toBeVisible(); + }).toPass({timeout: TIMEOUT}); + }); + + test('scrolling down and back up restores the first item', async ({page}) => { + const viewportEl = await page + .locator('[class*="viewport"]') + .elementHandle(); + + await viewportEl!.evaluate(el => { + el.scrollTop = el.scrollHeight; + }); + + // Scroll back to the top. 
+ await viewportEl!.evaluate(el => { + el.scrollTop = 0; + }); + + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toContainText( + 'Alpha Item', + ); + }); +}); + +/** + * Wait for a permalink row to become visible and selected. The virtualizer + * scrolls to the target automatically but the initial data fetch and + * pagination adjustments are async. + * + * Strategy: + * 1. Wait for any list rows to appear (data loaded). + * 2. Wait for the scroll position to stabilize (no change for 500 ms). + * 3. If the target row isn't visible yet, scroll the viewport down in + * viewport-sized steps until it appears. This mirrors what a user does + * when the virtualizer's auto-scroll undershoots. + * 4. Assert the row is visible and selected. + */ +async function waitForPermalinkRow(page: Page, id: string) { + const row = page.locator(`a[href="#${id}"]`); + + // 1. Wait for any rows to be rendered. + await expect( + page.locator('[class*="viewport"] a[href^="#"]').first(), + ).toBeVisible({timeout: 10_000}); + + // 2. Wait for scroll to settle. + await page.evaluate( + () => + new Promise(resolve => { + const vp = document.querySelector('[class*="viewport"]'); + if (!vp) { + resolve(); + return; + } + let last = vp.scrollTop; + const check = () => { + if (vp.scrollTop === last) { + resolve(); + } else { + last = vp.scrollTop; + setTimeout(check, 50); + } + }; + setTimeout(check, 50); + }), + ); + + // 3. If the row isn't visible, scroll down in steps until it appears. + await expect(async () => { + // const visible = await row.isVisible().catch(() => false); + // if (!visible) { + // await page.evaluate(() => { + // const vp = document.querySelector('[class*="viewport"]'); + // if (vp) vp.scrollTop += vp.clientHeight; + // }); + // } + await expect(row).toBeVisible({timeout: 1_000}); + }).toPass({timeout: 20_000}); + + // 4. Assert selected. 
+ await expect(row).toHaveAttribute('aria-selected', 'true'); + return row; +} + +// --------------------------------------------------------------------------- +// Direct permalink navigation: load the app with a hash already in the URL +// (no prior `/` load). The app must scroll the target row into view and +// select it on first render. +// --------------------------------------------------------------------------- + +test.describe('Direct permalink navigation', () => { + test('first page item — Beta Item', async ({page}) => { + const beta = TEST_ITEMS.find(i => i.title === 'Beta Item')!; + await page.goto(`/#${beta.id}`); + + const row = page.locator(`a[href="#${beta.id}"]`); + await expect(row).toBeVisible({timeout: TIMEOUT}); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Beta Item'); + }); + + test('page-boundary item — Test Item 100', async ({page}) => { + const mid = TEST_ITEMS.find(i => i.title === 'Test Item 100')!; + await page.goto(`/#${mid.id}`); + + const row = await waitForPermalinkRow(page, mid.id); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Test Item 100'); + }); + + test('last item — Test Item 200', async ({page}) => { + const last = TEST_ITEMS.find(i => i.title === 'Test Item 200')!; + await page.goto(`/#${last.id}`); + + const row = await waitForPermalinkRow(page, last.id); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Test Item 200'); + }); +}); + +// --------------------------------------------------------------------------- +// In-page hash navigation: load `/` first, wait for the list, then set +// location.hash. The app should scroll the target row into view and select it. 
+// --------------------------------------------------------------------------- + +test.describe('In-page hash navigation', () => { + test.beforeEach(async ({page}) => { + await page.goto('/'); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + await waitForScrollStatePersisted(page); + }); + + test('first page item — scrolls and selects', async ({page}) => { + const beta = TEST_ITEMS.find(i => i.title === 'Beta Item')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, beta.id); + await page.waitForURL(`/#${beta.id}`); + + const row = page.locator(`a[href="#${beta.id}"]`); + await expect(row).toBeVisible({timeout: TIMEOUT}); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Beta Item'); + }); + + test('page-boundary item — scrolls and selects', async ({page}) => { + const mid = TEST_ITEMS.find(i => i.title === 'Test Item 100')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, mid.id); + await page.waitForURL(`/#${mid.id}`); + + const row = await waitForPermalinkRow(page, mid.id); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Test Item 100'); + }); + + test('last item — scrolls and selects', async ({page}) => { + const last = TEST_ITEMS.find(i => i.title === 'Test Item 200')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, last.id); + await page.waitForURL(`/#${last.id}`); + + const row = await waitForPermalinkRow(page, last.id); + await expect(row).toHaveAttribute('aria-selected', 'true'); + await expect(row).toContainText('Test Item 200'); + }); +}); + +// --------------------------------------------------------------------------- +// Back / forward navigation and scroll restore: the virtualizer persists +// scroll state in history.state via the Navigation API. Navigating back +// or forward should restore the scroll position and visible rows. 
+// --------------------------------------------------------------------------- + +test.describe('Back / forward and scroll restore', () => { + test('back after hash nav restores scroll position at the top', async ({ + page, + }) => { + await page.goto('/'); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toContainText( + 'Alpha Item', + ); + + // Wait for scroll state to be saved before navigating away. + await waitForScrollStatePersisted(page); + + // Navigate to a far item (pushes a new history entry). + const far = TEST_ITEMS.find(i => i.title === 'Test Item 100')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, far.id); + await page.waitForURL(`/#${far.id}`); + await expect(page.locator(`a[href="#${far.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + + // Go back — should restore to the top with Alpha Item visible. + await page.goBack(); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toContainText( + 'Alpha Item', + ); + }); + + test('forward after back restores the permalink position', async ({page}) => { + await page.goto('/'); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + + // Wait for scroll state to be saved before navigating away. + await waitForScrollStatePersisted(page); + + // Navigate to a far item. + const far = TEST_ITEMS.find(i => i.title === 'Test Item 100')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, far.id); + await page.waitForURL(`/#${far.id}`); + const farRow = page.locator(`a[href="#${far.id}"]`); + await expect(farRow).toBeVisible({timeout: TIMEOUT}); + + // Go back, then forward. 
+ await page.goBack(); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + + await page.goForward(); + await page.waitForURL(`/#${far.id}`); + await expect(async () => { + await expect(farRow).toBeVisible(); + await expect(farRow).toHaveAttribute('aria-selected', 'true'); + }).toPass({timeout: TIMEOUT}); + }); + + test('back restores a mid-list scroll position', async ({page}) => { + await page.goto('/'); + await expect(page.locator(`a[href="#${ALPHA.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + + // Grab the viewport element for scrolling. + const viewportEl = await page + .locator('[class*="viewport"]') + .elementHandle(); + + // Scroll partway down — enough to see row ~20 but not the very top. + await viewportEl!.evaluate(el => { + el.scrollTop = 800; + }); + + // Wait for a row around that scroll offset to appear. + await expect( + page + .locator( + 'a[href="#tstitem016"], a[href="#tstitem021"], a[href="#tstitem026"]', + ) + .first(), + ).toBeVisible({timeout: TIMEOUT}); + + // Record which row is visible at the top of the viewport. + const visibleRowHref = await page.evaluate(() => { + const viewport = document.querySelector('[class*="viewport"]'); + if (!viewport) return null; + const rect = viewport.getBoundingClientRect(); + const rows = [...document.querySelectorAll('a[href^="#"]')]; + let best: {href: string | null; top: number} | null = null; + for (const row of rows) { + const rowRect = row.getBoundingClientRect(); + if (rowRect.top >= rect.top - 5) { + if (!best || rowRect.top < best.top) { + best = {href: row.getAttribute('href'), top: rowRect.top}; + } + } + } + return best?.href ?? null; + }); + + // Wait for scroll state to be persisted (debounced at 100ms). + await waitForScrollStatePersisted(page); + + // Navigate to a permalink (pushes new history entry). 
+ const far = TEST_ITEMS.find(i => i.title === 'Test Item 100')!; + await page.evaluate(id => { + navigation.navigate(`#${id}`); + }, far.id); + await page.waitForURL(`/#${far.id}`); + await expect(page.locator(`a[href="#${far.id}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + + // Go back — should restore the mid-list scroll position. + await page.goBack(); + + // The previously visible row should reappear near the same position. + await expect(page.locator(`a[href="${visibleRowHref}"]`)).toBeVisible({ + timeout: TIMEOUT, + }); + }); +}); diff --git a/demo/e2e/tests/sort.spec.ts b/demo/e2e/tests/sort.spec.ts new file mode 100644 index 0000000..c90e068 --- /dev/null +++ b/demo/e2e/tests/sort.spec.ts @@ -0,0 +1,105 @@ +import {expect, test, type Page} from '@playwright/test'; + +// Seed data ordering summary (see seed-test.ts for details): +// +// modified DESC (default) → Alpha Item first (modified = BASE+10H) +// modified ASC → Test Item 200 first (modified = BASE−190H) +// created DESC → Kappa Item first (created = BASE+10H) +// created ASC → Test Item 011 first (created = BASE−190H) + +const TIMEOUT = 15_000; + +/** + * Assert that the row containing `text` is the first visible item in + * the scrollable viewport (i.e. closest to the top edge). Uses a retry + * loop because sort changes are async. + */ +async function expectFirstVisibleRow(page: Page, text: string) { + await expect(async () => { + const isFirst = await page.evaluate((txt: string) => { + const viewport = document.querySelector('[class*="viewport"]'); + if (!viewport) return false; + const vpTop = viewport.getBoundingClientRect().top; + const rows = [...viewport.querySelectorAll('a[href^="#"]')]; + if (rows.length === 0) return false; + // Find the row closest to the viewport top. 
+ let best: {el: Element; dist: number} | null = null; + for (const row of rows) { + const dist = Math.abs(row.getBoundingClientRect().top - vpTop); + if (!best || dist < best.dist) { + best = {el: row, dist}; + } + } + return best?.el.textContent?.includes(txt) ?? false; + }, text); + expect(isFirst).toBe(true); + }).toPass({timeout: TIMEOUT}); +} + +test.describe('Sort controls', () => { + test.beforeEach(async ({page}) => { + await page.goto('/'); + // Wait until the list has loaded real rows. + await expect( + page.locator('[class*="viewport"] a[href^="#"]').first(), + ).toBeVisible({ + timeout: TIMEOUT, + }); + }); + + test('default state: sort field button reads "Modified"', async ({page}) => { + await expect(page.getByRole('button', {name: 'Modified'})).toBeVisible(); + }); + + test('default state: sort direction button title is "Descending"', async ({ + page, + }) => { + await expect(page.getByRole('button', {name: 'Descending'})).toBeVisible(); + }); + + test('default (modified desc): Alpha Item is first', async ({page}) => { + await expectFirstVisibleRow(page, 'Alpha Item'); + }); + + test('toggle sort field to created → Kappa Item is first (created desc)', async ({ + page, + }) => { + // Click the sort-field button — it shows the current field and toggles. + await page.getByRole('button', {name: 'Modified'}).click(); + + await expect(page.getByRole('button', {name: 'Created'})).toBeVisible(); + + // Kappa Item has the highest created timestamp (BASE+10H). + await expectFirstVisibleRow(page, 'Kappa Item'); + }); + + test('toggle direction to asc while on created → Test Item 011 is first (created asc)', async ({ + page, + }) => { + await page.getByRole('button', {name: 'Modified'}).click(); + await expectFirstVisibleRow(page, 'Kappa Item'); + + // Flip direction: "Descending" → "Ascending". 
+ await page.getByRole('button', {name: 'Descending'}).click(); + await expect(page.getByRole('button', {name: 'Ascending'})).toBeVisible(); + + // Test Item 011 has the lowest created timestamp (BASE−190H). + await expectFirstVisibleRow(page, 'Test Item 011'); + }); + + test('toggle field back to modified while on created asc → Test Item 200 is first (modified asc)', async ({ + page, + }) => { + // Navigate to created asc. + await page.getByRole('button', {name: 'Modified'}).click(); + await page.getByRole('button', {name: 'Descending'}).click(); + await expectFirstVisibleRow(page, 'Test Item 011'); + + // Switch field back to modified (direction stays asc → modified asc). + await page.getByRole('button', {name: 'Created'}).click(); + await expect(page.getByRole('button', {name: 'Modified'})).toBeVisible(); + + // Test Item 200 has the lowest modified timestamp (BASE−190H). + await expectFirstVisibleRow(page, 'Test Item 200'); + }); +}); diff --git a/demo/e2e/tsconfig.json b/demo/e2e/tsconfig.json new file mode 100644 index 0000000..5de5553 --- /dev/null +++ b/demo/e2e/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig-shared.json", + "compilerOptions": { + "types": ["@playwright/test"], + "module": "ESNext", + "moduleResolution": "Bundler" + }, + "include": ["./**/*.ts", "../playwright.config.ts"] +} diff --git a/demo/main.tsx b/demo/main.tsx index 4705fce..887a6ea 100644 --- a/demo/main.tsx +++ b/demo/main.tsx @@ -5,6 +5,7 @@ import './index.css'; import {schema} from './schema.ts'; const userID = import.meta.env.VITE_PUBLIC_USER_ID ?? 'anon'; +const cachePort = import.meta.env.VITE_PUBLIC_CACHE_PORT ?? 
'5858'; const url = new URL(window.location.href); const apiBase = `${url.origin}/api/zero`; @@ -12,7 +13,7 @@ createRoot(document.getElementById('root')!).render( diff --git a/demo/package.json b/demo/package.json index 0cda8af..043a7c5 100644 --- a/demo/package.json +++ b/demo/package.json @@ -9,7 +9,8 @@ "dev:db-up": "docker compose --env-file .env -f ./docker/docker-compose.yml up", "dev:db-down": "docker compose --env-file .env -f ./docker/docker-compose.yml down", "dev:clean": "source .env && docker volume rm -f docker_zstart_pgdata && rm -rf \"${ZERO_REPLICA_FILE}\"*", - "seed": "node --env-file=.env seed.ts" + "seed": "node --env-file=.env seed.ts", + "test:e2e": "node --env-file=.env ./node_modules/@playwright/test/cli.js test" }, "dependencies": { "@hono/node-server": "^1.19.11", @@ -20,9 +21,12 @@ }, "devDependencies": { "@faker-js/faker": "^10.4.0", + "@playwright/test": "^1.59.0", "@tanstack/react-virtual": "^3.13.23", "@types/node": "^25.5.0", "@types/pg": "^8.20.0", + "playwright": "^1.59.0", + "playwright-core": "^1.59.0", "react": "^19.2.4", "react-dom": "^19.2.4", "vite": "^8.0.3" diff --git a/demo/playwright.config.ts b/demo/playwright.config.ts new file mode 100644 index 0000000..0fbaf03 --- /dev/null +++ b/demo/playwright.config.ts @@ -0,0 +1,27 @@ +import {defineConfig} from '@playwright/test'; + +// Environment variables are loaded via `node --env-file=.env` in the +// test:e2e script, so no manual .env parsing is needed here. +export default defineConfig({ + testDir: './e2e/tests', + globalSetup: './e2e/global-setup.ts', + globalTeardown: './e2e/global-teardown.ts', + fullyParallel: true, + forbidOnly: !!process.env.CI, + retries: process.env.CI ? 2 : 0, + // Give CI more time — zero-cache cold-start and replication are slower there. + timeout: process.env.CI ? 
60_000 : 30_000, + reporter: 'list', + use: { + baseURL: 'http://localhost:5173', + trace: 'on-first-retry', + }, + // Vite dev server — started after globalSetup, stopped after globalTeardown. + webServer: { + command: 'pnpm dev:ui', + url: 'http://localhost:5173', + reuseExistingServer: !process.env.CI, + stdout: 'pipe', + stderr: 'pipe', + }, +}); diff --git a/demo/tsconfig.app.json b/demo/tsconfig.app.json index 144304f..8aa7448 100644 --- a/demo/tsconfig.app.json +++ b/demo/tsconfig.app.json @@ -3,5 +3,6 @@ "compilerOptions": { "jsx": "react-jsx" }, - "include": ["./*.tsx", "./*.ts"] + "include": ["./*.tsx", "./*.ts"], + "exclude": ["./playwright.config.ts"] } diff --git a/demo/vite-env.d.ts b/demo/vite-env.d.ts index b8415a0..7fd3a4a 100644 --- a/demo/vite-env.d.ts +++ b/demo/vite-env.d.ts @@ -2,6 +2,7 @@ interface ImportMetaEnv { readonly VITE_PUBLIC_USER_ID: string; + readonly VITE_PUBLIC_CACHE_PORT: string; } interface ImportMeta { diff --git a/package.json b/package.json index a5efca6..5ea464c 100644 --- a/package.json +++ b/package.json @@ -28,7 +28,7 @@ }, "scripts": { "dev": "pnpm --filter demo dev", - "check-types": "tsgo -p src/tsconfig.json && tsgo -p demo/tsconfig.app.json && tsgo -p demo/tsconfig.node.json", + "check-types": "tsgo -p src/tsconfig.json && tsgo -p demo/tsconfig.app.json && tsgo -p demo/tsconfig.node.json && tsgo -p demo/e2e/tsconfig.json", "lint": "oxlint --type-aware", "check": "oxlint --type-aware --type-check", "format": "oxfmt", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8c7e59b..704b362 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -79,6 +79,9 @@ importers: '@faker-js/faker': specifier: ^10.4.0 version: 10.4.0 + '@playwright/test': + specifier: ^1.59.0 + version: 1.59.0 '@tanstack/react-virtual': specifier: ^3.13.23 version: 3.13.23(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -88,6 +91,12 @@ importers: '@types/pg': specifier: ^8.20.0 version: 8.20.0 + playwright: + specifier: ^1.59.0 + version: 1.59.0 + 
playwright-core: + specifier: ^1.59.0 + version: 1.59.0 react: specifier: ^19.2.4 version: 19.2.4 @@ -1199,6 +1208,11 @@ packages: '@pinojs/redact@0.4.0': resolution: {integrity: sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==} + '@playwright/test@1.59.0': + resolution: {integrity: sha512-TOA5sTLd49rTDaZpYpvCQ9hGefHQq/OYOyCVnGqS2mjMfX+lGZv2iddIJd0I48cfxqSPttS9S3OuLKyylHcO1w==} + engines: {node: '>=18'} + hasBin: true + '@polka/url@1.0.0-next.29': resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} @@ -2369,13 +2383,13 @@ packages: resolution: {integrity: sha512-r34yH/GlQpKZbU1BvFFqOjhISRo1MNx1tWYsYvmj6KIRHSPMT2+yHOEb1SG6NMvRoHRF0a07kCOox/9yakl1vg==} hasBin: true - playwright-core@1.58.2: - resolution: {integrity: sha512-yZkEtftgwS8CsfYo7nm0KE8jsvm6i/PTgVtB8DL726wNf6H2IMsDuxCpJj59KDaxCtSnrWan2AeDqM7JBaultg==} + playwright-core@1.59.0: + resolution: {integrity: sha512-PW/X/IoZ6BMUUy8rpwHEZ8Kc0IiLIkgKYGNFaMs5KmQhcfLILNx9yCQD0rnWeWfz1PNeqcFP1BsihQhDOBCwZw==} engines: {node: '>=18'} hasBin: true - playwright@1.58.2: - resolution: {integrity: sha512-vA30H8Nvkq/cPBnNw4Q8TWz1EJyqgpuinBcHET0YVJVFldr8JDNiU9LaWAE1KqSkRYazuaBhTpB5ZzShOezQ6A==} + playwright@1.59.0: + resolution: {integrity: sha512-wihGScriusvATUxmhfENxg0tj1vHEFeIwxlnPFKQTOQVd7aG08mUfvvniRP/PtQOC+2Bs52kBOC/Up1jTXeIbw==} engines: {node: '>=18'} hasBin: true @@ -4014,6 +4028,10 @@ snapshots: '@pinojs/redact@0.4.0': {} + '@playwright/test@1.59.0': + dependencies: + playwright: 1.59.0 + '@polka/url@1.0.0-next.29': optional: true @@ -4308,11 +4326,11 @@ snapshots: '@typescript/native-preview-win32-arm64': 7.0.0-dev.20260327.2 '@typescript/native-preview-win32-x64': 7.0.0-dev.20260327.2 - '@vitest/browser-playwright@4.1.2(playwright@1.58.2)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0))(vitest@4.1.2)': + 
'@vitest/browser-playwright@4.1.2(playwright@1.59.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0))(vitest@4.1.2)': dependencies: '@vitest/browser': 4.1.2(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0))(vitest@4.1.2) '@vitest/mocker': 4.1.2(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0)) - playwright: 1.58.2 + playwright: 1.59.0 tinyrainbow: 3.1.0 vitest: 4.1.2(@opentelemetry/api@1.9.0)(@types/node@25.5.0)(@vitest/browser-playwright@4.1.2)(happy-dom@20.8.9)(jsdom@29.0.1(@noble/hashes@1.8.0))(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0)) transitivePeerDependencies: @@ -5233,15 +5251,13 @@ snapshots: sonic-boom: 4.2.1 thread-stream: 4.0.0 - playwright-core@1.58.2: - optional: true + playwright-core@1.59.0: {} - playwright@1.58.2: + playwright@1.59.0: dependencies: - playwright-core: 1.58.2 + playwright-core: 1.59.0 optionalDependencies: fsevents: 2.3.2 - optional: true pngjs@7.0.0: optional: true @@ -5638,7 +5654,7 @@ snapshots: optionalDependencies: '@opentelemetry/api': 1.9.0 '@types/node': 25.5.0 - '@vitest/browser-playwright': 4.1.2(playwright@1.58.2)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0))(vitest@4.1.2) + '@vitest/browser-playwright': 4.1.2(playwright@1.59.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.3)(tsx@4.21.0))(vitest@4.1.2) happy-dom: 20.8.9 jsdom: 29.0.1(@noble/hashes@1.8.0) transitivePeerDependencies: diff --git a/src/react/use-zero-virtualizer.ts b/src/react/use-zero-virtualizer.ts index c43fe12..52850bb 100644 --- a/src/react/use-zero-virtualizer.ts +++ b/src/react/use-zero-virtualizer.ts @@ -258,6 +258,11 @@ export function useZeroVirtualizer< // Settled state: starts unsettled, flips to true after settleTime ms of // no scroll activity. Resets on scroll or listContextParams change. 
const [settled, setSettled] = useState(false); + // Tracks that a programmatic scroll adjustment (scrollToOffset) has been + // issued but the browser scroll event has not yet been processed by the + // virtualizer. While true, virtual items and scrollOffset are stale and + // must not be used for paging decisions. + const awaitingScrollSettleRef = useRef(false); const scrollOffsetRef = useRef(undefined); const resetSettleTimer = useCallback(() => { @@ -389,11 +394,29 @@ export function useZeroVirtualizer< offset !== scrollOffsetRef.current; scrollOffsetRef.current = offset ?? undefined; if (didScroll) { + awaitingScrollSettleRef.current = false; return resetSettleTimer(); } return undefined; }, [virtualizer.scrollOffset, resetSettleTimer]); + // Wrappers that mark a programmatic scroll as pending so paging effects + // skip stale virtual items until the browser fires the real scroll event. + const scrollToOffset = (targetOffset: number) => { + const currentOffset = virtualizer.scrollOffset ?? 0; + virtualizer.scrollToOffset(targetOffset); + if (targetOffset !== currentOffset) { + awaitingScrollSettleRef.current = true; + } + }; + + const scrollToIndex = ( + ...args: Parameters + ) => { + virtualizer.scrollToIndex(...args); + awaitingScrollSettleRef.current = true; + }; + useEffect(() => { // Make sure page size is enough to fill the scroll element at least // 3 times. Don't shrink page size. @@ -466,12 +489,12 @@ export function useZeroVirtualizer< // Apply scroll adjustments synchronously with layout to prevent visual jumps useLayoutEffect(() => { if (pendingScrollAdjustment !== 0) { - virtualizer.scrollToOffset( + const targetOffset = (virtualizer.scrollOffset ?? 
0) + - pendingScrollAdjustment * - // TODO: Support dynamic item sizes - estimateSize(0), - ); + pendingScrollAdjustment * + // TODO: Support dynamic item sizes + estimateSize(0); + scrollToOffset(targetOffset); dispatch({type: 'SCROLL_ADJUSTED'}); } @@ -533,7 +556,7 @@ export function useZeroVirtualizer< if (!isListContextCurrent || scrollStateChanged) { if (effectiveScrollState) { - virtualizer.scrollToOffset(effectiveScrollState.scrollTop); + scrollToOffset(effectiveScrollState.scrollTop); dispatch({ type: 'RESET_STATE', estimatedTotal: effectiveScrollState.estimatedTotal, @@ -553,17 +576,17 @@ export function useZeroVirtualizer< : undefined; if (permalinkVirtualItem) { - virtualizer.scrollToIndex(permalinkVirtualItem.index, { + scrollToIndex(permalinkVirtualItem.index, { align: 'auto', }); } else { // TODO(arv): Figure out if we should scroll to top or bottom. - virtualizer.scrollToOffset( + const targetOffset = NUM_ROWS_FOR_LOADING_SKELETON * - // TODO: Support dynamic item sizes - estimateSize(0), - ); + // TODO: Support dynamic item sizes + estimateSize(0); + scrollToOffset(targetOffset); dispatch({ type: 'RESET_STATE', estimatedTotal: NUM_ROWS_FOR_LOADING_SKELETON, @@ -574,7 +597,7 @@ export function useZeroVirtualizer< }); } } else { - virtualizer.scrollToOffset(0); + scrollToOffset(0); dispatch({ type: 'RESET_STATE', estimatedTotal: 0, @@ -612,6 +635,15 @@ export function useZeroVirtualizer< return; } + // After a scroll adjustment (scrollToOffset), the browser fires the scroll + // event asynchronously. Until then the virtualizer's virtual items and + // scrollOffset are stale — they still reflect the *previous* scroll + // position. Acting on stale items would cause spurious anchor updates + // and cascading shifts. 
+ if (awaitingScrollSettleRef.current) { + return; + } + if (atStart) { if (firstRowIndex !== 0) { dispatch({type: 'UPDATE_ANCHOR', anchor: TOP_ANCHOR}); diff --git a/vitest.config.ts b/vitest.config.ts index ee1edf9..625c9f6 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -3,5 +3,6 @@ import {defineConfig} from 'vitest/config'; export default defineConfig({ test: { environment: 'happy-dom', + include: ['src/**/*.test.{ts,tsx}'], }, });