From 563fe06a5b8ee630711a94d266c014698fe7a8de Mon Sep 17 00:00:00 2001 From: Kyle Mathews Date: Thu, 5 Mar 2026 16:15:11 -0700 Subject: [PATCH 01/10] feat: add Intent agent skills for TanStack DB Scaffold 12 SKILL.md files across 7 packages to guide AI coding agents. Covers core DB concepts (collections, queries, mutations, custom adapters), 5 framework bindings (React, Vue, Svelte, Solid, Angular), meta-framework integration (Start, Next, Remix, Nuxt, SvelteKit), and offline transactions. Includes 9 reference files for adapter details, operators, and transaction API. Updates package.json files arrays to publish skills with packages. Co-Authored-By: Claude Opus 4.6 --- _artifacts/skill_tree.yaml | 273 +++++++ domain_map.yaml | 707 ++++++++++++++++++ packages/angular-db/package.json | 3 +- .../angular-db/skills/angular-db/SKILL.md | 261 +++++++ packages/db/package.json | 4 +- packages/db/skills/db-core/SKILL.md | 59 ++ .../skills/db-core/collection-setup/SKILL.md | 406 ++++++++++ .../references/electric-adapter.md | 158 ++++ .../references/local-adapters.md | 188 +++++ .../references/powersync-adapter.md | 197 +++++ .../references/query-adapter.md | 167 +++++ .../references/rxdb-adapter.md | 146 ++++ .../references/schema-patterns.md | 179 +++++ .../references/trailbase-adapter.md | 147 ++++ .../db/skills/db-core/custom-adapter/SKILL.md | 278 +++++++ .../db/skills/db-core/live-queries/SKILL.md | 338 +++++++++ .../live-queries/references/operators.md | 286 +++++++ .../db-core/mutations-optimistic/SKILL.md | 356 +++++++++ .../references/transaction-api.md | 207 +++++ packages/db/skills/meta-framework/SKILL.md | 339 +++++++++ packages/offline-transactions/package.json | 3 +- .../skills/offline/SKILL.md | 350 +++++++++ packages/react-db/package.json | 3 +- packages/react-db/skills/react-db/SKILL.md | 253 +++++++ packages/solid-db/package.json | 3 +- packages/solid-db/skills/solid-db/SKILL.md | 156 ++++ packages/svelte-db/package.json | 3 +- 
packages/svelte-db/skills/svelte-db/SKILL.md | 163 ++++ packages/vue-db/package.json | 3 +- packages/vue-db/skills/vue-db/SKILL.md | 137 ++++ skill-tree-generator-feedback.md | 25 + skill_spec.md | 206 +++++ 32 files changed, 5997 insertions(+), 7 deletions(-) create mode 100644 _artifacts/skill_tree.yaml create mode 100644 domain_map.yaml create mode 100644 packages/angular-db/skills/angular-db/SKILL.md create mode 100644 packages/db/skills/db-core/SKILL.md create mode 100644 packages/db/skills/db-core/collection-setup/SKILL.md create mode 100644 packages/db/skills/db-core/collection-setup/references/electric-adapter.md create mode 100644 packages/db/skills/db-core/collection-setup/references/local-adapters.md create mode 100644 packages/db/skills/db-core/collection-setup/references/powersync-adapter.md create mode 100644 packages/db/skills/db-core/collection-setup/references/query-adapter.md create mode 100644 packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md create mode 100644 packages/db/skills/db-core/collection-setup/references/schema-patterns.md create mode 100644 packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md create mode 100644 packages/db/skills/db-core/custom-adapter/SKILL.md create mode 100644 packages/db/skills/db-core/live-queries/SKILL.md create mode 100644 packages/db/skills/db-core/live-queries/references/operators.md create mode 100644 packages/db/skills/db-core/mutations-optimistic/SKILL.md create mode 100644 packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md create mode 100644 packages/db/skills/meta-framework/SKILL.md create mode 100644 packages/offline-transactions/skills/offline/SKILL.md create mode 100644 packages/react-db/skills/react-db/SKILL.md create mode 100644 packages/solid-db/skills/solid-db/SKILL.md create mode 100644 packages/svelte-db/skills/svelte-db/SKILL.md create mode 100644 packages/vue-db/skills/vue-db/SKILL.md create mode 100644 
skill-tree-generator-feedback.md create mode 100644 skill_spec.md diff --git a/_artifacts/skill_tree.yaml b/_artifacts/skill_tree.yaml new file mode 100644 index 000000000..ad2a85776 --- /dev/null +++ b/_artifacts/skill_tree.yaml @@ -0,0 +1,273 @@ +# _artifacts/skill_tree.yaml +library: + name: "@tanstack/db" + version: "0.5.30" + repository: "https://github.com/TanStack/db" + description: "Reactive client store with normalized collections, sub-millisecond live queries, and instant optimistic mutations" +generated_from: + domain_map: "domain_map.yaml" + skill_spec: "skill_spec.md" +generated_at: "2026-03-04" + +skills: + # ─── Core overview (entry point) ─── + - name: "db-core" + slug: "db-core" + type: "core" + domain: "collection-setup" + path: "skills/db-core/SKILL.md" + package: "packages/db" + description: > + TanStack DB core concepts: createCollection, live queries via query builder, + optimistic mutations with draft proxy, transaction lifecycle. Entry point + for all TanStack DB skills with sub-skill routing table. + sources: + - "TanStack/db:docs/overview.md" + - "TanStack/db:packages/db/src/index.ts" + + # ─── Core sub-skills ─── + - name: "Collection Setup" + slug: "db-core/collection-setup" + type: "sub-skill" + domain: "collection-setup" + path: "skills/db-core/collection-setup/SKILL.md" + package: "packages/db" + description: > + Creating typed collections with createCollection, queryCollectionOptions, + electricCollectionOptions, powerSyncCollectionOptions, rxdbCollectionOptions, + trailbaseCollectionOptions, localOnlyCollectionOptions, localStorageCollectionOptions. + CollectionConfig (getKey, schema, sync, gcTime, autoIndex, syncMode). + StandardSchema validation with Zod/Valibot/ArkType. Collection lifecycle + (idle/loading/ready/error/cleaned-up). Adapter-specific sync patterns. 
+ requires: + - "db-core" + sources: + - "TanStack/db:docs/overview.md" + - "TanStack/db:docs/guides/schemas.md" + - "TanStack/db:docs/collections/query-collection.md" + - "TanStack/db:docs/collections/electric-collection.md" + - "TanStack/db:docs/collections/powersync-collection.md" + - "TanStack/db:docs/collections/rxdb-collection.md" + - "TanStack/db:docs/collections/trailbase-collection.md" + - "TanStack/db:docs/collections/local-only-collection.md" + - "TanStack/db:docs/collections/local-storage-collection.md" + - "TanStack/db:packages/db/src/collection/index.ts" + subsystems: + - "TanStack Query adapter" + - "ElectricSQL adapter" + - "PowerSync adapter" + - "RxDB adapter" + - "TrailBase adapter" + - "Local-only" + - "localStorage" + references: + - "references/query-adapter.md" + - "references/electric-adapter.md" + - "references/powersync-adapter.md" + - "references/rxdb-adapter.md" + - "references/trailbase-adapter.md" + - "references/local-adapters.md" + - "references/schema-patterns.md" + + - name: "Live Queries" + slug: "db-core/live-queries" + type: "sub-skill" + domain: "live-queries" + path: "skills/db-core/live-queries/SKILL.md" + package: "packages/db" + description: > + Query builder fluent API: from, where, join, leftJoin, rightJoin, innerJoin, + fullJoin, select, fn.select, groupBy, having, orderBy, limit, offset, distinct, + findOne. Operators: eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, + isUndefined, and, or, not. Aggregates: count, sum, avg, min, max. String + functions: upper, lower, length, concat, coalesce. Math: add, subtract, + multiply, divide. $selected namespace. createLiveQueryCollection for + standalone queries. Derived collections. Predicate push-down. IVM via + differential dataflow (d2ts). 
+ requires: + - "db-core" + sources: + - "TanStack/db:docs/guides/live-queries.md" + - "TanStack/db:packages/db/src/query/builder/index.ts" + - "TanStack/db:packages/db/src/query/compiler/index.ts" + - "TanStack/db:packages/db-ivm/src/index.ts" + references: + - "references/operators.md" + + - name: "Mutations & Optimistic State" + slug: "db-core/mutations-optimistic" + type: "sub-skill" + domain: "mutations-optimistic" + path: "skills/db-core/mutations-optimistic/SKILL.md" + package: "packages/db" + description: > + collection.insert, collection.update (Immer-style draft proxy), + collection.delete. createOptimisticAction (onMutate + mutationFn). + createPacedMutations with debounceStrategy, throttleStrategy, queueStrategy. + createTransaction, getActiveTransaction, ambient transaction context. + Transaction lifecycle (pending/persisting/completed/failed). Mutation merging + (insert+update→insert, insert+delete→cancel). onInsert/onUpdate/onDelete + handlers. PendingMutation (original, modified, changes, globalKey). + Transaction.isPersisted promise. TanStack Pacer integration. + requires: + - "db-core" + sources: + - "TanStack/db:docs/guides/mutations.md" + - "TanStack/db:packages/db/src/transactions.ts" + - "TanStack/db:packages/db/src/optimistic-action.ts" + - "TanStack/db:packages/db/src/paced-mutations.ts" + - "TanStack/db:packages/db/src/collection/mutations.ts" + references: + - "references/transaction-api.md" + + - name: "Custom Adapter Authoring" + slug: "db-core/custom-adapter" + type: "sub-skill" + domain: "custom-adapter" + path: "skills/db-core/custom-adapter/SKILL.md" + package: "packages/db" + description: > + Building custom collection adapters. SyncConfig interface: sync function + receiving begin, write, commit, markReady, truncate primitives. ChangeMessage + format. loadSubset for on-demand sync. LoadSubsetOptions (where, orderBy, + limit, cursor). 
Expression parsing: parseWhereExpression, + parseOrderByExpression, extractSimpleComparisons, parseLoadSubsetOptions. + Collection options creator pattern. Subscription lifecycle and cleanup. + requires: + - "db-core" + - "db-core/collection-setup" + sources: + - "TanStack/db:docs/guides/collection-options-creator.md" + - "TanStack/db:packages/db/src/collection/sync.ts" + - "TanStack/db:packages/db/src/query/ir.ts" + + # ─── Framework skills (one per adapter package) ─── + - name: "React DB" + slug: "react-db" + type: "framework" + domain: "framework-integration" + path: "skills/react-db/SKILL.md" + package: "packages/react-db" + description: > + React bindings for TanStack DB. useLiveQuery hook with dependency arrays + and 8 overloads (query function, config object, pre-created collection, + disabled state). useLiveSuspenseQuery for React Suspense with Error Boundaries. + useLiveInfiniteQuery for cursor-based pagination (pageSize, fetchNextPage, + hasNextPage). usePacedMutations for debounced React state. Return shape: + data, state, collection, status, isLoading, isReady, isError. + requires: + - "db-core" + sources: + - "TanStack/db:docs/framework/react/overview.md" + - "TanStack/db:packages/react-db/src/useLiveQuery.ts" + - "TanStack/db:packages/react-db/src/useLiveInfiniteQuery.ts" + - "TanStack/db:packages/react-db/src/usePacedMutations.ts" + + - name: "Vue DB" + slug: "vue-db" + type: "framework" + domain: "framework-integration" + path: "skills/vue-db/SKILL.md" + package: "packages/vue-db" + description: > + Vue bindings for TanStack DB. useLiveQuery composable with + MaybeRefOrGetter query functions and ComputedRef return values. + Reactive deps via Vue refs and computed properties. 
+ requires: + - "db-core" + sources: + - "TanStack/db:docs/framework/vue/overview.md" + - "TanStack/db:packages/vue-db/src/useLiveQuery.ts" + + - name: "Svelte DB" + slug: "svelte-db" + type: "framework" + domain: "framework-integration" + path: "skills/svelte-db/SKILL.md" + package: "packages/svelte-db" + description: > + Svelte 5 bindings for TanStack DB. useLiveQuery with Svelte 5 runes + ($state reactivity). Dependency arrays with getter functions for props. + requires: + - "db-core" + sources: + - "TanStack/db:docs/framework/svelte/overview.md" + - "TanStack/db:packages/svelte-db/src/useLiveQuery.svelte.ts" + + - name: "Solid DB" + slug: "solid-db" + type: "framework" + domain: "framework-integration" + path: "skills/solid-db/SKILL.md" + package: "packages/solid-db" + description: > + SolidJS bindings for TanStack DB. useLiveQuery with fine-grained + reactivity (Accessor, createSignal). Signal reads must happen inside + the query function for tracking. + requires: + - "db-core" + sources: + - "TanStack/db:docs/framework/solid/overview.md" + - "TanStack/db:packages/solid-db/src/useLiveQuery.ts" + + - name: "Angular DB" + slug: "angular-db" + type: "framework" + domain: "framework-integration" + path: "skills/angular-db/SKILL.md" + package: "packages/angular-db" + description: > + Angular bindings for TanStack DB. injectLiveQuery with Angular Signals. + Must be called in injection context. Supports reactive params via + { params: () => T, query: ({ params, q }) => QueryBuilder } pattern. + Uses inject(DestroyRef) for cleanup. 
+ requires: + - "db-core" + sources: + - "TanStack/db:docs/framework/angular/overview.md" + - "TanStack/db:packages/angular-db/src/inject-live-query.ts" + + # ─── Composition skills ─── + - name: "Meta-Framework Integration" + slug: "meta-framework" + type: "composition" + domain: "meta-framework" + path: "skills/meta-framework/SKILL.md" + package: "packages/db" + description: > + Client-side preloading of TanStack DB collections in route loaders. + collection.preload(), stateWhenReady(), toArrayWhenReady(), onFirstReady(). + Pre-creating createLiveQueryCollection in loaders. Setting ssr: false on + routes (SSR not yet supported). TanStack Start and TanStack Router loader + patterns. Coordinating collection lifecycle with route transitions. + requires: + - "db-core" + - "db-core/collection-setup" + sources: + - "TanStack/db:docs/guides/live-queries.md" + - "TanStack/db:examples/react/projects/src/routes/_authenticated/project/$projectId.tsx" + - "TanStack/db:examples/react/projects/README.md" + + # ─── Offline skill ─── + - name: "Offline Transactions" + slug: "offline" + type: "composition" + domain: "offline" + path: "skills/offline/SKILL.md" + package: "packages/offline-transactions" + description: > + Offline-first transaction queueing with @tanstack/offline-transactions. + OfflineExecutor, startOfflineExecutor, OfflineConfig (collections, + mutationFns, storage, maxConcurrency). Storage adapters (IndexedDBAdapter, + LocalStorageAdapter). Retry policies (DefaultRetryPolicy, BackoffCalculator, + NonRetriableError). Leader election (WebLocksLeader, BroadcastChannelLeader). + Online detection (WebOnlineDetector). OutboxManager, KeyScheduler, + TransactionSerializer. React Native support via @react-native-community/netinfo. 
+ requires: + - "db-core" + - "db-core/mutations-optimistic" + sources: + - "TanStack/db:packages/offline-transactions/src/index.ts" + - "TanStack/db:packages/offline-transactions/src/offline-executor.ts" + - "TanStack/db:packages/offline-transactions/src/outbox.ts" + - "TanStack/db:packages/offline-transactions/src/leader/" diff --git a/domain_map.yaml b/domain_map.yaml new file mode 100644 index 000000000..89ba5363b --- /dev/null +++ b/domain_map.yaml @@ -0,0 +1,707 @@ +# domain_map.yaml +# Generated by skill-domain-discovery +# Library: @tanstack/db +# Version: 0.5.30 +# Date: 2026-03-04 +# Status: reviewed + +library: + name: "@tanstack/db" + version: "0.5.30" + repository: "https://github.com/TanStack/db" + description: "Reactive client store with normalized collections, sub-millisecond live queries, and instant optimistic mutations" + primary_framework: "framework-agnostic (React, Vue, Svelte, Solid, Angular adapters)" + +domains: + - name: "Collection Setup & Schema" + slug: "collection-setup" + description: "Creating and configuring typed collections from any data source, with optional schema validation, type transformations, and adapter-specific sync patterns" + + - name: "Live Query Construction" + slug: "live-queries" + description: "Building SQL-like reactive queries across collections using the fluent query builder API with expressions, joins, aggregations, and derived collections" + + - name: "Framework Integration" + slug: "framework-integration" + description: "Binding live queries to UI framework components using framework-specific hooks, dependency tracking, Suspense, and pagination" + + - name: "Mutations & Optimistic State" + slug: "mutations-optimistic" + description: "Writing data to collections with instant optimistic feedback, managing transaction lifecycles, and handling rollback on failure" + + - name: "Meta-Framework Integration" + slug: "meta-framework" + description: "Client-side preloading of collections in route loaders for TanStack 
Start/Router, Next.js, Remix, Nuxt, and SvelteKit — ensuring data is ready before component render" + + - name: "Custom Adapter Authoring" + slug: "custom-adapter" + description: "Building custom collection adapters that implement the SyncConfig contract for new backends" + + - name: "Offline Transactions" + slug: "offline" + description: "Offline-first transaction queueing with persistence, retry, multi-tab coordination, and connectivity detection" + +skills: + - name: "Collection Setup" + slug: "collection-setup" + domain: "collection-setup" + description: "Creating and configuring typed collections backed by any data source" + type: "core" + covers: + - "createCollection" + - "queryCollectionOptions (@tanstack/query-db-collection)" + - "electricCollectionOptions (@tanstack/electric-db-collection)" + - "trailbaseCollectionOptions (@tanstack/trailbase-db-collection)" + - "powerSyncCollectionOptions (@tanstack/powersync-db-collection)" + - "rxdbCollectionOptions (@tanstack/rxdb-db-collection)" + - "localOnlyCollectionOptions" + - "localStorageCollectionOptions" + - "CollectionConfig interface (id, getKey, schema, sync, compare, autoIndex, startSync, gcTime, utils)" + - "StandardSchema integration (Zod, Valibot, ArkType, Effect)" + - "Schema validation (TInput vs TOutput, transformations, defaults)" + - "Collection lifecycle (idle -> loading -> initialCommit -> ready -> error -> cleaned-up)" + - "Collection status tracking (isReady, isLoading, isError, isCleanedUp)" + - "localOnly -> real backend upgrade path" + - "Adapter-specific sync patterns (Electric txid, Query direct writes, PowerSync persistence)" + - "Sync modes (eager, on-demand, progressive)" + tasks: + - "Create a collection backed by TanStack Query for REST API data" + - "Create a collection synced with ElectricSQL for real-time Postgres data" + - "Create a local-only collection for temporary UI state" + - "Add schema validation with type transformations (e.g. 
string -> Date)" + - "Configure collection with custom getKey, gcTime, and autoIndex" + - "Create a localStorage collection for cross-tab persistent state" + - "Configure PowerSync collection with custom serializer" + - "Prototype with localOnlyCollectionOptions then swap to a real backend" + - "Configure on-demand sync mode for large datasets" + - "Track Electric txid to prevent optimistic state flash" + - "Use direct writes to update query collection without refetch" + subsystems: + - name: "TanStack Query adapter" + package: "@tanstack/query-db-collection" + config_surface: "queryKey, queryFn, queryClient, select, enabled, refetchInterval, staleTime, syncMode" + - name: "ElectricSQL adapter" + package: "@tanstack/electric-db-collection" + config_surface: "shapeOptions, syncMode (eager/on-demand/progressive), txid tracking via awaitTxId/awaitMatch" + - name: "PowerSync adapter" + package: "@tanstack/powersync-db-collection" + config_surface: "database, table, conversions, batchSize, syncMode" + - name: "RxDB adapter" + package: "@tanstack/rxdb-db-collection" + config_surface: "rxCollection, syncBatchSize; keys always strings" + - name: "TrailBase adapter" + package: "@tanstack/trailbase-db-collection" + config_surface: "recordApi, conversions" + - name: "Local-only" + package: "@tanstack/db" + config_surface: "getKey, schema, initialData" + - name: "localStorage" + package: "@tanstack/db" + config_surface: "storageKey, getKey, schema" + failure_modes: + - mistake: "queryFn returning empty array deletes all collection data" + mechanism: "queryCollectionOptions treats queryFn result as complete server state; returning [] means 'server has no items', causing all existing items to be deleted from the collection" + wrong_pattern: | + queryFn: async () => { + const res = await fetch('/api/todos?status=active') + return res.json() // returns [] when no active todos + } + correct_pattern: | + queryFn: async () => { + const res = await fetch('/api/todos') // fetch ALL 
todos + return res.json() + } + // Or use on-demand sync mode for filtered queries + source: "docs/collections/query-collection.md - Full State Sync section" + priority: "CRITICAL" + skills: ["collection-setup"] + + - mistake: "Not knowing which collection type to use for a given backend" + mechanism: "AI agents default to bare createCollection or localOnlyCollectionOptions when they should use queryCollectionOptions, electricCollectionOptions, etc.; each adapter handles sync, handlers, and utilities differently" + source: "maintainer interview" + priority: "CRITICAL" + skills: ["collection-setup"] + + - mistake: "Using async schema validation" + mechanism: "Schema validation must be synchronous; returning a Promise from a schema throws SchemaMustBeSynchronousError, but the error only surfaces at mutation time, not at collection creation" + source: "packages/db/src/collection/mutations.ts:101" + priority: "HIGH" + skills: ["collection-setup"] + + - mistake: "getKey returning undefined for some items" + mechanism: "If getKey returns undefined for any item, throws UndefinedKeyError; common when accessing a nested property that doesn't exist on all items" + source: "packages/db/src/collection/mutations.ts:148" + priority: "HIGH" + skills: ["collection-setup"] + + - mistake: "TInput not a superset of TOutput with schema transforms" + mechanism: "When schema transforms types (e.g. 
string -> Date), the input type for mutations must accept the pre-transform type; mismatches cause type errors that are confusing because they reference internal schema types" + source: "docs/guides/schemas.md - TInput must be superset of TOutput" + priority: "HIGH" + skills: ["collection-setup"] + + - mistake: "Providing both explicit type parameter and schema" + mechanism: "When a schema is provided, the collection infers types from it; also passing an explicit generic type parameter creates conflicting type constraints" + source: "docs/overview.md - schema type inference note" + priority: "MEDIUM" + skills: ["collection-setup"] + + - mistake: "React Native missing crypto.randomUUID polyfill" + mechanism: "TanStack DB uses crypto.randomUUID() internally for IDs; React Native doesn't provide this, causing runtime crash; must install react-native-random-uuid" + source: "docs/overview.md - React Native section" + priority: "HIGH" + skills: ["collection-setup"] + + - mistake: "Electric txid queried outside mutation transaction" + mechanism: "If pg_current_xact_id() is queried in a separate transaction from the actual mutation, the txid won't match the mutation's transaction, causing awaitTxId to stall forever; must query txid INSIDE the same SQL transaction as the mutation" + source: "docs/collections/electric-collection.md - Debugging txid section" + priority: "CRITICAL" + skills: ["collection-setup"] + + - mistake: "queryFn returning partial data without merging" + mechanism: "queryFn result is treated as complete state; returning only new/changed items without merging with existing data causes all non-returned items to be deleted from the collection" + source: "docs/collections/query-collection.md - Handling Partial/Incremental Fetches" + priority: "CRITICAL" + skills: ["collection-setup"] + + - mistake: "Direct writes overridden by next query sync" + mechanism: "Direct writes (writeInsert, etc.) 
update the collection immediately, but the next queryFn execution returns the complete server state which overwrites the direct writes; must coordinate staleTime and refetch behavior" + source: "docs/collections/query-collection.md - Direct Writes and Query Sync" + priority: "MEDIUM" + skills: ["collection-setup"] + + - name: "Live Queries" + slug: "live-queries" + domain: "live-queries" + description: "Building SQL-like reactive queries across collections" + type: "core" + covers: + - "Query builder fluent API (.from, .where, .join, .select, .groupBy, .having, .orderBy, .limit, .offset, .distinct, .findOne)" + - "Comparison operators (eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isUndefined)" + - "Logical operators (and, or, not)" + - "Aggregate functions (count, sum, avg, min, max)" + - "String functions (upper, lower, length, concat, coalesce)" + - "Math functions (add, subtract, multiply, divide)" + - "Join types (inner, left, right, full)" + - "Derived collections (query results are themselves collections)" + - "createLiveQueryCollection (standalone queries outside components)" + - "QueryIR (intermediate representation)" + - "compileQuery (query compilation)" + - "$selected namespace for accessing SELECT fields in ORDER BY / HAVING" + - "Predicate push-down (loadSubsetOptions for on-demand sync)" + - "Incremental view maintenance via d2ts (differential dataflow)" + tasks: + - "Filter collection items with complex WHERE conditions" + - "Join two collections on a foreign key" + - "Aggregate data with GROUP BY and HAVING" + - "Sort and paginate query results" + - "Create a derived collection from a query for reuse across components" + - "Build a query with computed/projected fields" + - "Use subqueries for complex data access patterns" + - "Move JS array filtering/transformation logic into live queries for better performance" + reference_candidates: + - topic: "Query operators" + reason: ">20 distinct operators (comparison, logical, aggregate, string, 
math) with signatures" + failure_modes: + - mistake: "Using === instead of eq() in where clauses" + mechanism: "JavaScript === in a where callback returns a boolean, not an expression object; the query silently evaluates to always-false or always-true instead of building the correct filter predicate. Throws InvalidWhereExpressionError." + wrong_pattern: | + q.from({ users }).where(({ users }) => users.active === true) + correct_pattern: | + q.from({ users }).where(({ users }) => eq(users.active, true)) + source: "packages/db/src/query/builder/index.ts:375" + priority: "CRITICAL" + skills: ["live-queries"] + + - mistake: "Filtering/transforming data in JS instead of using live query operators" + mechanism: "AI agents write .filter()/.map()/.reduce() on the data array instead of using the query builder's where/select/groupBy; this throws away incremental maintenance -- the JS code re-runs from scratch on every change, while the query only recomputes the delta" + wrong_pattern: | + const { data } = useLiveQuery(q => q.from({ todos })) + const active = data.filter(t => t.completed === false) + correct_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.completed, false)) + ) + source: "maintainer interview" + priority: "CRITICAL" + skills: ["live-queries"] + + - mistake: "Not using the full set of available query operators" + mechanism: "The library has a comprehensive operator set (string functions, math, aggregates, coalesce, etc.) 
but agents default to basic eq/gt/lt and do the rest in JS; every operator is incrementally maintained and should be preferred over JS equivalents" + source: "maintainer interview" + priority: "HIGH" + skills: ["live-queries"] + + - mistake: "Using .distinct() without .select()" + mechanism: "distinct() deduplicates by the entire selected object shape; without select(), the shape is undefined, throwing DistinctRequiresSelectError" + source: "packages/db/src/query/compiler/index.ts:218" + priority: "HIGH" + skills: ["live-queries"] + + - mistake: "Using .having() without .groupBy()" + mechanism: "HAVING filters aggregated groups; without GROUP BY there are no groups to filter, throwing HavingRequiresGroupByError" + source: "packages/db/src/query/compiler/index.ts:293" + priority: "HIGH" + skills: ["live-queries"] + + - mistake: "Using .limit() or .offset() without .orderBy()" + mechanism: "Without deterministic ordering, limit/offset results are non-deterministic and cannot be incrementally maintained; throws LimitOffsetRequireOrderByError" + source: "packages/db/src/query/compiler/index.ts:356" + priority: "HIGH" + skills: ["live-queries"] + + - mistake: "Join condition using operator other than eq()" + mechanism: "The D2 differential dataflow join operator only supports equality joins; using gt(), like(), etc. 
throws JoinConditionMustBeEqualityError" + source: "packages/db/src/query/builder/index.ts:216" + priority: "HIGH" + skills: ["live-queries"] + + - mistake: "Passing source directly instead of as {alias: collection}" + mechanism: "from() and join() require sources wrapped as {alias: collection}; passing the collection directly throws InvalidSourceTypeError" + wrong_pattern: | + q.from(usersCollection) + correct_pattern: | + q.from({ users: usersCollection }) + source: "packages/db/src/query/builder/index.ts:79-96" + priority: "MEDIUM" + skills: ["live-queries"] + + - name: "Framework Integration" + slug: "framework-integration" + domain: "framework-integration" + description: "Binding live queries to UI framework components" + type: "framework" + covers: + - "React: useLiveQuery, useLiveSuspenseQuery, useLiveInfiniteQuery, usePacedMutations" + - "Vue: useLiveQuery composable with computed refs" + - "Svelte: useLiveQuery with Svelte 5 runes" + - "Solid: useLiveQuery with fine-grained reactivity" + - "Angular: injectLiveQuery with signals" + - "Dependency arrays for reactive query parameters" + - "React Suspense integration with Error Boundaries" + - "Infinite query pagination (cursor-based)" + - "Return shape: { data, state, collection, status, isLoading, isReady, isError }" + tasks: + - "Bind a live query to a React component with useLiveQuery" + - "Use React Suspense for loading states with useLiveSuspenseQuery" + - "Implement infinite scroll with useLiveInfiniteQuery" + - "Pass reactive parameters to queries in Vue (refs) / Angular (signals) / Solid (signals)" + - "Set up dependency arrays for dynamic query parameters" + subsystems: + - name: "React" + package: "@tanstack/react-db" + config_surface: "useLiveQuery, useLiveSuspenseQuery, useLiveInfiniteQuery, usePacedMutations" + - name: "Vue" + package: "@tanstack/vue-db" + config_surface: "useLiveQuery composable with MaybeRefOrGetter" + - name: "Svelte" + package: "@tanstack/svelte-db" + config_surface: 
"useLiveQuery with Svelte 5 runes ($state)" + - name: "Solid" + package: "@tanstack/solid-db" + config_surface: "useLiveQuery with Accessor/createSignal" + - name: "Angular" + package: "@tanstack/angular-db" + config_surface: "injectLiveQuery with Signal, inject(DestroyRef)" + failure_modes: + - mistake: "Missing external values in useLiveQuery dependency array" + mechanism: "When query uses external state (props, local state) not included in deps array, the query won't re-run when those values change, showing stale results" + wrong_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.userId, userId)) + ) // userId not in deps + correct_pattern: | + const { data } = useLiveQuery(q => + q.from({ todos }).where(({ todos }) => eq(todos.userId, userId)), + [userId] + ) + source: "docs/framework/react/overview.md - dependency array section" + priority: "CRITICAL" + skills: ["framework-integration"] + + - mistake: "Reading Solid signals outside the query function" + mechanism: "Solid's reactivity tracks signal reads inside the query function; reading signals before passing to useLiveQuery means changes aren't tracked and query won't re-run" + source: "docs/framework/solid/overview.md - fine-grained reactivity section" + priority: "HIGH" + skills: ["framework-integration"] + + - mistake: "Using useLiveSuspenseQuery without Error Boundary" + mechanism: "Suspense query throws errors during rendering; without an Error Boundary wrapping the component, the entire app crashes instead of showing a fallback" + source: "docs/guides/live-queries.md - React Suspense section" + priority: "HIGH" + skills: ["framework-integration"] + + - mistake: "Passing non-function deps in Svelte instead of getter functions" + mechanism: "In Svelte 5, props and derived values should be wrapped in getter functions in the dependency array to maintain reactivity; passing values directly captures them at creation time" + source: 
"docs/framework/svelte/overview.md - Props in dependencies" + priority: "MEDIUM" + skills: ["framework-integration"] + compositions: + - library: "meta-framework" + skill: "meta-framework" + + - name: "Mutations & Optimistic State" + slug: "mutations-optimistic" + domain: "mutations-optimistic" + description: "Writing data to collections with instant optimistic feedback" + type: "core" + covers: + - "collection.insert(), collection.update(), collection.delete()" + - "createOptimisticAction (custom mutation actions)" + - "createPacedMutations (debounced/throttled mutations)" + - "createTransaction (manual transaction control)" + - "getActiveTransaction (ambient transaction context)" + - "Transaction lifecycle (pending -> persisting -> completed | failed)" + - "Transaction stacking (concurrent transactions build on each other)" + - "Mutation merging (insert+update -> insert, insert+delete -> null, etc.)" + - "onInsert, onUpdate, onDelete handlers" + - "Optimistic vs non-optimistic updates (optimistic: false)" + - "Automatic rollback on handler error" + - "Change tracking proxy (draft updates via Immer-like API)" + - "PendingMutation type (original, modified, changes, globalKey)" + - "Transaction.isPersisted promise" + - "Temporary ID handling" + - "TanStack Pacer integration for sequential execution" + tasks: + - "Insert a new item with optimistic UI update" + - "Update an item using the draft proxy pattern" + - "Delete items with automatic rollback on server error" + - "Create a custom optimistic action for complex mutations" + - "Use paced mutations for real-time text editing" + - "Batch multiple mutations into a single transaction" + - "Handle temporary IDs that get replaced by server-generated IDs" + - "Use pacer for sequential transaction execution to avoid conflicts" + failure_modes: + - mistake: "Passing a new object to update() instead of mutating the draft" + mechanism: "collection.update(id, {...item, title: 'new'}) is wrong; the API uses an Immer-style 
draft proxy: collection.update(id, (draft) => { draft.title = 'new' }). Passing an object instead of a callback silently fails or throws a confusing error." + wrong_pattern: | + collection.update(id, { ...item, title: 'new' }) + correct_pattern: | + collection.update(id, (draft) => { draft.title = 'new' }) + source: "maintainer interview" + priority: "CRITICAL" + skills: ["mutations-optimistic"] + + - mistake: "Hallucinating mutation API signatures" + mechanism: "AI agents generate plausible but wrong mutation code -- inventing handler signatures, confusing createOptimisticAction with createTransaction, missing the ambient transaction pattern, or wrong PendingMutation property names (e.g. transaction.mutations[0].changes vs .data)" + source: "maintainer interview" + priority: "CRITICAL" + skills: ["mutations-optimistic"] + + - mistake: "onMutate callback returning a Promise" + mechanism: "onMutate in createOptimisticAction must be synchronous because optimistic state needs to be applied immediately in the current tick; returning a Promise throws OnMutateMustBeSynchronousError" + wrong_pattern: | + createOptimisticAction({ + onMutate: async (text) => { collection.insert({ id: await generateId(), text }) }, + mutationFn: async (text, { transaction }) => { ... } + }) + correct_pattern: | + createOptimisticAction({ + onMutate: (text) => { collection.insert({ id: crypto.randomUUID(), text }) }, + mutationFn: async (text, { transaction }) => { ... 
} + }) + source: "packages/db/src/optimistic-action.ts:75" + priority: "CRITICAL" + skills: ["mutations-optimistic"] + + - mistake: "Calling insert/update/delete without handler or ambient transaction" + mechanism: "Collection mutations require either an onInsert/onUpdate/onDelete handler or an ambient transaction from createTransaction; without either, throws MissingInsertHandlerError (or Update/Delete variant)" + source: "packages/db/src/collection/mutations.ts:166" + priority: "CRITICAL" + skills: ["mutations-optimistic"] + + - mistake: "Calling .mutate() after transaction is no longer pending" + mechanism: "Transactions can only accept new mutations while in 'pending' state; calling mutate() after commit() or rollback() throws TransactionNotPendingMutateError" + source: "packages/db/src/transactions.ts:289" + priority: "HIGH" + skills: ["mutations-optimistic"] + + - mistake: "Attempting to change an item's primary key via update" + mechanism: "The update proxy detects key changes and throws KeyUpdateNotAllowedError; primary keys are immutable once set" + source: "packages/db/src/collection/mutations.ts:352" + priority: "HIGH" + skills: ["mutations-optimistic"] + + - mistake: "Inserting item with duplicate key" + mechanism: "If an item with the same key already exists in the collection (synced or optimistic), throws DuplicateKeyError; common when using client-generated IDs without checking" + source: "packages/db/src/collection/mutations.ts:181" + priority: "HIGH" + skills: ["mutations-optimistic"] + + - mistake: "Not awaiting refetch after mutation in query collection handler" + mechanism: "In query collection onInsert/onUpdate/onDelete handlers, the optimistic state is only held until the handler resolves; if you don't await the refetch or sync back, the optimistic state is dropped before new server state arrives, causing a flash of missing data" + wrong_pattern: | + onInsert: async ({ transaction }) => { + await 
api.createTodo(transaction.mutations[0].modified) + // missing: await collection.utils.refetch() + } + correct_pattern: | + onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + await collection.utils.refetch() + } + source: "docs/overview.md - optimistic state lifecycle" + priority: "HIGH" + skills: ["mutations-optimistic"] + + - name: "Meta-Framework Integration" + slug: "meta-framework" + domain: "meta-framework" + description: "Client-side preloading of collections in route loaders for meta-frameworks" + type: "composition" + covers: + - "collection.preload() in route loaders" + - "collection.stateWhenReady() and toArrayWhenReady()" + - "collection.onFirstReady(callback)" + - "Pre-creating createLiveQueryCollection in loaders" + - "Setting ssr: false on routes using collections" + - "TanStack Start / TanStack Router loader patterns" + - "Coordinating collection lifecycle with route transitions" + - "Passing pre-loaded collections to components via loader data" + tasks: + - "Preload a collection in a TanStack Router route loader" + - "Pre-create a live query collection in a loader and pass to component" + - "Configure ssr: false on routes that use TanStack DB collections" + - "Coordinate multiple collection preloads in a single route loader" + - "Handle route transitions when collections are still loading" + failure_modes: + - mistake: "Not preloading collections in route loaders" + mechanism: "Without preload() in the loader, the collection starts syncing only when the component mounts; this causes a loading flash even though the router could have started the sync during navigation" + wrong_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + // no loader -- collection loads on mount + }) + correct_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + ssr: false, + loader: async () => { + await todosCollection.preload() + }, + }) + source: 
"examples/react/projects/src/routes" + priority: "HIGH" + skills: ["meta-framework"] + + - mistake: "Not setting ssr: false on routes using collections" + mechanism: "Collections are client-side only (no SSR support yet); rendering a route with collections on the server attempts to access browser-only APIs, causing crashes or hydration mismatches" + wrong_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + loader: async () => { + await todosCollection.preload() + }, + }) + correct_pattern: | + export const Route = createFileRoute('/todos')({ + component: TodoList, + ssr: false, + loader: async () => { + await todosCollection.preload() + }, + }) + source: "examples/react/projects/src/start.tsx - defaultSsr: false" + priority: "CRITICAL" + skills: ["meta-framework"] + + - mistake: "Creating new collection instances inside loaders on every navigation" + mechanism: "createLiveQueryCollection should be called once and reused; creating new instances on each navigation leaks D2 graph nodes and subscriptions" + source: "docs/guides/live-queries.md - standalone queries" + priority: "HIGH" + skills: ["meta-framework"] + compositions: + - library: "@tanstack/react-router" + skill: "framework-integration" + + - name: "Custom Adapter Authoring" + slug: "custom-adapter" + domain: "custom-adapter" + description: "Building custom collection adapters for new backends" + type: "core" + covers: + - "SyncConfig interface (sync, getSyncMetadata, rowUpdateMode)" + - "Sync primitives (begin, write, commit, markReady, truncate)" + - "ChangeMessage format (insert, update, delete)" + - "loadSubset for on-demand sync mode" + - "LoadSubsetOptions (where, orderBy, limit, cursor)" + - "Expression parsing helpers (parseWhereExpression, parseOrderByExpression, extractSimpleComparisons)" + - "Collection options creator pattern" + - "Subscription lifecycle and cleanup" + tasks: + - "Build a custom collection adapter for a new backend" + - "Implement loadSubset for 
on-demand predicate push-down" + - "Use expression parsing helpers to translate query predicates to API params" + - "Handle the sync lifecycle correctly (begin/write/commit/markReady)" + failure_modes: + - mistake: "Not calling markReady() in custom sync implementation" + mechanism: "markReady() transitions the collection from 'loading' to 'ready' status; forgetting to call it means live queries never resolve and useLiveSuspenseQuery hangs forever in Suspense" + wrong_pattern: | + sync: ({ begin, write, commit }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: 'insert', value: item })) + commit() + // forgot markReady()! + }) + } + correct_pattern: | + sync: ({ begin, write, commit, markReady }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: 'insert', value: item })) + commit() + markReady() + }) + } + source: "docs/guides/collection-options-creator.md - markReady section" + priority: "CRITICAL" + skills: ["custom-adapter"] + + - mistake: "Race condition between initial sync and event subscription" + mechanism: "If live change events aren't subscribed BEFORE the initial data fetch, changes that occur during the fetch are lost; the sync implementation must start listening before fetching" + wrong_pattern: | + sync: ({ begin, write, commit, markReady }) => { + // BAD: fetch first, then subscribe + const data = await fetchAll() + writeAll(data) + subscribe(onChange) // missed changes during fetch! 
+ } + correct_pattern: | + sync: ({ begin, write, commit, markReady }) => { + // GOOD: subscribe first, then fetch + subscribe(onChange) + const data = await fetchAll() + writeAll(data) + } + source: "docs/guides/collection-options-creator.md - Race condition prevention" + priority: "HIGH" + skills: ["custom-adapter"] + + - mistake: "write() called without begin() in sync implementation" + mechanism: "Sync data must be written within a transaction (begin -> write -> commit); calling write() without begin() throws NoPendingSyncTransactionWriteError" + source: "packages/db/src/collection/sync.ts:110" + priority: "HIGH" + skills: ["custom-adapter"] + + - name: "Offline Transactions" + slug: "offline" + domain: "offline" + description: "Offline-first transaction queueing with persistence and retry" + type: "composition" + covers: + - "OfflineExecutor / startOfflineExecutor" + - "OfflineConfig (collections, mutationFns, storage, maxConcurrency)" + - "Storage adapters (IndexedDBAdapter, LocalStorageAdapter)" + - "Retry policies (DefaultRetryPolicy, BackoffCalculator, NonRetriableError)" + - "Leader election (WebLocksLeader, BroadcastChannelLeader)" + - "Online detection (WebOnlineDetector)" + - "OutboxManager (transaction queue)" + - "KeyScheduler (prevents concurrent mutations on same key)" + - "TransactionSerializer (persistence)" + - "React Native support (@react-native-community/netinfo)" + tasks: + - "Set up offline-first transactions with @tanstack/offline-transactions" + - "Configure IndexedDB storage for transaction persistence" + - "Handle multi-tab coordination with leader election" + - "Implement custom retry logic with NonRetriableError" + - "Set up offline support for React Native with NetInfo" + failure_modes: + - mistake: "Using offline transactions when not needed" + mechanism: "Offline is inherently complex; @tanstack/offline-transactions adds storage, leader election, and retry overhead. Only adopt when true offline support is required. 
PowerSync/RxDB handle their own local persistence, which is a different concern." + source: "maintainer interview" + priority: "HIGH" + skills: ["offline"] + + - mistake: "Not handling NonRetriableError for permanent failures" + mechanism: "By default, failed transactions retry with exponential backoff; for permanent failures (e.g. 400 Bad Request), throw NonRetriableError to skip retry and move the transaction to a dead-letter state" + source: "packages/offline-transactions/src/retry.ts" + priority: "HIGH" + skills: ["offline"] + + - mistake: "Multiple tabs executing the same queued transaction" + mechanism: "Without leader election, each tab runs its own OfflineExecutor and processes the outbox independently, causing duplicate mutations; must configure WebLocksLeader or BroadcastChannelLeader" + source: "packages/offline-transactions/src/leader/" + priority: "CRITICAL" + skills: ["offline"] + +tensions: + - name: "Simplicity vs. correctness in sync" + skills: ["collection-setup", "custom-adapter"] + description: "Getting-started simplicity (localOnlyCollectionOptions, eager sync) conflicts with production correctness (on-demand sync, proper adapter selection, race condition prevention)" + implication: "Agents use localOnly or eager mode for everything; production apps need adapter-specific patterns and on-demand sync for large datasets" + + - name: "Optimistic speed vs. data consistency" + skills: ["mutations-optimistic", "collection-setup"] + description: "Instant optimistic updates create a window where client state diverges from server state; resolving conflicts on rollback can lose user work" + implication: "Agents apply optimistic updates without considering rollback UX or awaiting refetch in mutation handlers" + + - name: "Query expressiveness vs. 
IVM constraints" + skills: ["live-queries", "framework-integration"] + description: "The query builder looks like SQL but has constraints (equality joins only, orderBy required for limit, no distinct without select) that SQL doesn't have" + implication: "Agents write SQL-style queries that violate IVM constraints, producing confusing errors" + + - name: "Offline complexity vs. app simplicity" + skills: ["offline", "mutations-optimistic"] + description: "Offline transaction support adds storage, leader election, and retry complexity; most apps don't need it but agents may recommend it prematurely" + implication: "Agents add @tanstack/offline-transactions to apps that only need basic optimistic mutations" + +cross_references: + - from: "framework-integration" + to: "meta-framework" + reason: "Framework hooks render data; meta-framework loaders preload it. Developers need both for production apps with routing." + + - from: "meta-framework" + to: "framework-integration" + reason: "Preloaded collections are consumed by framework hooks; understanding the hook API informs what to preload." + + - from: "collection-setup" + to: "mutations-optimistic" + reason: "Collection mutation handlers (onInsert/onUpdate/onDelete) are configured at setup time but execute during mutations; understanding both is required for working writes." + + - from: "mutations-optimistic" + to: "collection-setup" + reason: "Mutation handler signatures and behavior depend on which adapter is used (e.g. Electric txid return, Query refetch)." + + - from: "live-queries" + to: "collection-setup" + reason: "Live queries reference collections by alias; understanding collection types and sync modes affects query behavior (e.g. on-demand predicate push-down)." + + - from: "custom-adapter" + to: "collection-setup" + reason: "Custom adapters produce the same CollectionConfig shape that built-in adapters use; understanding the config contract is essential." 
+ + - from: "offline" + to: "mutations-optimistic" + reason: "Offline transactions wrap the same transaction/mutation model; understanding createTransaction and PendingMutation is prerequisite." + +gaps: + - skill: "meta-framework" + question: "What are the specific patterns for non-TanStack-Start frameworks (Next.js App Router, Remix loaders, Nuxt middleware, SvelteKit load functions)?" + context: "Only TanStack Start/Router patterns are documented in examples; other frameworks need guidance" + status: "open" + + - skill: "collection-setup" + question: "What is the recommended pattern for collection cleanup/disposal in single-page apps with route-based code splitting?" + context: "gcTime defaults to 5 minutes, but docs don't clearly explain when/how collections are garbage collected or what triggers cleanup" + status: "open" + + - skill: "live-queries" + question: "Are there performance cliffs with live queries? At what complexity/data size do queries degrade?" + context: "Docs claim sub-millisecond for 100k items, but don't discuss limits (e.g., 5-way joins, deeply nested aggregations)" + status: "open" + + - skill: "mutations-optimistic" + question: "What is the recommended pattern for handling temporary IDs that get replaced by server-generated IDs?" + context: "The mutations guide mentions temporary IDs but the pattern for mapping client IDs to server IDs during sync isn't well documented" + status: "open" + + - skill: "meta-framework" + question: "What are the specific patterns for TanStack Router integration with collection loading/prefetching?" + context: "Maintainer reports this is a major composition pain point; agents struggle with the loading/prefetching pattern" + status: "open" + + - skill: "offline" + question: "What happens to in-flight transactions when the browser goes offline mid-persist?" 
+ context: "The offline executor package handles queuing, but the interaction with the main transaction lifecycle isn't documented" + status: "open" diff --git a/packages/angular-db/package.json b/packages/angular-db/package.json index bb0daaa5e..e6a5ae805 100644 --- a/packages/angular-db/package.json +++ b/packages/angular-db/package.json @@ -41,7 +41,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*" diff --git a/packages/angular-db/skills/angular-db/SKILL.md b/packages/angular-db/skills/angular-db/SKILL.md new file mode 100644 index 000000000..f4b60d779 --- /dev/null +++ b/packages/angular-db/skills/angular-db/SKILL.md @@ -0,0 +1,261 @@ +--- +name: angular-db +description: > + Angular bindings for TanStack DB. injectLiveQuery inject function with + Angular signals (Signal) for all return values. Reactive params pattern + ({ params: () => T, query: ({ params, q }) => QueryBuilder }) for dynamic + queries. Must be called in injection context. Angular 17+ control flow + (@if, @for) and signal inputs supported. Import from @tanstack/angular-db + (re-exports all of @tanstack/db). +type: framework +library: db +framework: angular +library_version: "0.5.30" +requires: + - db-core +sources: + - "TanStack/db:docs/framework/angular/overview.md" + - "TanStack/db:packages/angular-db/src/index.ts" +--- + +This skill builds on db-core. Read it first for collection setup, query builder, and mutation patterns. + +# TanStack DB — Angular + +## Setup + +```typescript +import { Component } from '@angular/core' +import { injectLiveQuery } from '@tanstack/angular-db' +import { eq, not } from '@tanstack/db' + +@Component({ + selector: 'app-todo-list', + standalone: true, + template: ` + @if (query.isLoading()) { +
+      <div>Loading...</div>
+    } @else {
+      <ul>
+        @for (todo of query.data(); track todo.id) {
+          <li>{{ todo.text }}</li>
+        }
+      </ul>
+ } + ` +}) +export class TodoListComponent { + query = injectLiveQuery((q) => + q.from({ todos: todosCollection }) + .where(({ todos }) => not(todos.completed)) + .orderBy(({ todos }) => todos.created_at, "asc") + ) +} +``` + +`@tanstack/angular-db` re-exports everything from `@tanstack/db`. + +## Inject Function + +### injectLiveQuery + +Returns an object with Angular `Signal` properties — call with `()` in templates: + +```typescript +// Static query — no reactive dependencies +const query = injectLiveQuery((q) => + q.from({ todo: todoCollection }) +) +// query.data() → Array +// query.status() → CollectionStatus | 'disabled' +// query.isLoading(), query.isReady(), query.isError() +// query.state() → Map +// query.collection() → Collection | null + +// Reactive params — re-runs when params change +const query = injectLiveQuery({ + params: () => ({ minPriority: this.minPriority() }), + query: ({ params, q }) => + q.from({ todo: todoCollection }) + .where(({ todo }) => gt(todo.priority, params.minPriority)) +}) + +// Config object +const query = injectLiveQuery({ + query: (q) => q.from({ todo: todoCollection }), + gcTime: 60000, +}) + +// Pre-created collection +const query = injectLiveQuery(preloadedCollection) + +// Conditional query — return undefined/null to disable +const query = injectLiveQuery({ + params: () => ({ userId: this.userId() }), + query: ({ params, q }) => { + if (!params.userId) return undefined + return q.from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)) + } +}) +``` + +## Angular-Specific Patterns + +### Reactive params with signals + +```typescript +@Component({ + selector: 'app-filtered-todos', + standalone: true, + template: `
<div>{{ query.data().length }} todos</div>
` +}) +export class FilteredTodosComponent { + minPriority = signal(5) + + query = injectLiveQuery({ + params: () => ({ minPriority: this.minPriority() }), + query: ({ params, q }) => + q.from({ todos: todosCollection }) + .where(({ todos }) => gt(todos.priority, params.minPriority)) + }) +} +``` + +When `params()` return value changes, the previous collection is disposed and a new query is created. + +### Signal inputs (Angular 17+) + +```typescript +@Component({ + selector: 'app-user-todos', + standalone: true, + template: `
<div>{{ query.data().length }} todos</div>
` +}) +export class UserTodosComponent { + userId = input.required() + + query = injectLiveQuery({ + params: () => ({ userId: this.userId() }), + query: ({ params, q }) => + q.from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)) + }) +} +``` + +### Legacy @Input (Angular 16) + +```typescript +export class UserTodosComponent { + @Input({ required: true }) userId!: number + + query = injectLiveQuery({ + params: () => ({ userId: this.userId }), + query: ({ params, q }) => + q.from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.userId, params.userId)) + }) +} +``` + +### Template syntax + +Angular 17+ control flow: + +```html +@if (query.isLoading()) { +
+  <div>Loading...</div>
+} @else {
+  @for (todo of query.data(); track todo.id) {
+    <li>{{ todo.text }}</li>
+  }
+}
+```
+
+Angular 16 structural directives:
+
+```html
+<div *ngIf="query.isLoading()">Loading...</div>
+<ul *ngIf="!query.isLoading()">
+  <li *ngFor="let todo of query.data()">{{ todo.text }}</li>
  • +``` + +## Common Mistakes + +### CRITICAL Using injectLiveQuery outside injection context + +Wrong: + +```typescript +export class TodoComponent { + ngOnInit() { + this.query = injectLiveQuery((q) => q.from({ todo: todoCollection })) + } +} +``` + +Correct: + +```typescript +export class TodoComponent { + query = injectLiveQuery((q) => q.from({ todo: todoCollection })) +} +``` + +`injectLiveQuery` calls `assertInInjectionContext` internally — it must be called during construction (field initializer or constructor), not in lifecycle hooks. + +Source: packages/angular-db/src/index.ts + +### HIGH Using query function for reactive values instead of params + +Wrong: + +```typescript +export class FilteredComponent { + status = signal('active') + + query = injectLiveQuery((q) => + q.from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.status, this.status())) + ) +} +``` + +Correct: + +```typescript +export class FilteredComponent { + status = signal('active') + + query = injectLiveQuery({ + params: () => ({ status: this.status() }), + query: ({ params, q }) => + q.from({ todo: todoCollection }) + .where(({ todo }) => eq(todo.status, params.status)) + }) +} +``` + +The plain query function overload does not track Angular signal reads. Use the `params` pattern to make reactive values trigger query re-creation. + +Source: packages/angular-db/src/index.ts + +### MEDIUM Forgetting to call signals in templates + +Wrong: + +```html +
<div>{{ query.data.length }}</div>
    +``` + +Correct: + +```html +
<div>{{ query.data().length }}</div>
    +``` + +All return values are Angular signals. Without `()`, you get the signal object, not the value. + +See also: db-core/live-queries/SKILL.md — for query builder API. + +See also: db-core/mutations-optimistic/SKILL.md — for mutation patterns. diff --git a/packages/db/package.json b/packages/db/package.json index 9e43b3858..6f09ba46a 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -41,7 +41,9 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills", + "!skills/_artifacts" ], "dependencies": { "@standard-schema/spec": "^1.1.0", diff --git a/packages/db/skills/db-core/SKILL.md b/packages/db/skills/db-core/SKILL.md new file mode 100644 index 000000000..ff0555879 --- /dev/null +++ b/packages/db/skills/db-core/SKILL.md @@ -0,0 +1,59 @@ +--- +name: db-core +description: > + TanStack DB core concepts: createCollection with queryCollectionOptions, + electricCollectionOptions, powerSyncCollectionOptions, rxdbCollectionOptions, + trailbaseCollectionOptions, localOnlyCollectionOptions. Live queries via + query builder (from, where, join, select, groupBy, orderBy, limit). Optimistic + mutations with draft proxy (collection.insert, collection.update, + collection.delete). createOptimisticAction, createTransaction, + createPacedMutations. Entry point for all TanStack DB skills. +type: core +library: db +library_version: "0.5.30" +--- + +# TanStack DB — Core Concepts + +TanStack DB is a reactive client-side data store. It loads data into typed +collections from any backend (REST APIs, sync engines, local storage), provides +sub-millisecond live queries via differential dataflow, and supports instant +optimistic mutations with automatic rollback. + +In React projects, import from `@tanstack/react-db` — it re-exports everything +from `@tanstack/db` plus React hooks. + +## Sub-Skills + +| Need to... 
| Read | +| ----------------------------------------------- | --------------------------------------- | +| Create a collection, pick an adapter, add schema | db-core/collection-setup/SKILL.md | +| Query data with where, join, groupBy, select | db-core/live-queries/SKILL.md | +| Insert, update, delete with optimistic UI | db-core/mutations-optimistic/SKILL.md | +| Build a custom sync adapter | db-core/custom-adapter/SKILL.md | +| Preload collections in route loaders | meta-framework/SKILL.md | +| Add offline transaction queueing | offline/SKILL.md (in @tanstack/offline-transactions) | + +For framework-specific hooks: + +| Framework | Read | +| --------- | ---------------------------- | +| React | react-db/SKILL.md | +| Vue | vue-db/SKILL.md | +| Svelte | svelte-db/SKILL.md | +| Solid | solid-db/SKILL.md | +| Angular | angular-db/SKILL.md | + +## Quick Decision Tree + +- Setting up for the first time? → db-core/collection-setup +- Building queries on collection data? → db-core/live-queries +- Writing data / handling optimistic state? → db-core/mutations-optimistic +- Using React hooks? → react-db +- Preloading in route loaders (Start, Next, Remix)? → meta-framework +- Building an adapter for a new backend? → db-core/custom-adapter +- Need offline transaction persistence? → offline + +## Version + +Targets @tanstack/db v0.5.30. diff --git a/packages/db/skills/db-core/collection-setup/SKILL.md b/packages/db/skills/db-core/collection-setup/SKILL.md new file mode 100644 index 000000000..27ff71984 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/SKILL.md @@ -0,0 +1,406 @@ +--- +name: db-core/collection-setup +description: > + Creating typed collections with createCollection. 
Adapter selection: + queryCollectionOptions (REST/TanStack Query), electricCollectionOptions + (ElectricSQL real-time sync), powerSyncCollectionOptions (PowerSync SQLite), + rxdbCollectionOptions (RxDB), trailbaseCollectionOptions (TrailBase), + localOnlyCollectionOptions, localStorageCollectionOptions. CollectionConfig + options: getKey, schema, sync, gcTime, autoIndex, syncMode (eager/on-demand/ + progressive). StandardSchema validation with Zod/Valibot/ArkType. Collection + lifecycle (idle/loading/ready/error). Adapter-specific sync patterns including + Electric txid tracking and Query direct writes. +type: sub-skill +library: db +library_version: "0.5.30" +sources: + - "TanStack/db:docs/overview.md" + - "TanStack/db:docs/guides/schemas.md" + - "TanStack/db:docs/collections/query-collection.md" + - "TanStack/db:docs/collections/electric-collection.md" + - "TanStack/db:docs/collections/powersync-collection.md" + - "TanStack/db:docs/collections/rxdb-collection.md" + - "TanStack/db:docs/collections/trailbase-collection.md" + - "TanStack/db:packages/db/src/collection/index.ts" +--- + +This skill builds on db-core. Read it first for the overall mental model. + +# Collection Setup & Schema + +## Setup + +```ts +import { createCollection } from "@tanstack/react-db" +import { queryCollectionOptions } from "@tanstack/query-db-collection" +import { QueryClient } from "@tanstack/query-core" +import { z } from "zod" + +const queryClient = new QueryClient() + +const todoSchema = z.object({ + id: z.number(), + text: z.string(), + completed: z.boolean().default(false), + created_at: z.union([z.string(), z.date()]) + .transform(val => typeof val === "string" ? 
new Date(val) : val), +}) + +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: async () => { + const res = await fetch("/api/todos") + return res.json() + }, + queryClient, + getKey: (item) => item.id, + schema: todoSchema, + onInsert: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + await todoCollection.utils.refetch() + }, + onUpdate: async ({ transaction }) => { + const mut = transaction.mutations[0] + await api.todos.update(mut.key, mut.changes) + await todoCollection.utils.refetch() + }, + onDelete: async ({ transaction }) => { + await api.todos.delete(transaction.mutations[0].key) + await todoCollection.utils.refetch() + }, + }) +) +``` + +## Choosing an Adapter + +| Backend | Adapter | Package | +|---------|---------|---------| +| REST API / TanStack Query | `queryCollectionOptions` | `@tanstack/query-db-collection` | +| ElectricSQL (real-time Postgres) | `electricCollectionOptions` | `@tanstack/electric-db-collection` | +| PowerSync (SQLite offline) | `powerSyncCollectionOptions` | `@tanstack/powersync-db-collection` | +| RxDB (reactive database) | `rxdbCollectionOptions` | `@tanstack/rxdb-db-collection` | +| TrailBase (event streaming) | `trailbaseCollectionOptions` | `@tanstack/trailbase-db-collection` | +| No backend (UI state) | `localOnlyCollectionOptions` | `@tanstack/db` | +| Browser localStorage | `localStorageCollectionOptions` | `@tanstack/db` | + +Use `localOnlyCollectionOptions` for prototyping — the collection API is uniform, so swapping to a real backend later only changes the options creator. 
+ +## Sync Modes + +```ts +queryCollectionOptions({ + syncMode: "eager", // default — loads all data upfront + // syncMode: "on-demand", // loads only what live queries request + // syncMode: "progressive", // (Electric only) query subset first, full sync in background +}) +``` + +| Mode | Best for | Data size | +|------|----------|-----------| +| `eager` | Mostly-static datasets | <10k rows | +| `on-demand` | Search, catalogs, large tables | >50k rows | +| `progressive` | Collaborative apps needing instant first paint | Any | + +## Core Patterns + +### Local-only collection for prototyping + +```ts +import { createCollection, localOnlyCollectionOptions } from "@tanstack/react-db" + +const todoCollection = createCollection( + localOnlyCollectionOptions({ + getKey: (item) => item.id, + initialData: [ + { id: 1, text: "Learn TanStack DB", completed: false }, + ], + }) +) +``` + +### Schema with type transformations + +```ts +const schema = z.object({ + id: z.number(), + title: z.string(), + due_date: z.union([z.string(), z.date()]) + .transform(val => typeof val === "string" ? new Date(val) : val), + priority: z.number().default(0), +}) +``` + +Use `z.union([z.string(), z.date()])` for transformed fields — this ensures `TInput` is a superset of `TOutput` so that `update()` works correctly with the draft proxy. + +### ElectricSQL with txid tracking + +```ts +import { electricCollectionOptions } from "@tanstack/electric-db-collection" + +const todoCollection = createCollection( + electricCollectionOptions({ + shapeOptions: { url: "/api/electric/todos" }, + getKey: (item) => item.id, + onInsert: async ({ transaction }) => { + const res = await api.todos.create(transaction.mutations[0].modified) + return { txid: res.txid } + }, + }) +) +``` + +The returned `txid` tells the collection to hold optimistic state until Electric streams back that transaction. 
+ +## Common Mistakes + +### CRITICAL queryFn returning empty array deletes all data + +Wrong: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const res = await fetch("/api/todos?status=active") + return res.json() // returns [] when no active todos — deletes everything + }, +}) +``` + +Correct: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const res = await fetch("/api/todos") // fetch complete state + return res.json() + }, + // Use on-demand mode + live query where() for filtering + syncMode: "on-demand", +}) +``` + +`queryFn` result is treated as complete server state. Returning `[]` means "server has no items", deleting all existing collection data. + +Source: docs/collections/query-collection.md + +### CRITICAL Not using the correct adapter for your backend + +Wrong: + +```ts +const todoCollection = createCollection( + localOnlyCollectionOptions({ + getKey: (item) => item.id, + }) +) +// Manually fetching and inserting... +``` + +Correct: + +```ts +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: async () => fetch("/api/todos").then(r => r.json()), + queryClient, + getKey: (item) => item.id, + }) +) +``` + +Each backend has a dedicated adapter that handles sync, mutation handlers, and utilities. Using `localOnlyCollectionOptions` or bare `createCollection` for a real backend bypasses all of this. 
+ +Source: maintainer interview + +### CRITICAL Electric txid queried outside mutation transaction + +Wrong: + +```ts +// Backend handler +app.post("/api/todos", async (req, res) => { + const txid = await generateTxId(sql) // WRONG: separate transaction + await sql`INSERT INTO todos ${sql(req.body)}` + res.json({ txid }) +}) +``` + +Correct: + +```ts +app.post("/api/todos", async (req, res) => { + let txid + await sql.begin(async (tx) => { + txid = await generateTxId(tx) // CORRECT: same transaction + await tx`INSERT INTO todos ${tx(req.body)}` + }) + res.json({ txid }) +}) +``` + +`pg_current_xact_id()` must be queried inside the same SQL transaction as the mutation. Otherwise the txid doesn't match and `awaitTxId` stalls forever. + +Source: docs/collections/electric-collection.md + +### CRITICAL queryFn returning partial data without merging + +Wrong: + +```ts +queryCollectionOptions({ + queryFn: async () => { + const newItems = await fetch("/api/todos?since=" + lastSync) + return newItems.json() // only new items — everything else deleted + }, +}) +``` + +Correct: + +```ts +queryCollectionOptions({ + queryFn: async (ctx) => { + const existing = ctx.queryClient.getQueryData(["todos"]) || [] + const newItems = await fetch("/api/todos?since=" + lastSync).then(r => r.json()) + return [...existing, ...newItems] + }, +}) +``` + +`queryFn` result replaces all collection data. For incremental fetches, merge with existing data. + +Source: docs/collections/query-collection.md + +### HIGH Using async schema validation + +Wrong: + +```ts +const schema = z.object({ + email: z.string().refine(async (val) => { + const exists = await checkEmail(val) + return !exists + }), +}) +``` + +Correct: + +```ts +const schema = z.object({ + email: z.string().email(), +}) +// Do async validation in the mutation handler instead +``` + +Schema validation must be synchronous. Async validation throws `SchemaMustBeSynchronousError` at mutation time. 
+ +Source: packages/db/src/collection/mutations.ts:101 + +### HIGH getKey returning undefined for some items + +Wrong: + +```ts +createCollection(queryCollectionOptions({ + getKey: (item) => item.metadata.id, // undefined if metadata missing +})) +``` + +Correct: + +```ts +createCollection(queryCollectionOptions({ + getKey: (item) => item.id, // always present +})) +``` + +`getKey` must return a defined value for every item. Throws `UndefinedKeyError` otherwise. + +Source: packages/db/src/collection/mutations.ts:148 + +### HIGH TInput not a superset of TOutput with schema transforms + +Wrong: + +```ts +const schema = z.object({ + created_at: z.string().transform(val => new Date(val)), +}) +// update() fails — draft.created_at is Date but schema only accepts string +``` + +Correct: + +```ts +const schema = z.object({ + created_at: z.union([z.string(), z.date()]) + .transform(val => typeof val === "string" ? new Date(val) : val), +}) +``` + +When a schema transforms types, `TInput` must accept both the pre-transform and post-transform types for `update()` to work with the draft proxy. + +Source: docs/guides/schemas.md + +### HIGH React Native missing crypto.randomUUID polyfill + +TanStack DB uses `crypto.randomUUID()` internally. React Native doesn't provide this. Install `react-native-random-uuid` and import it at your app entry point. + +Source: docs/overview.md + +### MEDIUM Providing both explicit type parameter and schema + +Wrong: + +```ts +createCollection<Todo>(queryCollectionOptions({ schema: todoSchema, ... })) +``` + +Correct: + +```ts +createCollection(queryCollectionOptions({ schema: todoSchema, ... })) +``` + +When a schema is provided, the collection infers types from it. An explicit generic creates conflicting type constraints. 
+ +Source: docs/overview.md + +### MEDIUM Direct writes overridden by next query sync + +Wrong: + +```ts +todoCollection.utils.writeInsert(newItem) +// Next queryFn execution replaces all data, losing the direct write +``` + +Correct: + +```ts +todoCollection.utils.writeInsert(newItem) +// Use staleTime to prevent immediate refetch +// Or return { refetch: false } from mutation handlers +``` + +Direct writes update the collection immediately, but the next `queryFn` returns complete server state which overwrites them. + +Source: docs/collections/query-collection.md + +## References + +- [TanStack Query adapter](references/query-adapter.md) +- [ElectricSQL adapter](references/electric-adapter.md) +- [PowerSync adapter](references/powersync-adapter.md) +- [RxDB adapter](references/rxdb-adapter.md) +- [TrailBase adapter](references/trailbase-adapter.md) +- [Local adapters (local-only, localStorage)](references/local-adapters.md) +- [Schema validation patterns](references/schema-patterns.md) + +See also: db-core/mutations-optimistic/SKILL.md — mutation handlers configured here execute during mutations. + +See also: db-core/custom-adapter/SKILL.md — for building your own adapter. 
diff --git a/packages/db/skills/db-core/collection-setup/references/electric-adapter.md b/packages/db/skills/db-core/collection-setup/references/electric-adapter.md new file mode 100644 index 000000000..c97385b0d --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/electric-adapter.md @@ -0,0 +1,158 @@ +# Electric Adapter Reference + +## Install + +```bash +pnpm add @tanstack/electric-db-collection @tanstack/react-db +``` + +## Required Config + +```typescript +import { createCollection } from "@tanstack/react-db" +import { electricCollectionOptions } from "@tanstack/electric-db-collection" + +const collection = createCollection( + electricCollectionOptions({ + shapeOptions: { url: "/api/todos" }, + getKey: (item) => item.id, + }) +) +``` + +- `shapeOptions` -- ElectricSQL ShapeStream config; `url` is the proxy URL to Electric +- `getKey` -- extracts unique key from each item + +## Optional Config + +| Option | Default | Description | +|---|---|---| +| `id` | (none) | Unique collection identifier | +| `schema` | (none) | StandardSchema validator | +| `shapeOptions.params` | (none) | Additional shape params (e.g. `{ table: 'todos' }`) | +| `onInsert` | (none) | Persistence handler; should return `{ txid }` | +| `onUpdate` | (none) | Persistence handler; should return `{ txid }` | +| `onDelete` | (none) | Persistence handler; should return `{ txid }` | + +## Three Sync Strategies + +### 1. Txid Return (Recommended) + +Handler returns `{ txid }`. Client waits for that txid in the Electric stream. + +```typescript +onInsert: async ({ transaction }) => { + const response = await api.todos.create(transaction.mutations[0].modified) + return { txid: response.txid } +}, +``` + +### 2. awaitMatch (Custom Match) + +Use when txids are unavailable. Import `isChangeMessage` to match on message content. 
+ +```typescript +import { isChangeMessage } from "@tanstack/electric-db-collection" + +onInsert: async ({ transaction, collection }) => { + const newItem = transaction.mutations[0].modified + await api.todos.create(newItem) + await collection.utils.awaitMatch( + (message) => + isChangeMessage(message) && + message.headers.operation === "insert" && + message.value.text === newItem.text, + 5000 // timeout ms, defaults to 3000 + ) +}, +``` + +### 3. Simple Timeout (Prototyping) + +```typescript +onInsert: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + await new Promise((resolve) => setTimeout(resolve, 2000)) +}, +``` + +## Utility Methods (`collection.utils`) + +- `awaitTxId(txid, timeout?)` -- wait for txid in Electric stream; default timeout 30s + +- `awaitMatch(matchFn, timeout?)` -- wait for message matching predicate; default timeout 3000ms + +### Helper Exports + +```typescript +import { isChangeMessage, isControlMessage } from "@tanstack/electric-db-collection" +// isChangeMessage(msg) -- true for insert/update/delete +// isControlMessage(msg) -- true for up-to-date/must-refetch +``` + +## generateTxId Backend Pattern + +The txid **must** be queried inside the same Postgres transaction as the mutation. + +```typescript +async function generateTxId(tx: any): Promise<number> { + const result = await tx`SELECT pg_current_xact_id()::xid::text as txid` + const txid = result[0]?.txid + if (txid === undefined) throw new Error("Failed to get transaction ID") + return parseInt(txid, 10) +} + +async function createTodo(data) { + let txid!: number + const result = await sql.begin(async (tx) => { + txid = await generateTxId(tx) // INSIDE the transaction + const [todo] = await tx`INSERT INTO todos ${tx(data)} RETURNING *` + return todo + }) + return { todo: result, txid } +} +``` + +Querying txid outside the transaction produces a mismatched txid -- `awaitTxId` stalls indefinitely. 
+ +## Debug Logging + +```javascript +localStorage.debug = "ts/db:electric" +``` + +## Complete Example + +```typescript +import { createCollection } from "@tanstack/react-db" +import { electricCollectionOptions } from "@tanstack/electric-db-collection" +import { z } from "zod" + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1), + completed: z.boolean(), + created_at: z.string(), +}) + +const todosCollection = createCollection( + electricCollectionOptions({ + id: "todos", + schema: todoSchema, + getKey: (item) => item.id, + shapeOptions: { url: "/api/todos", params: { table: "todos" } }, + onInsert: async ({ transaction }) => { + const response = await api.todos.create(transaction.mutations[0].modified) + return { txid: response.txid } + }, + onUpdate: async ({ transaction }) => { + const { original, changes } = transaction.mutations[0] + const response = await api.todos.update({ where: { id: original.id }, data: changes }) + return { txid: response.txid } + }, + onDelete: async ({ transaction }) => { + const response = await api.todos.delete(transaction.mutations[0].key) + return { txid: response.txid } + }, + }) +) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/local-adapters.md b/packages/db/skills/db-core/collection-setup/references/local-adapters.md new file mode 100644 index 000000000..182eb45c3 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/local-adapters.md @@ -0,0 +1,188 @@ +# Local Adapters Reference + +Both adapters are included in the core package. + +## Install + +```bash +pnpm add @tanstack/react-db +``` + +--- + +## localOnlyCollectionOptions + +In-memory only. No persistence. No cross-tab sync. 
+ +### Required Config + +```typescript +import { createCollection, localOnlyCollectionOptions } from "@tanstack/react-db" + +const collection = createCollection( + localOnlyCollectionOptions({ + id: "ui-state", + getKey: (item) => item.id, + }) +) +``` + +- `id` -- unique collection identifier +- `getKey` -- extracts unique key from each item + +### Optional Config + +| Option | Default | Description | +|---|---|---| +| `schema` | (none) | StandardSchema validator | +| `initialData` | (none) | Array of items to populate on creation | +| `onInsert` | (none) | Handler before confirming inserts | +| `onUpdate` | (none) | Handler before confirming updates | +| `onDelete` | (none) | Handler before confirming deletes | + +### Direct Mutations + +```typescript +collection.insert({ id: "theme", mode: "dark" }) +collection.update("theme", (draft) => { draft.mode = "light" }) +collection.delete("theme") +``` + +### initialData + +```typescript +localOnlyCollectionOptions({ + id: "ui-state", + getKey: (item) => item.id, + initialData: [ + { id: "sidebar", isOpen: false }, + { id: "theme", mode: "light" }, + ], +}) +``` + +### acceptMutations in Manual Transactions + +When using `createTransaction`, call `collection.utils.acceptMutations(transaction)` in `mutationFn`: + +```typescript +import { createTransaction } from "@tanstack/react-db" + +const tx = createTransaction({ + mutationFn: async ({ transaction }) => { + // Handle server mutations first, then: + localData.utils.acceptMutations(transaction) + }, +}) +tx.mutate(() => { localData.insert({ id: "draft-1", data: "..." }) }) +await tx.commit() +``` + +--- + +## localStorageCollectionOptions + +Persists to `localStorage`. Cross-tab sync via storage events. Survives reloads. 
+ +### Required Config + +```typescript +import { createCollection, localStorageCollectionOptions } from "@tanstack/react-db" + +const collection = createCollection( + localStorageCollectionOptions({ + id: "user-preferences", + storageKey: "app-user-prefs", + getKey: (item) => item.id, + }) +) +``` + +- `id` -- unique collection identifier +- `storageKey` -- localStorage key for all collection data +- `getKey` -- extracts unique key from each item + +### Optional Config + +| Option | Default | Description | +|---|---|---| +| `schema` | (none) | StandardSchema validator | +| `storage` | `localStorage` | Custom storage (`sessionStorage` or any localStorage-compatible API) | +| `storageEventApi` | `window` | Event API for cross-tab sync | +| `onInsert` | (none) | Handler on insert | +| `onUpdate` | (none) | Handler on update | +| `onDelete` | (none) | Handler on delete | + +### Using sessionStorage + +```typescript +localStorageCollectionOptions({ + id: "session-data", + storageKey: "session-key", + storage: sessionStorage, + getKey: (item) => item.id, +}) +``` + +### Custom Storage Backend + +Provide any object with `getItem`, `setItem`, `removeItem`: + +```typescript +const encryptedStorage = { + getItem: (key) => { const v = localStorage.getItem(key); return v ? decrypt(v) : null }, + setItem: (key, value) => localStorage.setItem(key, encrypt(value)), + removeItem: (key) => localStorage.removeItem(key), +} +localStorageCollectionOptions({ id: "secure", storageKey: "enc-key", storage: encryptedStorage, getKey: (i) => i.id }) +``` + +### acceptMutations + +Same as LocalOnly -- call `collection.utils.acceptMutations(transaction)` in manual transactions. 
+ +--- + +## Comparison + +| Feature | LocalOnly | LocalStorage | +|---|---|---| +| Persistence | None (in-memory) | localStorage | +| Cross-tab sync | No | Yes | +| Survives reload | No | Yes | +| Performance | Fastest | Fast | +| Size limits | Memory | ~5-10MB | + +## Complete Example + +```typescript +import { createCollection, localOnlyCollectionOptions, localStorageCollectionOptions } from "@tanstack/react-db" +import { z } from "zod" + +// In-memory UI state +const modalState = createCollection( + localOnlyCollectionOptions({ + id: "modal-state", + getKey: (item) => item.id, + initialData: [{ id: "confirm-delete", isOpen: false }, { id: "settings", isOpen: false }], + }) +) + +// Persistent user prefs +const userPrefs = createCollection( + localStorageCollectionOptions({ + id: "user-preferences", + storageKey: "app-user-prefs", + getKey: (item) => item.id, + schema: z.object({ + id: z.string(), + theme: z.enum(["light", "dark", "auto"]), + language: z.string(), + notifications: z.boolean(), + }), + }) +) + +modalState.update("settings", (draft) => { draft.isOpen = true }) +userPrefs.insert({ id: "current-user", theme: "dark", language: "en", notifications: true }) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md b/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md new file mode 100644 index 000000000..56a0e32f6 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/powersync-adapter.md @@ -0,0 +1,197 @@ +# PowerSync Adapter Reference + +## Install + +```bash +pnpm add @tanstack/powersync-db-collection @powersync/web @journeyapps/wa-sqlite +``` + +## Required Config + +```typescript +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" +import { Schema, Table, column, PowerSyncDatabase } from "@powersync/web" + +const APP_SCHEMA = new Schema({ + documents: new Table({ + name: column.text, + 
author: column.text, + created_at: column.text, + archived: column.integer, + }), +}) + +const db = new PowerSyncDatabase({ + database: { dbFilename: "app.sqlite" }, + schema: APP_SCHEMA, +}) + +const documentsCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.documents, + }) +) +``` + +- `database` -- `PowerSyncDatabase` instance +- `table` -- PowerSync `Table` from schema (provides `getKey` and type inference) + +## Optional Config (with defaults) + +| Option | Default | Description | +|---|---|---| +| `schema` | (none) | StandardSchema for mutation validation | +| `deserializationSchema` | (none) | Transforms SQLite types to output types; required when input types differ from SQLite | +| `onDeserializationError` | (none) | Fatal error handler; **required** when using `schema` or `deserializationSchema` | +| `serializer` | (none) | Per-field functions to serialize output types back to SQLite | +| `syncBatchSize` | `1000` | Batch size for initial sync | + +### SQLite Type Mapping + +| PowerSync Column | TypeScript Type | +|---|---| +| `column.text` | `string \| null` | +| `column.integer` | `number \| null` | +| `column.real` | `number \| null` | + +All columns nullable by default. `id: string` is always included automatically. + +## Conversions (4 patterns) + +### 1. Type Inference Only (no schema) + +```typescript +const collection = createCollection( + powerSyncCollectionOptions({ database: db, table: APP_SCHEMA.props.documents }) +) +// Input/Output: { id: string, name: string | null, created_at: string | null, ... } +``` + +### 2. 
Schema Validation (same SQLite types) + +```typescript +const schema = z.object({ + id: z.string(), + name: z.string().min(3), + author: z.string(), + created_at: z.string(), + archived: z.number(), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: (error) => { /* fatal */ }, + }) +) +``` + +### 3. Transform SQLite to Rich Output Types + +```typescript +const schema = z.object({ + id: z.string(), + name: z.string().nullable(), + created_at: z.string().nullable().transform((val) => val ? new Date(val) : null), + archived: z.number().nullable().transform((val) => val != null ? val > 0 : null), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, table: APP_SCHEMA.props.documents, schema, + onDeserializationError: (error) => { /* fatal */ }, + serializer: { created_at: (value) => value ? value.toISOString() : null }, + }) +) +// Input: { created_at: string | null, ... } +// Output: { created_at: Date | null, archived: boolean | null, ... } +``` + +### 4. 
Custom Input + Output with deserializationSchema + +```typescript +const schema = z.object({ + id: z.string(), name: z.string(), created_at: z.date(), archived: z.boolean(), +}) +const deserializationSchema = z.object({ + id: z.string(), name: z.string(), + created_at: z.string().transform((val) => new Date(val)), + archived: z.number().transform((val) => val > 0), +}) +const collection = createCollection( + powerSyncCollectionOptions({ + database: db, table: APP_SCHEMA.props.documents, + schema, deserializationSchema, + onDeserializationError: (error) => { /* fatal */ }, + }) +) +// Input: { created_at: Date, archived: boolean } +// Output: { created_at: Date, archived: boolean } +``` + +## Metadata Tracking + +Enable on the table, then pass metadata with operations: + +```typescript +const APP_SCHEMA = new Schema({ + documents: new Table({ name: column.text }, { trackMetadata: true }), +}) + +await collection.insert( + { id: crypto.randomUUID(), name: "Report" }, + { metadata: { source: "web-app", userId: "user-123" } } +).isPersisted.promise +``` + +Metadata appears as `entry.metadata` (stringified JSON) in PowerSync `CrudEntry`. 
+ +## Advanced Transactions + +```typescript +import { createTransaction } from "@tanstack/react-db" +import { PowerSyncTransactor } from "@tanstack/powersync-db-collection" + +const tx = createTransaction({ + autoCommit: false, + mutationFn: async ({ transaction }) => { + await new PowerSyncTransactor({ database: db }).applyTransaction(transaction) + }, +}) +tx.mutate(() => { + documentsCollection.insert({ id: crypto.randomUUID(), name: "Doc 1", created_at: new Date().toISOString() }) +}) +await tx.commit() +await tx.isPersisted.promise +``` + +## Complete Example + +```typescript +import { Schema, Table, column, PowerSyncDatabase } from "@powersync/web" +import { createCollection } from "@tanstack/react-db" +import { powerSyncCollectionOptions } from "@tanstack/powersync-db-collection" +import { z } from "zod" + +const APP_SCHEMA = new Schema({ + tasks: new Table({ title: column.text, due_date: column.text, completed: column.integer }), +}) +const db = new PowerSyncDatabase({ database: { dbFilename: "app.sqlite" }, schema: APP_SCHEMA }) + +const taskSchema = z.object({ + id: z.string(), + title: z.string().nullable(), + due_date: z.string().nullable().transform((val) => val ? new Date(val) : null), + completed: z.number().nullable().transform((val) => val != null ? 
val > 0 : null), +}) + +const tasksCollection = createCollection( + powerSyncCollectionOptions({ + database: db, + table: APP_SCHEMA.props.tasks, + schema: taskSchema, + onDeserializationError: (error) => console.error("Fatal:", error), + syncBatchSize: 500, + }) +) +``` diff --git a/packages/db/skills/db-core/collection-setup/references/query-adapter.md b/packages/db/skills/db-core/collection-setup/references/query-adapter.md new file mode 100644 index 000000000..dd9c57560 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/query-adapter.md @@ -0,0 +1,167 @@ +# Query Adapter Reference + +## Install + +```bash +pnpm add @tanstack/query-db-collection @tanstack/query-core @tanstack/db +``` + +## Required Config + +```typescript +import { QueryClient } from "@tanstack/query-core" +import { createCollection } from "@tanstack/db" +import { queryCollectionOptions } from "@tanstack/query-db-collection" + +const queryClient = new QueryClient() +const collection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: async () => fetch("/api/todos").then((r) => r.json()), + queryClient, + getKey: (item) => item.id, + }) +) +``` + +- `queryKey` -- TanStack Query cache key +- `queryFn` -- fetches data; must be provided (throws `QueryFnRequiredError` if missing) +- `queryClient` -- `QueryClient` instance +- `getKey` -- extracts unique key from each item + +## Optional Config (with defaults) + +| Option | Default | Description | +|---|---|---| +| `id` | (none) | Unique collection identifier | +| `schema` | (none) | StandardSchema validator | +| `select` | (none) | Extracts array items when wrapped with metadata | +| `enabled` | `true` | Whether query runs automatically | +| `refetchInterval` | `0` | Polling interval in ms; 0 = disabled | +| `retry` | (TQ default) | Retry config for failed queries | +| `retryDelay` | (TQ default) | Delay between retries | +| `staleTime` | (TQ default) | How long data is considered fresh | +| `meta` 
| (none) | Metadata passed to queryFn context | +| `startSync` | `true` | Start syncing immediately | +| `syncMode` | (none) | Set `"on-demand"` for predicate push-down | + +### Persistence Handlers + +```typescript +onInsert: async ({ transaction }) => { + await api.createTodos(transaction.mutations.map((m) => m.modified)) + // return nothing or { refetch: true } to trigger refetch + // return { refetch: false } to skip refetch +}, +onUpdate: async ({ transaction }) => { + await api.updateTodos(transaction.mutations.map((m) => ({ id: m.key, changes: m.changes }))) +}, +onDelete: async ({ transaction }) => { + await api.deleteTodos(transaction.mutations.map((m) => m.key)) +}, +``` + +## Utility Methods (`collection.utils`) + +- `refetch(opts?)` -- manual refetch; `opts.throwOnError` (default `false`); bypasses `enabled: false` +- `writeInsert(data)` -- insert directly to synced store (bypasses optimistic system) +- `writeUpdate(data)` -- update directly in synced store +- `writeDelete(keys)` -- delete directly from synced store +- `writeUpsert(data)` -- insert or update directly +- `writeBatch(callback)` -- multiple write ops atomically + +Direct writes bypass optimistic updates, do NOT trigger refetches, and update TQ cache immediately. + +```typescript +collection.utils.writeBatch(() => { + collection.utils.writeInsert({ id: "1", text: "Buy milk" }) + collection.utils.writeUpdate({ id: "2", completed: true }) + collection.utils.writeDelete("3") +}) +``` + +## Predicate Push-Down (syncMode: "on-demand") + +Query predicates (where, orderBy, limit, offset) passed to `queryFn` via `ctx.meta.loadSubsetOptions`. 
+ +```typescript +import { parseLoadSubsetOptions } from "@tanstack/query-db-collection" + +queryFn: async (ctx) => { + const { filters, sorts, limit, offset } = parseLoadSubsetOptions(ctx.meta?.loadSubsetOptions) + // filters: [{ field: ['category'], operator: 'eq', value: 'electronics' }] + // sorts: [{ field: ['price'], direction: 'asc', nulls: 'last' }] +} +``` + +### Expression Helpers (from `@tanstack/db`) + +- `parseLoadSubsetOptions(opts)` -- returns `{ filters, sorts, limit, offset }` +- `parseWhereExpression(expr, { handlers })` -- custom handlers per operator +- `parseOrderByExpression(expr)` -- returns `[{ field, direction, nulls }]` +- `extractSimpleComparisons(expr)` -- flat AND-ed comparisons only + +Supported operators: `eq`, `gt`, `gte`, `lt`, `lte`, `and`, `or`, `in` + +## Dynamic queryKey + +```typescript +queryKey: (opts) => { + const parsed = parseLoadSubsetOptions(opts) + const key = ["products"] + parsed.filters.forEach((f) => key.push(`${f.field.join(".")}-${f.operator}-${f.value}`)) + if (parsed.limit) key.push(`limit-${parsed.limit}`) + return key +}, +``` + +## Complete Example + +```typescript +import { QueryClient } from "@tanstack/query-core" +import { createCollection } from "@tanstack/react-db" +import { queryCollectionOptions, parseLoadSubsetOptions } from "@tanstack/query-db-collection" + +const queryClient = new QueryClient() + +const productsCollection = createCollection( + queryCollectionOptions({ + id: "products", + queryKey: ["products"], + queryClient, + getKey: (item) => item.id, + syncMode: "on-demand", + queryFn: async (ctx) => { + const { filters, sorts, limit } = parseLoadSubsetOptions(ctx.meta?.loadSubsetOptions) + const params = new URLSearchParams() + filters.forEach(({ field, operator, value }) => { + params.set(`${field.join(".")}_${operator}`, String(value)) + }) + if (sorts.length > 0) { + params.set("sort", sorts.map((s) => `${s.field.join(".")}:${s.direction}`).join(",")) + } + if (limit) params.set("limit", 
String(limit)) + return fetch(`/api/products?${params}`).then((r) => r.json()) + }, + onInsert: async ({ transaction }) => { + const serverItems = await api.createProducts(transaction.mutations.map((m) => m.modified)) + productsCollection.utils.writeBatch(() => { + serverItems.forEach((item) => productsCollection.utils.writeInsert(item)) + }) + return { refetch: false } + }, + onUpdate: async ({ transaction }) => { + await api.updateProducts(transaction.mutations.map((m) => ({ id: m.key, changes: m.changes }))) + }, + onDelete: async ({ transaction }) => { + await api.deleteProducts(transaction.mutations.map((m) => m.key)) + }, + }) +) +``` + +## Key Behaviors + +- `queryFn` result is treated as **complete state** -- missing items are deleted +- Empty array from `queryFn` deletes all items +- Direct writes update TQ cache but are overridden by subsequent `queryFn` results diff --git a/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md b/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md new file mode 100644 index 000000000..b1fa295e4 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/rxdb-adapter.md @@ -0,0 +1,146 @@ +# RxDB Adapter Reference + +## Install + +```bash +pnpm add @tanstack/rxdb-db-collection rxdb @tanstack/react-db +``` + +## Required Config + +```typescript +import { createCollection } from "@tanstack/react-db" +import { rxdbCollectionOptions } from "@tanstack/rxdb-db-collection" + +const todosCollection = createCollection( + rxdbCollectionOptions({ + rxCollection: db.todos, + }) +) +``` + +- `rxCollection` -- the underlying RxDB `RxCollection` instance + +## Optional Config (with defaults) + +| Option | Default | Description | +|---|---|---| +| `id` | (none) | Unique collection identifier | +| `schema` | (none) | StandardSchema validator (RxDB has its own validation; this adds TanStack DB-side validation) | +| `startSync` | `true` | Start ingesting RxDB data immediately | +| 
`syncBatchSize` | `1000` | Max documents per batch during initial sync from RxDB; only affects initial load, not live updates | +| `onInsert` | (default: `bulkUpsert`) | Override default insert persistence | +| `onUpdate` | (default: `patch`) | Override default update persistence | +| `onDelete` | (default: `bulkRemove`) | Override default delete persistence | + +## Key Behavior: String Keys + +RxDB primary keys are always strings. The `getKey` function is derived from the RxDB schema's `primaryKey` field automatically. All key values will be strings. + +## RxDB Setup (prerequisite) + +```typescript +import { createRxDatabase } from "rxdb/plugins/core" +import { getRxStorageLocalstorage } from "rxdb/plugins/storage-localstorage" + +const db = await createRxDatabase({ + name: "my-app", + storage: getRxStorageLocalstorage(), +}) + +await db.addCollections({ + todos: { + schema: { + title: "todos", + version: 0, + type: "object", + primaryKey: "id", + properties: { + id: { type: "string", maxLength: 100 }, + text: { type: "string" }, + completed: { type: "boolean" }, + }, + required: ["id", "text", "completed"], + }, + }, +}) +``` + +## Backend Sync (optional, RxDB-managed) + +Replication is configured directly on the RxDB collection, independent of TanStack DB. Changes from replication flow into the TanStack DB collection via RxDB's change stream automatically. 
+ +```typescript +import { replicateRxCollection } from "rxdb/plugins/replication" + +const replicationState = replicateRxCollection({ + collection: db.todos, + pull: { handler: myPullHandler }, + push: { handler: myPushHandler }, +}) +``` + +## Data Flow + +- Writes via `todosCollection.insert/update/delete` persist to RxDB +- Direct RxDB writes (or replication changes) flow into the TanStack collection via change streams +- Initial sync loads data in batches of `syncBatchSize` +- Ongoing updates stream one by one via RxDB's change feed + +## Indexes + +RxDB schema indexes do not affect TanStack DB query performance (queries run in-memory). Indexes may still matter if you query RxDB directly, use filtered replication, or selectively load subsets. + +## Complete Example + +```typescript +import { createRxDatabase } from "rxdb/plugins/core" +import { getRxStorageLocalstorage } from "rxdb/plugins/storage-localstorage" +import { createCollection } from "@tanstack/react-db" +import { rxdbCollectionOptions } from "@tanstack/rxdb-db-collection" +import { z } from "zod" + +type Todo = { id: string; text: string; completed: boolean } + +const db = await createRxDatabase({ + name: "my-todos", + storage: getRxStorageLocalstorage(), +}) + +await db.addCollections({ + todos: { + schema: { + title: "todos", + version: 0, + type: "object", + primaryKey: "id", + properties: { + id: { type: "string", maxLength: 100 }, + text: { type: "string" }, + completed: { type: "boolean" }, + }, + required: ["id", "text", "completed"], + }, + }, +}) + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1), + completed: z.boolean(), +}) + +const todosCollection = createCollection( + rxdbCollectionOptions({ + rxCollection: db.todos, + schema: todoSchema, + startSync: true, + syncBatchSize: 500, + }) +) + +// Usage +todosCollection.insert({ id: crypto.randomUUID(), text: "Buy milk", completed: false }) +todosCollection.update("some-id", (draft) => { draft.completed = true }) 
+todosCollection.delete("some-id") +``` diff --git a/packages/db/skills/db-core/collection-setup/references/schema-patterns.md b/packages/db/skills/db-core/collection-setup/references/schema-patterns.md new file mode 100644 index 000000000..3fe979afe --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/schema-patterns.md @@ -0,0 +1,179 @@ +# Schema Patterns Reference + +## StandardSchema Integration + +TanStack DB accepts any [StandardSchema](https://standardschema.dev)-compatible library via the `schema` option. + +### Supported Libraries + +- [Zod](https://zod.dev), [Valibot](https://valibot.dev), [ArkType](https://arktype.io), [Effect Schema](https://effect.website/docs/schema/introduction/) + +## TInput vs TOutput + +- **TInput** -- type accepted by `insert()` and `update()` +- **TOutput** -- type stored in collection and returned from queries + +When no transforms exist, TInput === TOutput. + +```typescript +const schema = z.object({ + id: z.string(), + created_at: z.string().transform((val) => new Date(val)), +}) +// TInput: { id: string, created_at: string } +// TOutput: { id: string, created_at: Date } +``` + +## Union Pattern for Transforms (Required) + +When a schema transforms A to B, TInput **must** accept both A and B. During `update()`, the draft contains TOutput data. + +```typescript +// WRONG -- update() fails because draft.created_at is Date but schema expects string +z.string().transform((val) => new Date(val)) + +// CORRECT +z.union([z.string(), z.date()]) + .transform((val) => (typeof val === "string" ? 
new Date(val) : val)) +// TInput: string | Date, TOutput: Date +``` + +## Defaults + +```typescript +const schema = z.object({ + id: z.string(), + text: z.string(), + completed: z.boolean().default(false), + priority: z.number().default(0), + tags: z.array(z.string()).default([]), + created_at: z.date().default(() => new Date()), +}) +// insert({ id: "1", text: "Task" }) -- missing fields auto-filled +``` + +## Computed Fields + +```typescript +const schema = z.object({ + id: z.string(), + first_name: z.string(), + last_name: z.string(), +}).transform((data) => ({ + ...data, + full_name: `${data.first_name} ${data.last_name}`, +})) +``` + +## Combining Defaults with Transforms + +```typescript +const schema = z.object({ + created_at: z.string() + .default(() => new Date().toISOString()) + .transform((val) => new Date(val)), +}) +``` + +## Validation Examples + +```typescript +// Basic constraints +z.string().min(3).max(100) +z.string().email() +z.number().int().positive() +z.enum(["active", "inactive"]) +z.array(z.string()).min(1) + +// Optional/nullable +z.string().optional() // can be omitted +z.string().nullable() // can be null + +// Cross-field +z.object({ start: z.string(), end: z.string() }) + .refine((d) => new Date(d.end) > new Date(d.start), "End must be after start") + +// Custom +z.string().refine((v) => /^[a-zA-Z0-9_]+$/.test(v), "Alphanumeric only") +``` + +## SchemaValidationError + +```typescript +import { SchemaValidationError } from "@tanstack/db" + +try { + collection.insert({ id: "1", email: "bad", age: -5 }) +} catch (error) { + if (error instanceof SchemaValidationError) { + error.type // "insert" or "update" + error.message // "Validation failed with 2 issues" + error.issues // [{ path: ["email"], message: "Invalid email" }, ...] 
+ } +} +``` + +## Scope + +- Schemas validate **client mutations only** (`insert()`, `update()`) +- Server/sync data is NOT validated by the schema +- Validation is synchronous, runs on every mutation + +## Where TOutput Appears + +- Data stored in collection and returned from queries +- `PendingMutation.modified` +- Mutation handler `transaction.mutations[].modified` + +## Performance + +Keep transforms simple -- validation runs synchronously on every mutation. + +## Complete Example + +```typescript +import { z } from "zod" +import { createCollection, SchemaValidationError } from "@tanstack/react-db" +import { queryCollectionOptions } from "@tanstack/query-db-collection" + +const todoSchema = z.object({ + id: z.string(), + text: z.string().min(1, "Text is required"), + completed: z.boolean().default(false), + priority: z.enum(["low", "medium", "high"]).default("medium"), + created_at: z + .union([z.string(), z.date()]) + .transform((val) => (typeof val === "string" ? new Date(val) : val)) + .default(() => new Date()), +}) + +const todosCollection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: async () => fetch("/api/todos").then((r) => r.json()), + queryClient, + getKey: (item) => item.id, + schema: todoSchema, + onInsert: async ({ transaction }) => { + const todo = transaction.mutations[0].modified + await api.todos.create({ ...todo, created_at: todo.created_at.toISOString() }) + }, + }) +) + +// Defaults and transforms applied +todosCollection.insert({ id: "1", text: "Buy groceries" }) +// => { id: "1", text: "Buy groceries", completed: false, priority: "medium", created_at: Date } + +// Update works -- draft contains TOutput, schema accepts via union +todosCollection.update("1", (draft) => { draft.completed = true }) + +// Error handling +try { + todosCollection.insert({ id: "2", text: "" }) +} catch (e) { + if (e instanceof SchemaValidationError) { + console.log(e.issues) // [{ path: ["text"], message: "Text is required" }] + 
} +} +``` diff --git a/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md b/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md new file mode 100644 index 000000000..0d2322369 --- /dev/null +++ b/packages/db/skills/db-core/collection-setup/references/trailbase-adapter.md @@ -0,0 +1,147 @@ +# TrailBase Adapter Reference + +## Install + +```bash +pnpm add @tanstack/trailbase-db-collection @tanstack/react-db trailbase +``` + +## Required Config + +```typescript +import { createCollection } from "@tanstack/react-db" +import { trailBaseCollectionOptions } from "@tanstack/trailbase-db-collection" +import { initClient } from "trailbase" + +const trailBaseClient = initClient("https://your-trailbase-instance.com") + +const todosCollection = createCollection( + trailBaseCollectionOptions({ + id: "todos", + recordApi: trailBaseClient.records("todos"), + getKey: (item) => item.id, + }) +) +``` + +- `id` -- unique collection identifier +- `recordApi` -- TrailBase Record API instance from `trailBaseClient.records(tableName)` +- `getKey` -- extracts unique key from each item + +## Optional Config + +| Option | Default | Description | +|---|---|---| +| `schema` | (none) | StandardSchema validator | +| `parse` | (none) | Object mapping field names to functions that transform data coming FROM TrailBase | +| `serialize` | (none) | Object mapping field names to functions that transform data going TO TrailBase | +| `onInsert` | (none) | Handler called on insert | +| `onUpdate` | (none) | Handler called on update | +| `onDelete` | (none) | Handler called on delete | + +## Conversions (parse/serialize) + +TrailBase uses different data formats (e.g. Unix timestamps). Use `parse` and `serialize` for field-level transformations. 
+ +```typescript +type SelectTodo = { + id: string + text: string + created_at: number // Unix timestamp from TrailBase + completed: boolean +} + +type Todo = { + id: string + text: string + created_at: Date // Rich JS type for app usage + completed: boolean +} + +const collection = createCollection( + trailBaseCollectionOptions({ + id: "todos", + recordApi: trailBaseClient.records("todos"), + getKey: (item) => item.id, + parse: { + created_at: (ts) => new Date(ts * 1000), + }, + serialize: { + created_at: (date) => Math.floor(date.valueOf() / 1000), + }, + }) +) +``` + +## Real-time Subscriptions + +Automatic when `enable_subscriptions` is enabled on the TrailBase server. No additional client config needed -- the collection subscribes to changes automatically. + +## Persistence Handlers + +```typescript +onInsert: async ({ transaction }) => { + const newItem = transaction.mutations[0].modified +}, +onUpdate: async ({ transaction }) => { + const { original, modified } = transaction.mutations[0] +}, +onDelete: async ({ transaction }) => { + const deletedItem = transaction.mutations[0].original +}, +``` + +TrailBase handles persistence through the Record API automatically. Custom handlers are for additional logic only. 
+
+## Complete Example
+
+```typescript
+import { createCollection } from "@tanstack/react-db"
+import { trailBaseCollectionOptions } from "@tanstack/trailbase-db-collection"
+import { initClient } from "trailbase"
+import { z } from "zod"
+
+const trailBaseClient = initClient("https://your-trailbase-instance.com")
+
+const todoSchema = z.object({
+  id: z.string(),
+  text: z.string(),
+  completed: z.boolean(),
+  created_at: z.date(),
+})
+
+type SelectTodo = {
+  id: string
+  text: string
+  completed: boolean
+  created_at: number
+}
+
+type Todo = z.infer<typeof todoSchema>
+
+const todosCollection = createCollection(
+  trailBaseCollectionOptions({
+    id: "todos",
+    recordApi: trailBaseClient.records("todos"),
+    getKey: (item) => item.id,
+    schema: todoSchema,
+    parse: {
+      created_at: (ts) => new Date(ts * 1000),
+    },
+    serialize: {
+      created_at: (date) => Math.floor(date.valueOf() / 1000),
+    },
+    onInsert: async ({ transaction }) => {
+      console.log("Created:", transaction.mutations[0].modified)
+    },
+  })
+)
+
+// Usage
+todosCollection.insert({
+  id: crypto.randomUUID(),
+  text: "Review PR",
+  completed: false,
+  created_at: new Date(),
+})
+```
diff --git a/packages/db/skills/db-core/custom-adapter/SKILL.md b/packages/db/skills/db-core/custom-adapter/SKILL.md
new file mode 100644
index 000000000..4596df9fb
--- /dev/null
+++ b/packages/db/skills/db-core/custom-adapter/SKILL.md
@@ -0,0 +1,278 @@
+---
+name: db-core/custom-adapter
+description: >
+  Building custom collection adapters for new backends. SyncConfig interface:
+  sync function receiving begin, write, commit, markReady, truncate primitives.
+  ChangeMessage format (insert, update, delete). loadSubset for on-demand sync.
+  LoadSubsetOptions (where, orderBy, limit, cursor). Expression parsing:
+  parseWhereExpression, parseOrderByExpression, extractSimpleComparisons,
+  parseLoadSubsetOptions. Collection options creator pattern. rowUpdateMode
+  (partial vs full). Subscription lifecycle and cleanup functions.
+type: sub-skill +library: db +library_version: "0.5.30" +sources: + - "TanStack/db:docs/guides/collection-options-creator.md" + - "TanStack/db:packages/db/src/collection/sync.ts" +--- + +This skill builds on db-core and db-core/collection-setup. Read those first. + +# Custom Adapter Authoring + +## Setup + +```ts +import { createCollection } from "@tanstack/db" +import type { SyncConfig, CollectionConfig } from "@tanstack/db" + +interface MyItem { + id: string + name: string +} + +function myBackendCollectionOptions(config: { + endpoint: string + getKey: (item: T) => string +}): CollectionConfig { + return { + getKey: config.getKey, + sync: { + sync: ({ begin, write, commit, markReady, collection }) => { + let isInitialSyncComplete = false + const bufferedEvents: Array = [] + + // 1. Subscribe to real-time events FIRST + const unsubscribe = myWebSocket.subscribe(config.endpoint, (event) => { + if (!isInitialSyncComplete) { + bufferedEvents.push(event) + return + } + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() + }) + + // 2. Fetch initial data + fetch(config.endpoint).then(async (res) => { + const items = await res.json() + begin() + for (const item of items) { + write({ type: "insert", value: item }) + } + commit() + + // 3. Process buffered events + isInitialSyncComplete = true + for (const event of bufferedEvents) { + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() + } + + // 4. Signal readiness + markReady() + }) + + // 5. 
Return cleanup function + return () => { + unsubscribe() + } + }, + rowUpdateMode: "partial", + }, + onInsert: async ({ transaction }) => { + await fetch(config.endpoint, { + method: "POST", + body: JSON.stringify(transaction.mutations[0].modified), + }) + }, + onUpdate: async ({ transaction }) => { + const mut = transaction.mutations[0] + await fetch(`${config.endpoint}/${mut.key}`, { + method: "PATCH", + body: JSON.stringify(mut.changes), + }) + }, + onDelete: async ({ transaction }) => { + await fetch(`${config.endpoint}/${transaction.mutations[0].key}`, { + method: "DELETE", + }) + }, + } +} +``` + +## Core Patterns + +### ChangeMessage format + +```ts +// Insert +write({ type: "insert", value: item }) + +// Update (partial — only changed fields) +write({ type: "update", key: itemId, value: partialItem }) + +// Update (full row replacement) +write({ type: "update", key: itemId, value: fullItem }) +// Set rowUpdateMode: "full" in sync config + +// Delete +write({ type: "delete", key: itemId, value: item }) +``` + +### On-demand sync with loadSubset + +```ts +import { parseLoadSubsetOptions } from "@tanstack/db" + +sync: { + sync: ({ begin, write, commit, markReady }) => { + // Initial sync... + markReady() + return () => {} + }, + loadSubset: async (options) => { + const { filters, sorts, limit, offset } = parseLoadSubsetOptions(options) + // filters: [{ field: ['category'], operator: 'eq', value: 'electronics' }] + // sorts: [{ field: ['price'], direction: 'asc', nulls: 'last' }] + const params = new URLSearchParams() + for (const f of filters) { + params.set(f.field.join("."), `${f.operator}:${f.value}`) + } + const res = await fetch(`/api/items?${params}`) + return res.json() + }, +} +``` + +### Managing optimistic state duration + +Mutation handlers must not resolve until server changes have synced back to the collection. Five strategies: + +1. **Refetch** (simplest): `await collection.utils.refetch()` +2. 
**Transaction ID**: return `{ txid }` and track via sync stream +3. **ID-based tracking**: await specific record ID appearing in sync stream +4. **Version/timestamp**: wait until sync stream catches up to mutation time +5. **Provider method**: `await backend.waitForPendingWrites()` + +### Expression parsing for predicate push-down + +```ts +import { + parseWhereExpression, + parseOrderByExpression, + extractSimpleComparisons, +} from "@tanstack/db" + +// In loadSubset or queryFn: +const comparisons = extractSimpleComparisons(options.where) +// Returns: [{ field: ['name'], operator: 'eq', value: 'John' }] + +const orderBy = parseOrderByExpression(options.orderBy) +// Returns: [{ field: ['created_at'], direction: 'desc', nulls: 'last' }] +``` + +## Common Mistakes + +### CRITICAL Not calling markReady() in sync implementation + +Wrong: + +```ts +sync: ({ begin, write, commit }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: "insert", value: item })) + commit() + // forgot markReady()! + }) +} +``` + +Correct: + +```ts +sync: ({ begin, write, commit, markReady }) => { + fetchData().then(items => { + begin() + items.forEach(item => write({ type: "insert", value: item })) + commit() + markReady() + }) +} +``` + +`markReady()` transitions the collection to "ready" status. Without it, live queries never resolve and `useLiveSuspenseQuery` hangs forever in Suspense. 
+ +Source: docs/guides/collection-options-creator.md + +### HIGH Race condition: subscribing after initial fetch + +Wrong: + +```ts +sync: ({ begin, write, commit, markReady }) => { + fetchAll().then(data => { + writeAll(data) + subscribe(onChange) // changes during fetch are LOST + markReady() + }) +} +``` + +Correct: + +```ts +sync: ({ begin, write, commit, markReady }) => { + const buffer = [] + subscribe((event) => { + if (!ready) { buffer.push(event); return } + begin(); write(event); commit() + }) + fetchAll().then(data => { + writeAll(data) + ready = true + buffer.forEach(e => { begin(); write(e); commit() }) + markReady() + }) +} +``` + +Subscribe to real-time events before fetching initial data. Buffer events during the fetch, then replay them after the initial sync completes. + +Source: docs/guides/collection-options-creator.md + +### HIGH write() called without begin() + +Wrong: + +```ts +onMessage((event) => { + write({ type: event.type, key: event.id, value: event.data }) + commit() +}) +``` + +Correct: + +```ts +onMessage((event) => { + begin() + write({ type: event.type, key: event.id, value: event.data }) + commit() +}) +``` + +Sync data must be written within a transaction (`begin` → `write` → `commit`). Calling `write()` without `begin()` throws `NoPendingSyncTransactionWriteError`. + +Source: packages/db/src/collection/sync.ts:110 + +### HIGH Tension: simplicity vs. correctness in sync + +This domain's patterns conflict with collection-setup. Getting-started simplicity (localOnly, eager mode) conflicts with production correctness (on-demand sync, race condition prevention, proper markReady handling). Agents optimizing for quick setup tend to skip buffering, markReady, and cleanup functions. + +See also: db-core/collection-setup/SKILL.md § Common Mistakes + +See also: db-core/collection-setup/SKILL.md — for built-in adapter patterns to model after. 
diff --git a/packages/db/skills/db-core/live-queries/SKILL.md b/packages/db/skills/db-core/live-queries/SKILL.md new file mode 100644 index 000000000..506f02c61 --- /dev/null +++ b/packages/db/skills/db-core/live-queries/SKILL.md @@ -0,0 +1,338 @@ +--- +name: db-core/live-queries +description: > + Query builder fluent API: from, where, join, leftJoin, rightJoin, innerJoin, + fullJoin, select, fn.select, groupBy, having, orderBy, limit, offset, distinct, + findOne. Operators: eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, + isUndefined, and, or, not. Aggregates: count, sum, avg, min, max. String + functions: upper, lower, length, concat, coalesce. Math: add. $selected + namespace. createLiveQueryCollection. Derived collections. Predicate push-down. + Incremental view maintenance via differential dataflow (d2ts). +type: sub-skill +library: db +library_version: "0.5.30" +sources: + - "TanStack/db:docs/guides/live-queries.md" + - "TanStack/db:packages/db/src/query/builder/index.ts" + - "TanStack/db:packages/db/src/query/compiler/index.ts" +--- + +# Live Queries + +> This skill builds on db-core. + +TanStack DB live queries use a SQL-like fluent query builder to create **reactive derived collections** that automatically update when underlying data changes. The query engine compiles queries into incremental view maintenance (IVM) pipelines using differential dataflow (d2ts), so only deltas are recomputed. + +All operators, string functions, math functions, and aggregates are incrementally maintained. Prefer them over equivalent JS code. + +## Setup + +Minimal example using the core API (no framework hooks): + +```ts +import { + createCollection, + createLiveQueryCollection, + liveQueryCollectionOptions, + eq, +} from '@tanstack/db' + +// Assume usersCollection is already created via createCollection(...) 
+ +// Option 1: createLiveQueryCollection shorthand +const activeUsers = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + email: user.email, + })) +) + +// Option 2: full options via liveQueryCollectionOptions +const activeUsers2 = createCollection( + liveQueryCollectionOptions({ + query: (q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .select(({ user }) => ({ + id: user.id, + name: user.name, + })), + getKey: (user) => user.id, + }) +) + +// The result is a live collection -- iterate, subscribe, or use as source +for (const user of activeUsers) { + console.log(user.name) +} +``` + +## Core Patterns + +### 1. Filtering with where + operators + +Chain `.where()` calls (ANDed together) using expression operators. Use `and()`, `or()`, `not()` for complex logic. + +```ts +import { eq, gt, or, and, not, inArray, like } from '@tanstack/db' + +const results = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) + .where(({ user }) => + and( + gt(user.age, 18), + or(eq(user.role, 'admin'), eq(user.role, 'moderator')), + not(inArray(user.id, bannedIds)) + ) + ) +) +``` + +Boolean column references work directly: + +```ts +.where(({ user }) => user.active) // bare boolean ref +.where(({ user }) => not(user.suspended)) // negated boolean ref +``` + +### 2. Joining two collections + +Join conditions **must** use `eq()` (equality only -- IVM constraint). Default join type is `left`. Convenience methods: `leftJoin`, `rightJoin`, `innerJoin`, `fullJoin`. 
+ +```ts +import { eq } from '@tanstack/db' + +const userPosts = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .innerJoin({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + userName: user.name, + postTitle: post.title, + })) +) +``` + +Multiple joins: + +```ts +q.from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .join({ comment: commentsCollection }, ({ post, comment }) => + eq(post.id, comment.postId) + ) +``` + +### 3. Aggregation with groupBy + having + +Use `groupBy` to group rows, then aggregate in `select`. Filter groups with `having`. The `$selected` namespace lets `having` and `orderBy` reference fields defined in `select`. + +```ts +import { count, sum, gt } from '@tanstack/db' + +const topCustomers = createLiveQueryCollection((q) => + q + .from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .having(({ $selected }) => gt($selected.totalSpent, 1000)) + .orderBy(({ $selected }) => $selected.totalSpent, 'desc') + .limit(10) +) +``` + +Without `groupBy`, aggregates in `select` treat the entire collection as one group: + +```ts +const stats = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .select(({ user }) => ({ + totalUsers: count(user.id), + avgAge: avg(user.age), + })) +) +``` + +### 4. Standalone derived collection with createLiveQueryCollection + +Derived collections are themselves collections. 
Use one as a source for another query to cache intermediate results: + +```ts +// Base derived collection +const activeUsers = createLiveQueryCollection((q) => + q + .from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) +) + +// Second query uses the derived collection as its source +const activeUserPosts = createLiveQueryCollection((q) => + q + .from({ user: activeUsers }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) + .select(({ user, post }) => ({ + userName: user.name, + postTitle: post.title, + })) +) +``` + +Create derived collections once at module scope and reuse them. Do not recreate on every render or navigation. + +## Common Mistakes + +### CRITICAL: Using === instead of eq() + +JavaScript `===` in a where callback returns a boolean primitive, not an expression object. Throws `InvalidWhereExpressionError`. + +```ts +// WRONG +q.from({ user: usersCollection }) + .where(({ user }) => user.active === true) + +// CORRECT +q.from({ user: usersCollection }) + .where(({ user }) => eq(user.active, true)) +``` + +### CRITICAL: Filtering in JS instead of query operators + +JS `.filter()` / `.map()` on the result array throws away incremental maintenance -- the JS code re-runs from scratch on every change. + +```ts +// WRONG -- re-runs filter on every change +const { data } = useLiveQuery((q) => q.from({ todos: todosCollection })) +const active = data.filter((t) => t.completed === false) + +// CORRECT -- incrementally maintained +const { data } = useLiveQuery((q) => + q + .from({ todos: todosCollection }) + .where(({ todos }) => eq(todos.completed, false)) +) +``` + +### HIGH: Not using the full operator set + +The library provides string functions (`upper`, `lower`, `length`, `concat`), math (`add`), utility (`coalesce`), and aggregates (`count`, `sum`, `avg`, `min`, `max`). All are incrementally maintained. Prefer them over JS equivalents. 
+ +```ts +// WRONG +.fn.select((row) => ({ + name: row.user.name.toUpperCase(), + total: row.order.price + row.order.tax, +})) + +// CORRECT +.select(({ user, order }) => ({ + name: upper(user.name), + total: add(order.price, order.tax), +})) +``` + +### HIGH: .distinct() without .select() + +`distinct()` deduplicates by the selected columns. Without `select()`, throws `DistinctRequiresSelectError`. + +```ts +// WRONG +q.from({ user: usersCollection }).distinct() + +// CORRECT +q.from({ user: usersCollection }) + .select(({ user }) => ({ country: user.country })) + .distinct() +``` + +### HIGH: .having() without .groupBy() + +`having` filters aggregated groups. Without `groupBy`, there are no groups. Throws `HavingRequiresGroupByError`. + +```ts +// WRONG +q.from({ order: ordersCollection }) + .having(({ order }) => gt(count(order.id), 5)) + +// CORRECT +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .having(({ order }) => gt(count(order.id), 5)) +``` + +### HIGH: .limit() / .offset() without .orderBy() + +Without deterministic ordering, limit/offset results are non-deterministic and cannot be incrementally maintained. Throws `LimitOffsetRequireOrderByError`. + +```ts +// WRONG +q.from({ user: usersCollection }).limit(10) + +// CORRECT +q.from({ user: usersCollection }) + .orderBy(({ user }) => user.name) + .limit(10) +``` + +### HIGH: Join condition using non-eq() operator + +The differential dataflow join operator only supports equality joins. Using `gt()`, `like()`, etc. throws `JoinConditionMustBeEqualityError`. 
+ +```ts +// WRONG +q.from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + gt(user.id, post.userId) + ) + +// CORRECT +q.from({ user: usersCollection }) + .join({ post: postsCollection }, ({ user, post }) => + eq(user.id, post.userId) + ) +``` + +### MEDIUM: Passing source directly instead of {alias: collection} + +`from()` and `join()` require sources wrapped as `{alias: collection}`. Passing the collection directly throws `InvalidSourceTypeError`. + +```ts +// WRONG +q.from(usersCollection) + +// CORRECT +q.from({ users: usersCollection }) +``` + +## Tension: Query expressiveness vs. IVM constraints + +The query builder looks like SQL but has constraints that SQL does not: +- **Equality joins only** -- `eq()` is the only allowed join condition operator. +- **orderBy required for limit/offset** -- non-deterministic pagination cannot be incrementally maintained. +- **distinct requires select** -- deduplication needs an explicit projection. +- **fn.select() cannot be used with groupBy()** -- the compiler must statically analyze select to discover aggregate functions. + +These constraints exist because the underlying d2ts differential dataflow engine requires them for correct incremental view maintenance. + +See also: react-db/SKILL.md for React hooks (`useLiveQuery`, `useLiveSuspenseQuery`, `useLiveInfiniteQuery`). + +## References + +- [Query Operators Reference](./references/operators.md) -- full signatures and examples for all operators, functions, and aggregates. diff --git a/packages/db/skills/db-core/live-queries/references/operators.md b/packages/db/skills/db-core/live-queries/references/operators.md new file mode 100644 index 000000000..4d036b496 --- /dev/null +++ b/packages/db/skills/db-core/live-queries/references/operators.md @@ -0,0 +1,286 @@ +# Query Operators Reference + +All operators are imported from `@tanstack/db` (also re-exported by `@tanstack/react-db` and other framework packages). 
+ +```ts +import { + // Comparison + eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isUndefined, + // Logical + and, or, not, + // Aggregate + count, sum, avg, min, max, + // String + upper, lower, length, concat, + // Math + add, + // Utility + coalesce, +} from '@tanstack/db' +``` + +--- + +## Comparison Operators + +### eq(left, right) -> BasicExpression\ + +Equality comparison. Works with any type. + +```ts +eq(user.id, 1) +eq(user.name, 'Alice') +``` + +### ne(left, right) -> BasicExpression\ + +Not-equal comparison. Inverse of `eq`. + +```ts +ne(user.role, 'banned') +``` + +### gt, gte, lt, lte (left, right) -> BasicExpression\ + +Ordering comparisons. Work with numbers, strings, dates. + +```ts +gt(user.age, 18) // greater than +gte(user.salary, 50000) // greater than or equal +lt(user.age, 65) // less than +lte(user.rating, 5) // less than or equal +gt(user.createdAt, new Date('2024-01-01')) +``` + +### like(left, right) -> BasicExpression\ + +Case-sensitive string pattern matching. Use `%` as wildcard. + +```ts +like(user.name, 'John%') // starts with John +like(user.email, '%@corp.com') // ends with @corp.com +``` + +### ilike(left, right) -> BasicExpression\ + +Case-insensitive string pattern matching. + +```ts +ilike(user.email, '%@gmail.com') +``` + +### inArray(value, array) -> BasicExpression\ + +Check if value is contained in an array. + +```ts +inArray(user.id, [1, 2, 3]) +inArray(user.role, ['admin', 'moderator']) +``` + +### isNull(value) -> BasicExpression\ + +Check if value is explicitly `null`. + +```ts +isNull(user.bio) +``` + +### isUndefined(value) -> BasicExpression\ + +Check if value is `undefined` (absent). Especially useful after left joins where unmatched rows produce `undefined`. + +```ts +isUndefined(profile) // no matching profile in left join +``` + +--- + +## Logical Operators + +### and(...conditions) -> BasicExpression\ + +Combine two or more conditions with AND logic. 
+ +```ts +and(eq(user.active, true), gt(user.age, 18)) +and(eq(user.active, true), gt(user.age, 18), eq(user.role, 'user')) +``` + +### or(...conditions) -> BasicExpression\ + +Combine two or more conditions with OR logic. + +```ts +or(eq(user.role, 'admin'), eq(user.role, 'moderator')) +``` + +### not(condition) -> BasicExpression\ + +Negate a condition. + +```ts +not(eq(user.active, false)) +not(inArray(user.id, bannedIds)) +``` + +--- + +## Aggregate Functions + +Used inside `.select()` with `.groupBy()`, or without `groupBy` to aggregate the entire collection as one group. + +### count(value) -> Aggregate\ + +Count non-null values in a group. + +```ts +count(user.id) +``` + +### sum(value), avg(value) -> Aggregate\ + +Sum or average of numeric values. + +```ts +sum(order.amount) +avg(user.salary) +``` + +### min(value), max(value) -> Aggregate\ + +Minimum/maximum value (numbers, strings, dates). + +```ts +min(order.amount) +max(user.createdAt) +``` + +--- + +## String Functions + +### upper(value), lower(value) -> BasicExpression\ + +Convert string case. + +```ts +upper(user.name) // 'ALICE' +lower(user.email) // 'alice@example.com' +``` + +### length(value) -> BasicExpression\ + +Get string or array length. + +```ts +length(user.name) // string length +length(user.tags) // array length +``` + +### concat(...values) -> BasicExpression\ + +Concatenate any number of values into a string. + +```ts +concat(user.firstName, ' ', user.lastName) +``` + +--- + +## Math Functions + +### add(left, right) -> BasicExpression\ + +Add two numeric values. + +```ts +add(order.price, order.tax) +add(user.salary, coalesce(user.bonus, 0)) +``` + +--- + +## Utility Functions + +### coalesce(...values) -> BasicExpression\ + +Return the first non-null, non-undefined value. 
+ +```ts +coalesce(user.displayName, user.name, 'Unknown') +coalesce(user.bonus, 0) +``` + +--- + +## $selected Namespace + +When a query has a `.select()` clause, the `$selected` namespace becomes available in `.orderBy()` and `.having()` callbacks. It provides access to the computed/aggregated fields defined in `select`. + +```ts +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .having(({ $selected }) => gt($selected.totalSpent, 1000)) + .orderBy(({ $selected }) => $selected.totalSpent, 'desc') +``` + +`$selected` is only available when `.select()` (or `.fn.select()`) has been called on the query. + +--- + +## Functional Variants (fn.select, fn.where, fn.having) + +Escape hatches for logic that cannot be expressed with declarative operators. These execute arbitrary JS on each row but **cannot be optimized** by the query compiler (no predicate push-down, no index use). + +### fn.select(callback) + +```ts +q.from({ user: usersCollection }) + .fn.select((row) => ({ + id: row.user.id, + domain: row.user.email.split('@')[1], + tier: row.user.salary > 100000 ? 'senior' : 'junior', + })) +``` + +**Limitation**: `fn.select()` cannot be used with `groupBy()`. The compiler must statically analyze select to discover aggregate functions. + +### fn.where(callback) + +```ts +q.from({ user: usersCollection }) + .fn.where((row) => + row.user.active && row.user.email.endsWith('@company.com') + ) +``` + +### fn.having(callback) + +Receives `$selected` when a `select()` clause exists. 
+ +```ts +q.from({ order: ordersCollection }) + .groupBy(({ order }) => order.customerId) + .select(({ order }) => ({ + customerId: order.customerId, + totalSpent: sum(order.amount), + orderCount: count(order.id), + })) + .fn.having(({ $selected }) => + $selected.totalSpent > 1000 && $selected.orderCount >= 3 + ) +``` + +### When to use functional variants + +- String manipulation not covered by `upper`/`lower`/`concat`/`like` (e.g., `split`, `slice`, regex) +- Complex conditional logic (ternaries, multi-branch) +- External function calls or lookups + +Prefer declarative operators whenever possible for incremental maintenance. diff --git a/packages/db/skills/db-core/mutations-optimistic/SKILL.md b/packages/db/skills/db-core/mutations-optimistic/SKILL.md new file mode 100644 index 000000000..947a31b60 --- /dev/null +++ b/packages/db/skills/db-core/mutations-optimistic/SKILL.md @@ -0,0 +1,356 @@ +--- +name: db-core/mutations-optimistic +description: > + collection.insert, collection.update (Immer-style draft proxy), + collection.delete. createOptimisticAction (onMutate + mutationFn). + createPacedMutations with debounceStrategy, throttleStrategy, queueStrategy. + createTransaction, getActiveTransaction, ambient transaction context. + Transaction lifecycle (pending/persisting/completed/failed). Mutation merging. + onInsert/onUpdate/onDelete handlers. PendingMutation type. Transaction.isPersisted. +type: sub-skill +library: db +library_version: "0.5.30" +sources: + - "TanStack/db:docs/guides/mutations.md" + - "TanStack/db:packages/db/src/transactions.ts" + - "TanStack/db:packages/db/src/optimistic-action.ts" + - "TanStack/db:packages/db/src/paced-mutations.ts" +--- + +# Mutations & Optimistic State + +> **Depends on:** `db-core/collection-setup` -- you need a configured collection +> (with `getKey`, sync adapter, and optionally `onInsert`/`onUpdate`/`onDelete` +> handlers) before you can mutate. 
+ +TanStack DB mutations follow a unidirectional loop: +**optimistic mutation -> handler persists to backend -> sync back -> confirmed state**. +Optimistic state is applied in the current tick and dropped when the handler resolves. + +--- + +## Setup -- Collection Write Operations + +### insert + +```ts +// Single item +todoCollection.insert({ + id: crypto.randomUUID(), + text: "Buy groceries", + completed: false, +}) + +// Multiple items +todoCollection.insert([ + { id: crypto.randomUUID(), text: "Buy groceries", completed: false }, + { id: crypto.randomUUID(), text: "Walk dog", completed: false }, +]) + +// With metadata / non-optimistic +todoCollection.insert(item, { metadata: { source: "import" } }) +todoCollection.insert(item, { optimistic: false }) +``` + +### update (Immer-style draft proxy) + +```ts +// Single item -- mutate the draft, do NOT reassign it +todoCollection.update(todo.id, (draft) => { + draft.completed = true + draft.completedAt = new Date() +}) + +// Multiple items +todoCollection.update([id1, id2], (drafts) => { + drafts.forEach((d) => { d.completed = true }) +}) + +// With metadata +todoCollection.update( + todo.id, + { metadata: { reason: "user-edit" } }, + (draft) => { draft.text = "Updated" } +) +``` + +### delete + +```ts +todoCollection.delete(todo.id) +todoCollection.delete([id1, id2]) +todoCollection.delete(todo.id, { metadata: { reason: "completed" } }) +``` + +All three return a `Transaction` object. Use `tx.isPersisted.promise` to await +persistence or catch rollback errors. + +--- + +## Core Patterns + +### 1. createOptimisticAction -- intent-based mutations + +Use when the optimistic change is a *guess* at how the server will transform +the data, or when you need to mutate multiple collections atomically. 
+ +```ts +import { createOptimisticAction } from "@tanstack/db" + +const likePost = createOptimisticAction({ + // MUST be synchronous -- applied in the current tick + onMutate: (postId) => { + postCollection.update(postId, (draft) => { + draft.likeCount += 1 + draft.likedByMe = true + }) + }, + mutationFn: async (postId, { transaction }) => { + await api.posts.like(postId) + // IMPORTANT: wait for server state to sync back before returning + await postCollection.utils.refetch() + }, +}) + +// Returns a Transaction +const tx = likePost(postId) +await tx.isPersisted.promise +``` + +Multi-collection example: + +```ts +const createProject = createOptimisticAction<{ name: string; ownerId: string }>({ + onMutate: ({ name, ownerId }) => { + projectCollection.insert({ id: crypto.randomUUID(), name, ownerId }) + userCollection.update(ownerId, (d) => { d.projectCount += 1 }) + }, + mutationFn: async ({ name, ownerId }) => { + await api.projects.create({ name, ownerId }) + await Promise.all([ + projectCollection.utils.refetch(), + userCollection.utils.refetch(), + ]) + }, +}) +``` + +### 2. 
createPacedMutations -- auto-save with debounce / throttle / queue + +```ts +import { createPacedMutations, debounceStrategy } from "@tanstack/db" + +const autoSaveNote = createPacedMutations({ + onMutate: (text) => { + noteCollection.update(noteId, (draft) => { draft.body = text }) + }, + mutationFn: async ({ transaction }) => { + const mutation = transaction.mutations[0] + await api.notes.update(mutation.key, mutation.changes) + await noteCollection.utils.refetch() + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// Each call resets the debounce timer; mutations merge into one transaction +autoSaveNote("Hello") +autoSaveNote("Hello, world") // only this version persists +``` + +Other strategies: + +```ts +import { throttleStrategy, queueStrategy } from "@tanstack/db" + +// Evenly spaced (sliders, scroll) +throttleStrategy({ wait: 200, leading: true, trailing: true }) + +// Sequential FIFO -- every mutation persisted in order +queueStrategy({ wait: 0, maxSize: 100 }) +``` + +### 3. createTransaction -- manual batching + +```ts +import { createTransaction } from "@tanstack/db" + +const tx = createTransaction({ + autoCommit: false, // wait for explicit commit() + mutationFn: async ({ transaction }) => { + await api.batchUpdate(transaction.mutations) + }, +}) + +tx.mutate(() => { + todoCollection.update(id1, (d) => { d.status = "reviewed" }) + todoCollection.update(id2, (d) => { d.status = "reviewed" }) +}) + +// User reviews... then commits or rolls back +await tx.commit() +// OR: tx.rollback() +``` + +Inside `tx.mutate(() => { ... })`, the transaction is pushed onto an ambient +stack. Any `collection.insert/update/delete` call joins the ambient transaction +automatically via `getActiveTransaction()`. + +### 4. 
Mutation handler with refetch (QueryCollection pattern) + +```ts +const todoCollection = createCollection( + queryCollectionOptions({ + queryKey: ["todos"], + queryFn: () => api.todos.getAll(), + getKey: (t) => t.id, + onInsert: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => api.todos.create(m.modified)) + ) + // IMPORTANT: handler must not resolve until server state is synced back + // QueryCollection auto-refetches after handler completes + }, + onUpdate: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => + api.todos.update(m.original.id, m.changes) + ) + ) + }, + onDelete: async ({ transaction }) => { + await Promise.all( + transaction.mutations.map((m) => api.todos.delete(m.original.id)) + ) + }, + }) +) +``` + +For ElectricCollection, return `{ txid }` instead of refetching: + +```ts +onUpdate: async ({ transaction }) => { + const txids = await Promise.all( + transaction.mutations.map(async (m) => { + const res = await api.todos.update(m.original.id, m.changes) + return res.txid + }) + ) + return { txid: txids } +} +``` + +--- + +## Common Mistakes + +### CRITICAL: Passing an object to update() instead of a draft callback + +```ts +// WRONG -- silently fails or throws +collection.update(id, { ...item, title: "new" }) + +// CORRECT -- mutate the draft proxy +collection.update(id, (draft) => { draft.title = "new" }) +``` + +### CRITICAL: Hallucinating mutation API signatures + +The most common AI-generated errors: +- Inventing handler signatures (e.g. `onMutate` on a collection config) +- Confusing `createOptimisticAction` with `createTransaction` +- Wrong PendingMutation property names (`mutation.data` does not exist -- + use `mutation.modified`, `mutation.changes`, `mutation.original`) +- Missing the ambient transaction pattern + +Always reference the exact types in `references/transaction-api.md`. 
+ +### CRITICAL: onMutate returning a Promise + +`onMutate` in `createOptimisticAction` **must be synchronous**. Optimistic state +is applied in the current tick. Returning a Promise throws +`OnMutateMustBeSynchronousError`. + +```ts +// WRONG +createOptimisticAction({ + onMutate: async (text) => { + collection.insert({ id: await generateId(), text }) + }, + ... +}) + +// CORRECT +createOptimisticAction({ + onMutate: (text) => { + collection.insert({ id: crypto.randomUUID(), text }) + }, + ... +}) +``` + +### CRITICAL: Mutations without handler or ambient transaction + +Collection mutations require either: +1. An `onInsert`/`onUpdate`/`onDelete` handler on the collection, OR +2. An ambient transaction from `createTransaction`/`createOptimisticAction` + +Without either, throws `MissingInsertHandlerError` (or the Update/Delete variant). + +### HIGH: Calling .mutate() after transaction is no longer pending + +Transactions only accept new mutations while in `pending` state. Calling +`mutate()` after `commit()` or `rollback()` throws +`TransactionNotPendingMutateError`. Create a new transaction instead. + +### HIGH: Changing primary key via update + +The update proxy detects key changes and throws `KeyUpdateNotAllowedError`. +Primary keys are immutable once set. If you need a different key, delete and +re-insert. + +### HIGH: Inserting item with duplicate key + +If an item with the same key already exists (synced or optimistic), throws +`DuplicateKeyError`. Always generate a unique key (e.g. `crypto.randomUUID()`) +or check before inserting. + +### HIGH: Not awaiting refetch after mutation in query collection handler + +The optimistic state is held only until the handler resolves. If the handler +returns before server state has synced back, optimistic state is dropped and +users see a flash of missing data. 
+ +```ts +// WRONG -- optimistic state dropped before new server state arrives +onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + // missing: await collection.utils.refetch() +} + +// CORRECT +onInsert: async ({ transaction }) => { + await api.createTodo(transaction.mutations[0].modified) + await collection.utils.refetch() +} +``` + +--- + +## Tension: Optimistic Speed vs. Data Consistency + +Instant optimistic updates create a window where client state diverges from +server state. If the handler fails, the rollback removes the optimistic state -- +which can discard user work the user thought was saved. Consider: +- Showing pending/saving indicators so users know state is unconfirmed +- Using `{ optimistic: false }` for destructive operations +- Designing idempotent server endpoints so retries are safe +- Handling `tx.isPersisted.promise` rejection to surface errors to the user + +--- + +## References + +- [Transaction API Reference](references/transaction-api.md) -- createTransaction config, + Transaction object, PendingMutation type, mutation merging rules, strategy types +- [TanStack DB Mutations Guide](https://tanstack.com/db/latest/docs/guides/mutations) diff --git a/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md b/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md new file mode 100644 index 000000000..af926ccf2 --- /dev/null +++ b/packages/db/skills/db-core/mutations-optimistic/references/transaction-api.md @@ -0,0 +1,207 @@ +# Transaction API Reference + +## createTransaction + +```ts +import { createTransaction } from "@tanstack/db" + +const tx = createTransaction({ + id?: string, // defaults to crypto.randomUUID() + autoCommit?: boolean, // default true -- commit after mutate() + mutationFn: MutationFn, // (params: { transaction }) => Promise + metadata?: Record, // custom data attached to the transaction +}) +``` + +## Transaction Object + +```ts 
+interface Transaction { + id: string + state: "pending" | "persisting" | "completed" | "failed" + mutations: Array> + autoCommit: boolean + createdAt: Date + sequenceNumber: number + metadata: Record + error?: { message: string; error: Error } + + // Deferred promise -- resolves when mutationFn completes, rejects on failure + isPersisted: { + promise: Promise> + resolve: (value: Transaction) => void + reject: (reason?: any) => void + } + + // Execute collection operations inside the ambient transaction context + mutate(callback: () => void): Transaction + + // Commit -- calls mutationFn, transitions to persisting -> completed|failed + commit(): Promise> + + // Rollback -- transitions to failed, also rolls back conflicting transactions + rollback(config?: { isSecondaryRollback?: boolean }): Transaction +} +``` + +**Lifecycle:** `pending` -> `persisting` -> `completed` | `failed` + +- `mutate()` only allowed in `pending` state (throws `TransactionNotPendingMutateError`) +- `commit()` only allowed in `pending` state (throws `TransactionNotPendingCommitError`) +- `rollback()` allowed in `pending` or `persisting` (throws `TransactionAlreadyCompletedRollbackError` if completed) +- Failed `mutationFn` automatically triggers `rollback()` +- Rollback cascades to other pending transactions sharing the same item keys + +## PendingMutation Type + +```ts +interface PendingMutation { + mutationId: string // unique id for this mutation + original: TOperation extends "insert" ? 
{} : T // state before mutation + modified: T // state after mutation + changes: Partial // only the changed fields + key: any // collection-local key + globalKey: string // globally unique key (collectionId + key) + type: TOperation // "insert" | "update" | "delete" + metadata: unknown // user-provided metadata + syncMetadata: Record // adapter-specific metadata + optimistic: boolean // whether applied optimistically (default true) + createdAt: Date + updatedAt: Date + collection: Collection // reference to the source collection +} +``` + +## Mutation Merging Rules + +When multiple mutations target the same item (same `globalKey`) within a +transaction, they merge: + +| Existing | Incoming | Result | Notes | +|----------|----------|--------|-------| +| insert | update | insert | Merge changes, keep empty original | +| insert | delete | *removed* | Both mutations cancel out | +| update | update | update | Union changes, keep first original | +| update | delete | delete | Delete dominates | +| delete | delete | delete | Replace with latest | +| insert | insert | insert | Replace with latest | + +`(delete, update)` and `(delete, insert)` cannot occur -- the collection +prevents operations on deleted items within the same transaction. + +## getActiveTransaction / Ambient Transaction Context + +```ts +import { getActiveTransaction } from "@tanstack/db" + +const tx = getActiveTransaction() // Transaction | undefined +``` + +Inside `tx.mutate(() => { ... })`, the transaction is pushed onto an internal +stack. Any `collection.insert/update/delete` call automatically joins the +topmost ambient transaction. This is how `createOptimisticAction` and +`createPacedMutations` wire collection operations into their transactions. 
+ +## createOptimisticAction + +```ts +import { createOptimisticAction } from "@tanstack/db" + +const action = createOptimisticAction({ + // Synchronous -- apply optimistic state immediately (MUST NOT return a Promise) + onMutate: (variables: TVariables) => void, + + // Async -- persist to backend, wait for sync back + mutationFn: (variables: TVariables, params: { transaction }) => Promise, + + // Optional: same as createTransaction config + id?: string, + autoCommit?: boolean, // always true (commit happens after mutate) + metadata?: Record, +}) + +// Returns a function: (variables: TVariables) => Transaction +const tx = action(variables) +await tx.isPersisted.promise +``` + +## createPacedMutations + +```ts +import { createPacedMutations } from "@tanstack/db" + +const mutate = createPacedMutations({ + onMutate: (variables: TVariables) => void, // synchronous optimistic update + mutationFn: MutationFn, // persists merged transaction + strategy: Strategy, // timing control + metadata?: Record, +}) + +// Returns a function: (variables: TVariables) => Transaction +const tx = mutate(variables) +``` + +Rapid calls merge into the active transaction (via `applyMutations`) until the +strategy fires the commit. A new transaction is created for subsequent calls. 
+ +## Strategy Types + +### debounceStrategy + +```ts +import { debounceStrategy } from "@tanstack/db" + +debounceStrategy({ + wait: number, // ms to wait after last call before committing + leading?: boolean, // execute on the leading edge (default false) + trailing?: boolean, // execute on the trailing edge (default true) +}) +``` + +### throttleStrategy + +```ts +import { throttleStrategy } from "@tanstack/db" + +throttleStrategy({ + wait: number, // minimum ms between commits + leading?: boolean, // execute on the leading edge + trailing?: boolean, // execute on the trailing edge +}) +``` + +### queueStrategy + +```ts +import { queueStrategy } from "@tanstack/db" + +queueStrategy({ + wait?: number, // ms between processing items (default 0) + maxSize?: number, // drop items if queue exceeds this + addItemsTo?: "front" | "back", // default "back" (FIFO) + getItemsFrom?: "front" | "back", // default "front" (FIFO) +}) +``` + +Queue creates a **separate transaction per call** (unlike debounce/throttle +which merge). Each transaction commits and awaits `isPersisted` before the next +starts. Failed transactions do not block subsequent ones. + +## Transaction.isPersisted.promise + +```ts +const tx = collection.insert({ id: "1", text: "Hello" }) + +try { + await tx.isPersisted.promise // resolves with the Transaction on success + console.log(tx.state) // "completed" +} catch (error) { + console.log(tx.state) // "failed" + // optimistic state has been rolled back +} +``` + +The promise is a `Deferred` -- it is created at transaction construction time +and settled when `commit()` completes or `rollback()` is called. For +`autoCommit: true` transactions, the promise settles shortly after `mutate()` +returns (the commit runs asynchronously). 
diff --git a/packages/db/skills/meta-framework/SKILL.md b/packages/db/skills/meta-framework/SKILL.md new file mode 100644 index 000000000..38df17757 --- /dev/null +++ b/packages/db/skills/meta-framework/SKILL.md @@ -0,0 +1,339 @@ +--- +name: meta-framework +description: > + Integrating TanStack DB with meta-frameworks (TanStack Start, Next.js, + Remix, Nuxt, SvelteKit). Client-side only: SSR is NOT supported — routes + must disable SSR. Preloading collections in route loaders with + collection.preload(). Pattern: ssr: false + await collection.preload() in + loader. Multiple collection preloading with Promise.all. Framework-specific + loader APIs. +type: composition +library: db +library_version: "0.5.30" +requires: + - db-core + - db-core/collection-setup +sources: + - "TanStack/db:examples/react/todo/src/routes/electric.tsx" + - "TanStack/db:examples/react/todo/src/routes/query.tsx" + - "TanStack/db:examples/react/todo/src/start.tsx" +--- + +This skill builds on db-core. Read it first for collection setup and query builder. + +# TanStack DB — Meta-Framework Integration + +## Setup + +TanStack DB collections are **client-side only**. SSR is not implemented. Routes using TanStack DB **must disable SSR**. The setup pattern is: + +1. Set `ssr: false` on the route +2. Call `collection.preload()` in the route loader +3. 
Use `useLiveQuery` in the component + +## TanStack Start + +### Global SSR disable + +```ts +// start.tsx +import { createStart } from '@tanstack/react-start' + +export const startInstance = createStart(() => { + return { + defaultSsr: false, + } +}) +``` + +### Per-route SSR disable + preload + +```tsx +import { createFileRoute } from '@tanstack/react-router' +import { useLiveQuery } from '@tanstack/react-db' + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: TodoPage, +}) + +function TodoPage() { + const { data: todos } = useLiveQuery((q) => + q.from({ todo: todoCollection }) + ) + return
<ul>
+    {todos.map(t => (
+      <li key={t.id}>{t.text}</li>
+    ))}
+  </ul>
    +} +``` + +### Multiple collection preloading + +```tsx +export const Route = createFileRoute('/electric')({ + ssr: false, + loader: async () => { + await Promise.all([ + todoCollection.preload(), + configCollection.preload(), + ]) + return null + }, + component: ElectricPage, +}) +``` + +## Next.js (App Router) + +### Client component with preloading + +```tsx +// app/todos/page.tsx +'use client' + +import { useEffect, useState } from 'react' +import { useLiveQuery } from '@tanstack/react-db' + +export default function TodoPage() { + const { data: todos, isLoading } = useLiveQuery((q) => + q.from({ todo: todoCollection }) + ) + + if (isLoading) return
<div>Loading...</div>
+  return (
+    <ul>
+      {todos.map(t => (
+        <li key={t.id}>{t.text}</li>
+      ))}
+    </ul>
+  )
    +} +``` + +Next.js App Router components using TanStack DB must be client components (`'use client'`). There is no server-side preloading — collections sync on mount. + +### With route-level preloading (experimental) + +```tsx +// app/todos/page.tsx +'use client' + +import { useEffect } from 'react' +import { useLiveQuery } from '@tanstack/react-db' + +// Trigger preload immediately when module is loaded +const preloadPromise = todoCollection.preload() + +export default function TodoPage() { + const { data: todos } = useLiveQuery((q) => + q.from({ todo: todoCollection }) + ) + return
<ul>
+    {todos.map(t => (
+      <li key={t.id}>{t.text}</li>
+    ))}
+  </ul>
    +} +``` + +## Remix + +### Client loader pattern + +```tsx +// app/routes/todos.tsx +import { useLiveQuery } from '@tanstack/react-db' +import type { ClientLoaderFunctionArgs } from '@remix-run/react' + +export const clientLoader = async ({ request }: ClientLoaderFunctionArgs) => { + await todoCollection.preload() + return null +} + +// Prevent server loader from running +export const loader = () => null + +export default function TodoPage() { + const { data: todos } = useLiveQuery((q) => + q.from({ todo: todoCollection }) + ) + return
<ul>
+    {todos.map(t => (
+      <li key={t.id}>{t.text}</li>
+    ))}
+  </ul>
    +} +``` + +## Nuxt + +### Client-only component + +```vue + + + + +``` + +Wrap TanStack DB components in `` to prevent SSR. + +## SvelteKit + +### Client-side only page + +```svelte + + + +{#if todosQuery} + {#each todosQuery.data as todo (todo.id)} +
    <li>{todo.text}</li>
  • + {/each} +{/if} +``` + +Or disable SSR for the route: + +```ts +// src/routes/todos/+page.ts +export const ssr = false +``` + +## Core Patterns + +### What preload() does + +`collection.preload()` starts the sync process and returns a promise that resolves when the collection reaches "ready" status. This means: + +1. The sync function connects to the backend +2. Initial data is fetched and written to the collection +3. `markReady()` is called by the adapter +4. The promise resolves + +Subsequent calls to `preload()` on an already-ready collection return immediately. + +### Collection module pattern + +Define collections in a shared module, import in both loaders and components: + +```ts +// lib/collections.ts +import { createCollection, queryCollectionOptions } from '@tanstack/react-db' + +export const todoCollection = createCollection( + queryCollectionOptions({ ... }) +) +``` + +```tsx +// routes/todos.tsx — loader uses the same collection instance +import { todoCollection } from '../lib/collections' + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: () => { + const { data } = useLiveQuery((q) => q.from({ todo: todoCollection })) + // ... + }, +}) +``` + +## Common Mistakes + +### CRITICAL Enabling SSR with TanStack DB + +Wrong: + +```tsx +export const Route = createFileRoute('/todos')({ + loader: async () => { + await todoCollection.preload() + return null + }, +}) +``` + +Correct: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, +}) +``` + +TanStack DB collections are client-side only. Without `ssr: false`, the route loader runs on the server where collections cannot sync, causing hangs or errors. 
+ +Source: examples/react/todo/src/start.tsx + +### HIGH Forgetting to preload in route loader + +Wrong: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + component: TodoPage, +}) +``` + +Correct: + +```tsx +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { + await todoCollection.preload() + return null + }, + component: TodoPage, +}) +``` + +Without preloading, the collection starts syncing only when the component mounts, causing a loading flash. Preloading in the route loader starts sync during navigation, making data available immediately when the component renders. + +### MEDIUM Creating separate collection instances + +Wrong: + +```tsx +// routes/todos.tsx +const todoCollection = createCollection(queryCollectionOptions({ ... })) + +export const Route = createFileRoute('/todos')({ + ssr: false, + loader: async () => { await todoCollection.preload() }, + component: () => { + const { data } = useLiveQuery((q) => q.from({ todo: todoCollection })) + }, +}) +``` + +Correct: + +```ts +// lib/collections.ts — single shared instance +export const todoCollection = createCollection(queryCollectionOptions({ ... })) +``` + +Collections are singletons. Creating multiple instances for the same data causes duplicate syncs, wasted bandwidth, and inconsistent state between components. + +See also: react-db/SKILL.md, vue-db/SKILL.md, svelte-db/SKILL.md, solid-db/SKILL.md, angular-db/SKILL.md — for framework-specific hook usage. + +See also: db-core/collection-setup/SKILL.md — for collection creation and adapter selection. 
diff --git a/packages/offline-transactions/package.json b/packages/offline-transactions/package.json index 62b78ac13..f55188c1b 100644 --- a/packages/offline-transactions/package.json +++ b/packages/offline-transactions/package.json @@ -55,7 +55,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*" diff --git a/packages/offline-transactions/skills/offline/SKILL.md b/packages/offline-transactions/skills/offline/SKILL.md new file mode 100644 index 000000000..fbf1e2549 --- /dev/null +++ b/packages/offline-transactions/skills/offline/SKILL.md @@ -0,0 +1,350 @@ +--- +name: offline +description: > + Offline transaction support for TanStack DB. OfflineExecutor orchestrates + persistent outbox (IndexedDB/localStorage), leader election (WebLocks/ + BroadcastChannel), retry with backoff, and connectivity detection. + createOfflineTransaction/createOfflineAction wrap TanStack DB primitives + with offline persistence. Idempotency keys for at-least-once delivery. + Graceful degradation to online-only mode when storage unavailable. + React Native support via separate entry point. +type: composition +library: db +library_version: "0.5.30" +requires: + - db-core + - db-core/mutations-optimistic +sources: + - "TanStack/db:packages/offline-transactions/src/OfflineExecutor.ts" + - "TanStack/db:packages/offline-transactions/src/types.ts" + - "TanStack/db:packages/offline-transactions/src/index.ts" +--- + +This skill builds on db-core and mutations-optimistic. Read those first. 
+ +# TanStack DB — Offline Transactions + +## Setup + +```ts +import { + startOfflineExecutor, + IndexedDBAdapter, +} from '@tanstack/offline-transactions' +import { todoCollection } from './collections' + +const executor = startOfflineExecutor({ + collections: { todos: todoCollection }, + mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + const mutation = transaction.mutations[0] + await api.todos.create({ + ...mutation.modified, + idempotencyKey, + }) + }, + updateTodo: async ({ transaction, idempotencyKey }) => { + const mutation = transaction.mutations[0] + await api.todos.update(mutation.key, { + ...mutation.changes, + idempotencyKey, + }) + }, + }, +}) + +// Wait for initialization (storage probe, leader election, outbox replay) +await executor.waitForInit() +``` + +## Core API + +### createOfflineTransaction + +```ts +const tx = executor.createOfflineTransaction({ + mutationFnName: 'createTodo', + autoCommit: true, +}) + +// Use like a normal TanStack DB transaction +todoCollection.insert(tx, { id: crypto.randomUUID(), text: 'New todo' }) +``` + +If the executor is not the leader tab, falls back to `createTransaction` directly (no offline persistence). + +### createOfflineAction + +```ts +const addTodo = executor.createOfflineAction({ + mutationFnName: 'createTodo', + onMutate: (variables) => { + todoCollection.insert(undefined, { + id: crypto.randomUUID(), + text: variables.text, + }) + }, +}) + +// Call it +addTodo({ text: 'Buy milk' }) +``` + +If the executor is not the leader tab, falls back to `createOptimisticAction` directly. 
+ +## Architecture + +### Components + +| Component | Purpose | Default | +|-----------|---------|---------| +| **Storage** | Persist transactions to survive page reload | IndexedDB → localStorage fallback | +| **OutboxManager** | FIFO queue of pending transactions | Automatic | +| **KeyScheduler** | Serialize transactions touching same keys | Automatic | +| **TransactionExecutor** | Execute with retry + backoff | Automatic | +| **LeaderElection** | Only one tab processes the outbox | WebLocks → BroadcastChannel | +| **OnlineDetector** | Pause/resume on connectivity changes | navigator.onLine + events | + +### Transaction lifecycle + +1. Mutation applied optimistically to collection (instant UI update) +2. Transaction serialized and persisted to storage (outbox) +3. Leader tab picks up transaction and executes `mutationFn` +4. On success: removed from outbox, optimistic state resolved +5. On failure: retried with exponential backoff +6. On page reload: outbox replayed, optimistic state restored + +### Leader election + +Only one tab processes the outbox to prevent duplicate execution. Non-leader tabs use regular `createTransaction`/`createOptimisticAction` (online-only, no persistence). + +```ts +const executor = startOfflineExecutor({ + // ... + onLeadershipChange: (isLeader) => { + console.log(isLeader ? 'This tab is processing offline transactions' : 'Another tab is leader') + }, +}) + +executor.isOfflineEnabled // true only if leader AND storage available +``` + +### Storage degradation + +The executor probes storage availability on startup: + +```ts +const executor = startOfflineExecutor({ + // ... 
+  onStorageFailure: (diagnostic) => {
+    // diagnostic.code: 'STORAGE_BLOCKED' | 'QUOTA_EXCEEDED' | 'UNKNOWN_ERROR'
+    // diagnostic.mode: 'online-only'
+    console.warn(diagnostic.message)
+  },
+})
+
+executor.mode // 'offline' | 'online-only'
+executor.storageDiagnostic // Full diagnostic info
+```
+
+When storage is unavailable (private browsing, quota exceeded), the executor operates in online-only mode — mutations work normally but aren't persisted across page reloads.
+
+## Configuration
+
+```ts
+interface OfflineConfig {
+  collections: Record<string, Collection> // Collections for optimistic state restoration
+  mutationFns: Record<string, MutationFn> // Named mutation functions
+  storage?: StorageAdapter // Custom storage (default: auto-detect)
+  maxConcurrency?: number // Parallel execution limit
+  jitter?: boolean // Add jitter to retry delays
+  beforeRetry?: (txs) => txs // Transform/filter before retry
+  onUnknownMutationFn?: (name, tx) => void // Handle orphaned transactions
+  onLeadershipChange?: (isLeader) => void // Leadership state callback
+  onStorageFailure?: (diagnostic) => void // Storage probe failure callback
+  leaderElection?: LeaderElection // Custom leader election
+  onlineDetector?: OnlineDetector // Custom connectivity detection
+}
+```
+
+### Custom storage adapter
+
+```ts
+interface StorageAdapter {
+  get: (key: string) => Promise<string | undefined>
+  set: (key: string, value: string) => Promise<void>
+  delete: (key: string) => Promise<void>
+  keys: () => Promise<Array<string>>
+  clear: () => Promise<void>
+}
+```
+
+## Error Handling
+
+### NonRetriableError
+
+```ts
+import { NonRetriableError } from '@tanstack/offline-transactions'
+
+const executor = startOfflineExecutor({
+  mutationFns: {
+    createTodo: async ({ transaction, idempotencyKey }) => {
+      const res = await fetch('/api/todos', { method: 'POST', body: ...
}) + if (res.status === 409) { + throw new NonRetriableError('Duplicate detected') + } + if (!res.ok) throw new Error('Server error') + }, + }, +}) +``` + +Throwing `NonRetriableError` stops retry and removes the transaction from the outbox. Use for permanent failures (validation errors, conflicts, 4xx responses). + +### Idempotency keys + +Every offline transaction includes an `idempotencyKey`. Pass it to your API to prevent duplicate execution on retry: + +```ts +mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + await fetch('/api/todos', { + method: 'POST', + headers: { 'Idempotency-Key': idempotencyKey }, + body: JSON.stringify(transaction.mutations[0].modified), + }) + }, +} +``` + +## React Native + +```ts +import { + startOfflineExecutor, +} from '@tanstack/offline-transactions/react-native' + +// Uses ReactNativeOnlineDetector automatically +// Uses AsyncStorage-compatible storage +const executor = startOfflineExecutor({ ... }) +``` + +## Outbox Management + +```ts +// Inspect pending transactions +const pending = await executor.peekOutbox() + +// Get counts +executor.getPendingCount() // Queued transactions +executor.getRunningCount() // Currently executing + +// Clear all pending transactions +await executor.clearOutbox() + +// Cleanup +executor.dispose() +``` + +## Common Mistakes + +### CRITICAL Not passing idempotencyKey to the API + +Wrong: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + await api.todos.create(transaction.mutations[0].modified) + }, +} +``` + +Correct: + +```ts +mutationFns: { + createTodo: async ({ transaction, idempotencyKey }) => { + await api.todos.create({ + ...transaction.mutations[0].modified, + idempotencyKey, + }) + }, +} +``` + +Offline transactions retry on failure. Without idempotency keys, retries can create duplicate records on the server. + +### HIGH Not waiting for initialization + +Wrong: + +```ts +const executor = startOfflineExecutor({ ... 
}) +const tx = executor.createOfflineTransaction({ mutationFnName: 'createTodo' }) +``` + +Correct: + +```ts +const executor = startOfflineExecutor({ ... }) +await executor.waitForInit() +const tx = executor.createOfflineTransaction({ mutationFnName: 'createTodo' }) +``` + +`startOfflineExecutor` initializes asynchronously (probes storage, requests leadership, replays outbox). Creating transactions before initialization completes may miss the leader election result and use the wrong code path. + +### HIGH Missing collection in collections map + +Wrong: + +```ts +const executor = startOfflineExecutor({ + collections: {}, + mutationFns: { createTodo: ... }, +}) +``` + +Correct: + +```ts +const executor = startOfflineExecutor({ + collections: { todos: todoCollection }, + mutationFns: { createTodo: ... }, +}) +``` + +The `collections` map is used to restore optimistic state from the outbox on page reload. Without it, previously pending mutations won't show their optimistic state while being replayed. + +### MEDIUM Not handling NonRetriableError for permanent failures + +Wrong: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + const res = await fetch('/api/todos', { ... }) + if (!res.ok) throw new Error('Failed') + }, +} +``` + +Correct: + +```ts +mutationFns: { + createTodo: async ({ transaction }) => { + const res = await fetch('/api/todos', { ... }) + if (res.status >= 400 && res.status < 500) { + throw new NonRetriableError(`Client error: ${res.status}`) + } + if (!res.ok) throw new Error('Server error') + }, +} +``` + +Without distinguishing retriable from permanent errors, 4xx responses (validation, auth, not found) will retry forever until max retries, wasting resources and filling logs. + +See also: db-core/mutations-optimistic/SKILL.md — for the underlying mutation primitives. + +See also: db-core/collection-setup/SKILL.md — for setting up collections used with offline transactions. 
diff --git a/packages/react-db/package.json b/packages/react-db/package.json index c38f73fe1..56b0e71c3 100644 --- a/packages/react-db/package.json +++ b/packages/react-db/package.json @@ -42,7 +42,8 @@ "sideEffects": false, "files": [ "dist", - "src" + "src", + "skills" ], "dependencies": { "@tanstack/db": "workspace:*", diff --git a/packages/react-db/skills/react-db/SKILL.md b/packages/react-db/skills/react-db/SKILL.md new file mode 100644 index 000000000..706cc6712 --- /dev/null +++ b/packages/react-db/skills/react-db/SKILL.md @@ -0,0 +1,253 @@ +--- +name: react-db +description: > + React bindings for TanStack DB. useLiveQuery hook with dependency arrays + (8 overloads: query function, config object, pre-created collection, + disabled state via returning undefined/null). useLiveSuspenseQuery for + React Suspense with Error Boundaries (data always defined). + useLiveInfiniteQuery for cursor-based pagination (pageSize, fetchNextPage, + hasNextPage, isFetchingNextPage). usePacedMutations for debounced React + state updates. Return shape: data, state, collection, status, isLoading, + isReady, isError. Import from @tanstack/react-db (re-exports all of + @tanstack/db). +type: framework +library: db +framework: react +library_version: "0.5.30" +requires: + - db-core +sources: + - "TanStack/db:docs/framework/react/overview.md" + - "TanStack/db:docs/guides/live-queries.md" + - "TanStack/db:packages/react-db/src/useLiveQuery.ts" + - "TanStack/db:packages/react-db/src/useLiveInfiniteQuery.ts" +--- + +This skill builds on db-core. Read it first for collection setup, query builder, and mutation patterns. + +# TanStack DB — React + +## Setup + +```tsx +import { useLiveQuery, eq, not } from "@tanstack/react-db" + +function TodoList() { + const { data: todos, isLoading } = useLiveQuery((q) => + q + .from({ todo: todoCollection }) + .where(({ todo }) => not(todo.completed)) + .orderBy(({ todo }) => todo.created_at, "asc") + ) + + if (isLoading) return
+    <div>Loading...</div>
+
+  return (
+    <ul>
+      {todos.map((todo) => (
+        <li key={todo.id}>{todo.text}</li>
+      ))}
+    </ul>
+  )
+}
+```
+
+`@tanstack/react-db` re-exports everything from `@tanstack/db`. In React projects, import everything from `@tanstack/react-db`.
+
+## Hooks
+
+### useLiveQuery
+
+```tsx
+// Query function with dependency array
+const { data, state, collection, status, isLoading, isReady, isError } =
+  useLiveQuery(
+    (q) =>
+      q
+        .from({ todo: todoCollection })
+        .where(({ todo }) => eq(todo.userId, userId)),
+    [userId]
+  )
+
+// Config object
+const { data } = useLiveQuery({
+  query: (q) => q.from({ todo: todoCollection }),
+  gcTime: 60000,
+})
+
+// Pre-created collection (from route loader)
+const { data } = useLiveQuery(preloadedCollection)
+
+// Conditional query — return undefined/null to disable
+const { data, status } = useLiveQuery(
+  (q) => {
+    if (!userId) return undefined
+    return q.from({ todo: todoCollection }).where(({ todo }) => eq(todo.userId, userId))
+  },
+  [userId]
+)
+// When disabled: status='disabled', data=undefined
+```
+
+### useLiveSuspenseQuery
+
+```tsx
+// data is ALWAYS defined — never undefined
+// Must wrap in <Suspense> and <ErrorBoundary>
+function TodoList() {
+  const { data: todos } = useLiveSuspenseQuery((q) =>
+    q.from({ todo: todoCollection })
+  )
+
+  return
 <ul>{todos.map((t) => <li key={t.id}>{t.text}</li>)}</ul>
    +} + +// With deps — re-suspends when deps change +const { data } = useLiveSuspenseQuery( + (q) => q.from({ todo: todoCollection }).where(({ todo }) => eq(todo.category, category)), + [category] +) +``` + +### useLiveInfiniteQuery + +```tsx +const { data, fetchNextPage, hasNextPage, isFetchingNextPage } = + useLiveInfiniteQuery( + (q) => + q + .from({ posts: postsCollection }) + .orderBy(({ posts }) => posts.createdAt, "desc"), + { pageSize: 20 }, + [category] + ) + +// data is the flat array of all loaded pages +// fetchNextPage() loads the next page +// hasNextPage is true when more data is available +``` + +### usePacedMutations + +```tsx +import { usePacedMutations, debounceStrategy } from "@tanstack/react-db" + +const mutate = usePacedMutations({ + onMutate: (value: string) => { + noteCollection.update(noteId, (draft) => { + draft.content = value + }) + }, + mutationFn: async ({ transaction }) => { + await api.notes.update(noteId, transaction.mutations[0].changes) + }, + strategy: debounceStrategy({ wait: 500 }), +}) + +// In handler: +