From 328c7c267a3f682136f491a9ae5ca968eff13458 Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 3 Sep 2025 15:41:16 +0200 Subject: [PATCH 01/31] feat: support multiple channel lists with ChannelPaginatorsOrchestrator --- src/ChannelPaginatorsOrchestrator.ts | 327 ++++++++++ src/EventHandlerPipeline.ts | 163 +++++ src/index.ts | 2 + src/pagination/BasePaginator.ts | 202 +++++- src/pagination/ChannelPaginator.ts | 206 +++++++ src/pagination/FilterBuilder.ts | 5 +- src/pagination/ReminderPaginator.ts | 4 +- src/pagination/filterCompiler.ts | 192 ++++++ src/pagination/index.ts | 1 + src/pagination/sortCompiler.ts | 97 +++ src/pagination/types.normalization.ts | 7 + src/pagination/utility.normalization.ts | 108 ++++ src/pagination/utility.queryChannel.ts | 77 +++ src/pagination/utility.search.ts | 56 ++ .../ChannelPaginatorsOrchestrator.test.ts | 580 ++++++++++++++++++ test/unit/EventHandlerPipeline.test.ts | 525 ++++++++++++++++ test/unit/LiveLocationManager.test.ts | 2 + test/unit/pagination/BasePaginator.test.ts | 467 +++++++++++++- test/unit/pagination/ChannelPaginator.test.ts | 441 +++++++++++++ test/unit/pagination/FilterBuilder.test.ts | 2 +- test/unit/pagination/filterCompiler.test.ts | 368 +++++++++++ test/unit/pagination/sortCompiler.test.ts | 267 ++++++++ 22 files changed, 4094 insertions(+), 5 deletions(-) create mode 100644 src/ChannelPaginatorsOrchestrator.ts create mode 100644 src/EventHandlerPipeline.ts create mode 100644 src/pagination/ChannelPaginator.ts create mode 100644 src/pagination/filterCompiler.ts create mode 100644 src/pagination/sortCompiler.ts create mode 100644 src/pagination/types.normalization.ts create mode 100644 src/pagination/utility.normalization.ts create mode 100644 src/pagination/utility.queryChannel.ts create mode 100644 src/pagination/utility.search.ts create mode 100644 test/unit/ChannelPaginatorsOrchestrator.test.ts create mode 100644 test/unit/EventHandlerPipeline.test.ts create mode 100644 
test/unit/pagination/ChannelPaginator.test.ts create mode 100644 test/unit/pagination/filterCompiler.test.ts create mode 100644 test/unit/pagination/sortCompiler.test.ts diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts new file mode 100644 index 0000000000..481eba2029 --- /dev/null +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -0,0 +1,327 @@ +import { EventHandlerPipeline } from './EventHandlerPipeline'; +import { WithSubscriptions } from './utils/WithSubscriptions'; +import type { Event, EventTypes } from './types'; +import type { ChannelPaginator } from './pagination'; +import type { StreamChat } from './client'; +import type { Unsubscribe } from './store'; +import { StateStore } from './store'; +import type { + EventHandlerPipelineHandler, + InsertEventHandlerPayload, + LabeledEventHandler, +} from './EventHandlerPipeline'; +import { getChannel } from './pagination/utility.queryChannel'; +import type { Channel } from './channel'; + +type ChannelPaginatorsOrchestratorEventHandlerContext = { + orchestrator: ChannelPaginatorsOrchestrator; +}; + +type SupportedEventType = EventTypes | (string & {}); + +const reEmit: EventHandlerPipelineHandler< + ChannelPaginatorsOrchestratorEventHandlerContext +> = ({ event, ctx: { orchestrator } }) => { + if (!event.cid) return; + const channel = orchestrator.client.activeChannels[event.cid]; + if (!channel) return; + orchestrator.paginators.forEach((paginator) => { + const items = paginator.items; + if (paginator.findItem(channel) && items) { + paginator.state.partialNext({ items: [...items] }); + } + }); +}; + +const removeItem: EventHandlerPipelineHandler< + ChannelPaginatorsOrchestratorEventHandlerContext +> = ({ event, ctx: { orchestrator } }) => { + if (!event.cid) return; + const channel = orchestrator.client.activeChannels[event.cid]; + orchestrator.paginators.forEach((paginator) => { + paginator.removeItem({ id: event.cid, item: channel }); + }); +}; + +const updateLists: 
EventHandlerPipelineHandler< + ChannelPaginatorsOrchestratorEventHandlerContext +> = async ({ event, ctx: { orchestrator } }) => { + let channel: Channel | undefined = undefined; + if (event.cid) { + channel = orchestrator.client.activeChannels[event.cid]; + } else if (event.channel_id && event.channel_type) { + // todo: is there a central method to construct the cid from type and channel id? + channel = + orchestrator.client.activeChannels[`${event.channel_type}:${event.channel_id}`]; + } else if (event.channel) { + channel = orchestrator.client.activeChannels[event.channel.cid]; + } else { + return; + } + + if (!channel) { + const [type, id] = event.cid + ? event.cid.split(':') + : [event.channel_type, event.channel_id]; + + channel = await getChannel({ + client: orchestrator.client, + id, + type, + }); + } + + if (!channel) return; + + // todo: can these state updates be made atomic across all the paginators? + // maybe we could add to state store API that would allow to queue changes and then commit? + orchestrator.paginators.forEach((paginator) => { + if (paginator.matchesFilter(channel)) { + // todo: does it make sense to move channel at the top of the items array (original implementation) + // if items are supposed to be ordered by the sort object? + paginator.ingestItem(channel); + } else { + // remove if it does not match the filter anymore + paginator.removeItem({ item: channel }); + } + }); +}; + +// todo: we have to make sure that client.activeChannels is always up-to-date +const channelDeletedHandler: LabeledEventHandler = + { + handle: removeItem, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.deleted', + }; + +// fixme: is it ok, remove item just because its property hidden is switched to hidden: true? What about offset cursor, should we update it? 
+const channelHiddenHandler: LabeledEventHandler = + { + handle: removeItem, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.hidden', + }; + +// fixme: this handler should not be handled by the orchestrator but as Channel does not have reactive state, +// we need to re-emit the whole list to reflect the changes +const channelUpdatedHandler: LabeledEventHandler = + { + handle: reEmit, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.updated', + }; + +// fixme: this handler should not be handled by the orchestrator but as Channel does not have reactive state, +// we need to re-emit the whole list to reflect the changes +const channelTruncatedHandler: LabeledEventHandler = + { + handle: reEmit, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.truncated', + }; + +const channelVisibleHandler: LabeledEventHandler = + { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.visible', + }; + +// members filter - should not be impacted as id is stable - cannot be updated +// member.user.name - can be impacted +const memberUpdatedHandler: LabeledEventHandler = + { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:member.updated', + }; + +const messageNewHandler: LabeledEventHandler = + { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:message.new', + }; + +const notificationAddedToChannelHandler: LabeledEventHandler = + { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.added_to_channel', + }; + +const notificationMessageNewHandler: LabeledEventHandler = + { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.message_new', + }; + +const notificationRemovedFromChannelHandler: LabeledEventHandler = + { + handle: removeItem, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.removed_from_channel', + }; + +// fixme: updates users for member object in all the channels 
which are loaded with that member - normalization would be beneficial +const userPresenceChangedHandler: LabeledEventHandler = + { + handle: ({ event, ctx: { orchestrator } }) => { + const eventUser = event.user; + if (!eventUser?.id) return; + orchestrator.paginators.forEach((paginator) => { + const paginatorItems = paginator.items; + if (!paginatorItems) return; + let updated = false; + paginatorItems.forEach((channel) => { + if (channel.state.members[eventUser.id]) { + channel.state.members[eventUser.id].user = event.user; + updated = true; + } + if (channel.state.membership.user?.id === eventUser.id) { + channel.state.membership.user = eventUser; + updated = true; + } + }); + if (updated) { + // fixme: user is not reactive and so the whole list has to be re-rendered + paginator.state.partialNext({ items: [...paginatorItems] }); + } + }); + }, + id: 'ChannelPaginatorsOrchestrator:default-handler:user.presence.changed', + }; + +export type ChannelPaginatorsOrchestratorState = { + paginators: ChannelPaginator[]; +}; + +type EventHandlers = Partial< + Record< + SupportedEventType, + LabeledEventHandler[] + > +>; + +export type ChannelPaginatorsOrchestratorOptions = { + client: StreamChat; + paginators?: ChannelPaginator[]; + eventHandlers?: EventHandlers; +}; + +export class ChannelPaginatorsOrchestrator extends WithSubscriptions { + client: StreamChat; + state: StateStore; + protected pipelines = new Map< + SupportedEventType, + EventHandlerPipeline + >(); + + protected static readonly defaultEventHandlers: EventHandlers = { + 'channel.deleted': [channelDeletedHandler], + 'channel.hidden': [channelHiddenHandler], + 'channel.updated': [channelUpdatedHandler], + 'channel.truncated': [channelTruncatedHandler], + 'channel.visible': [channelVisibleHandler], + 'member.updated': [memberUpdatedHandler], + 'message.new': [messageNewHandler], + 'notification.added_to_channel': [notificationAddedToChannelHandler], + 'notification.message_new': 
[notificationMessageNewHandler], + 'notification.removed_from_channel': [notificationRemovedFromChannelHandler], + 'user.presence.changed': [userPresenceChangedHandler], + }; + + constructor({ + client, + eventHandlers, + paginators, + }: ChannelPaginatorsOrchestratorOptions) { + super(); + this.client = client; + this.state = new StateStore({ paginators: paginators ?? [] }); + const finalEventHandlers = + eventHandlers ?? ChannelPaginatorsOrchestrator.getDefaultHandlers(); + for (const [type, handlers] of Object.entries(finalEventHandlers)) { + if (handlers) this.ensurePipeline(type).replaceAll(handlers); + } + } + + get paginators(): ChannelPaginator[] { + return this.state.getLatestValue().paginators; + } + + /** + * Returns deep copy of default handlers mapping. + * The defaults can be enriched with custom handlers or the custom handlers can be replaced. + */ + static getDefaultHandlers(): EventHandlers { + const src = ChannelPaginatorsOrchestrator.defaultEventHandlers; + const out: EventHandlers = {}; + for (const [type, handlers] of Object.entries(src)) { + if (!handlers) continue; + out[type as SupportedEventType] = [...handlers]; + } + return out; + } + + getPaginatorById(id: string) { + return this.paginators.find((p) => p.id === id); + } + + /** + * If paginator already exists → remove old, reinsert at new index. + * If index not provided → append at the end. + * If index provided → insert (or move) at that index. + * @param paginator + * @param index + */ + insertPaginator({ paginator, index }: { paginator: ChannelPaginator; index?: number }) { + const paginators = [...this.paginators]; + const existingIndex = paginators.findIndex((p) => p.id === paginator.id); + if (existingIndex > -1) { + paginators.splice(existingIndex, 1); + } + const validIndex = Math.max( + 0, + Math.min(index ?? 
paginators.length, paginators.length), + ); + paginators.splice(validIndex, 0, paginator); + this.state.partialNext({ paginators }); + } + + addEventHandler({ + eventType, + ...payload + }: { + eventType: SupportedEventType; + } & InsertEventHandlerPayload): Unsubscribe { + return this.ensurePipeline(eventType).insert(payload); + } + + /** Subscribe to WS (and more buses via attachBus) */ + registerSubscriptions(): Unsubscribe { + if (!this.hasSubscriptions) { + this.addUnsubscribeFunction( + // todo: maybe we should have a wrapper here to decide, whether the event is a LocalEventBus event or else supported by client + this.client.on((event: Event) => { + const pipe = this.pipelines.get(event.type); + if (pipe) { + pipe.run(event, this.ctx); + } + }).unsubscribe, + ); + } + + this.incrementRefCount(); + return () => this.unregisterSubscriptions(); + } + + ensurePipeline( + eventType: SupportedEventType, + ): EventHandlerPipeline { + let pipe = this.pipelines.get(eventType); + if (!pipe) { + pipe = new EventHandlerPipeline({ + id: `ChannelPaginatorsOrchestrator:${eventType}`, + }); + this.pipelines.set(eventType, pipe); + } + return pipe; + } + + private get ctx(): ChannelPaginatorsOrchestratorEventHandlerContext { + return { orchestrator: this }; + } +} diff --git a/src/EventHandlerPipeline.ts b/src/EventHandlerPipeline.ts new file mode 100644 index 0000000000..c2b63b976a --- /dev/null +++ b/src/EventHandlerPipeline.ts @@ -0,0 +1,163 @@ +import { generateUUIDv4 } from './utils'; +import type { Event } from './types'; +import type { Unsubscribe } from './store'; + +export type EventHandlerResult = { action: 'stop' }; // event processing run will be cancelled + +export type InsertEventHandlerPayload> = { + handle: EventHandlerPipelineHandler; + index?: number; + id?: string; + replace?: boolean; + revertOnUnsubscribe?: boolean; +}; + +export type EventHandlerPipelineHandler> = (payload: { + event: Event; + ctx: CTX; +}) => EventHandlerResult | void | Promise; + 
+export type LabeledEventHandler> = { + handle: EventHandlerPipelineHandler; + id?: string; +}; + +export class EventHandlerPipeline = {}> { + id: string; + protected handlers: LabeledEventHandler[] = []; + private runnerExecutionPromise = Promise.resolve(); + + constructor({ id }: { id: string }) { + this.id = id; + } + + get size(): number { + return this.handlers.length; + } + + /** + * Insert a handler into the pipeline at the given index. + * + * - If `replace` is `true` and the index is within bounds, the existing handler + * at that position will be replaced by the new one. + * - If `revertOnUnsubscribe` is also `true`, then calling the returned + * unsubscribe will both remove the inserted handler *and* restore the + * previously replaced handler at the same index. + * - If `replace` is `false` (default), the new handler is inserted at the index + * (or appended if the index is greater than the pipeline size). Unsubscribe + * will only remove this handler. + * + * @param handler The handler function to insert. + * @param index Target index in the pipeline (clamped to valid range). + * @param replace If true, replace existing handler at index instead of inserting. + * @param revertOnUnsubscribe If true, restore the replaced handler when unsubscribing. + * @returns An unsubscribe function that removes (and optionally restores) the handler. + */ + + insert({ + handle, + id, + index, + replace = false, + revertOnUnsubscribe, + }: InsertEventHandlerPayload): Unsubscribe { + const validIndex = Math.max( + 0, + Math.min(index ?? this.handlers.length, this.handlers.length), + ); + const handler: LabeledEventHandler = { + handle, + id: id ?? 
generateUUIDv4(), + }; + + if (replace && validIndex < this.handlers.length) { + const old = this.handlers[validIndex]; + this.handlers[validIndex] = handler; + return () => { + this.remove(handler); + if (revertOnUnsubscribe) this.handlers.splice(validIndex, 0, old); + }; + } else { + this.handlers.splice(validIndex, 0, handler); + return () => this.remove(handler); + } + } + + remove(h: LabeledEventHandler | EventHandlerPipelineHandler): void { + const index = this.handlers.findIndex((handler) => + typeof (h as LabeledEventHandler).handle === 'function' + ? (h as LabeledEventHandler).handle === handler.handle + : h === handler.handle, + ); + if (index >= 0) this.handlers.splice(index, 1); + } + + replaceAll(handlers: LabeledEventHandler[]): void { + this.handlers = handlers.slice(); + } + + clear(): void { + this.handlers = []; + } + + /** + * Queue an event for processing. Events are processed serially, in the order + * `run` is called. Returns a promise that resolves/rejects for this specific + * event’s processing, while the internal chain continues (errors won’t break it). + */ + run(event: Event, ctx: CTX): Promise { + let resolveTask!: () => void; + let rejectTask!: (e: unknown) => void; + // Per-task promise the caller can await + const taskPromise = new Promise((res, rej) => { + resolveTask = res; + rejectTask = rej; + }); + + // Queue this event’s work + this.runnerExecutionPromise = this.runnerExecutionPromise + .then(async () => { + try { + await this.processOne(event, ctx); + resolveTask(); + } catch (e) { + // Reject this task’s promise, but keep the chain alive. + rejectTask(e); + } + }) + .catch((e) => { + console.error(`[pipeline:${this.id}] execution error`, e); + // Ensure the chain remains resolved for the next enqueue: + this.runnerExecutionPromise = Promise.resolve(); + }); + + return taskPromise; + } + + /** + * Wait until all queued events have been processed. 
+ */ + async drain(): Promise { + await this.runnerExecutionPromise; + } + + /** + * Process a single event through a stable snapshot of handlers to avoid + * mid-iteration mutations (insert/remove) affecting this run. + */ + private async processOne(event: Event, ctx: CTX): Promise { + const snapshot = this.handlers.slice(); + for (let i = 0; i < snapshot.length; i++) { + const handler = snapshot[i]; + try { + const result = await handler.handle({ event, ctx }); + if (result?.action === 'stop') return; + } catch { + console.error(`[pipeline:${this.id}] handler failed`, { + handlerId: handler.id ?? 'unknown', + handlerIndex: i, + }); + } + } + } +} diff --git a/src/index.ts b/src/index.ts index 5f5daf375f..eeb1ce6c68 100644 --- a/src/index.ts +++ b/src/index.ts @@ -58,3 +58,5 @@ export { promoteChannel, } from './utils'; export { FixedSizeQueueCache } from './utils/FixedSizeQueueCache'; +export * from './ChannelPaginatorsOrchestrator'; +export * from './EventHandlerPipeline'; diff --git a/src/pagination/BasePaginator.ts b/src/pagination/BasePaginator.ts index 7f73f0f53b..8c40930f0f 100644 --- a/src/pagination/BasePaginator.ts +++ b/src/pagination/BasePaginator.ts @@ -1,9 +1,15 @@ +import { binarySearchInsertIndex } from './sortCompiler'; +import { itemMatchesFilter } from './filterCompiler'; import { StateStore } from '../store'; import { debounce, type DebouncedFunc } from '../utils'; +import type { FieldToDataResolver } from './types.normalization'; +import { locateOnPlateauAlternating, locateOnPlateauScanOneSide } from './utility.search'; + +const noOrderChange = () => 0; type PaginationDirection = 'next' | 'prev'; type Cursor = { next: string | null; prev: string | null }; -export type PaginationQueryParams = { direction: PaginationDirection }; +export type PaginationQueryParams = { direction?: PaginationDirection }; export type PaginationQueryReturnValue = { items: T[] } & { next?: string; prev?: string; @@ -41,12 +47,39 @@ export abstract class BasePaginator 
{ pageSize: number; protected _executeQueryDebounced!: DebouncedExecQueryFunction; protected _isCursorPagination = false; + /** + * Comparison function used to keep items in a paginator sorted. + * + * The comparator must follow the standard contract of `Array.prototype.sort`: + * - return a negative number if `a` should come before `b` + * - return a positive number if `a` should come after `b` + * - return 0 if they are considered equal for ordering + * + * Typical implementations are generated from a "sort spec" (e.g. `{ field: 1, otherField: -1 }`) + * so that insertion and pagination can maintain the same order as the backend. + * + * Notes: + * - The comparator must be deterministic: the same inputs always return + * the same result. + * - If multiple fields are used, they are evaluated in order of normalized sort ({ direction: AscDesc; field: keyof T }[]) + * until a non-zero comparison is found. + * - Equality (0) does not imply object identity; it only means neither item + * is considered greater than the other by the sort rules. 
+ */ + sortComparator: (a: T, b: T) => number; + /** + * Allows defining data extraction logic for filter fields like member.user.name or members + * @protected + */ + protected _filterFieldToDataResolvers: FieldToDataResolver[]; protected constructor(options?: PaginatorOptions) { const { debounceMs, pageSize } = { ...DEFAULT_PAGINATION_OPTIONS, ...options }; this.pageSize = pageSize; this.state = new StateStore>(this.initialState); this.setDebounceOptions({ debounceMs }); + this.sortComparator = noOrderChange; + this._filterFieldToDataResolvers = []; } get lastQueryError() { @@ -97,6 +130,173 @@ export abstract class BasePaginator { abstract filterQueryResults(items: T[]): T[] | Promise; + protected buildFilters(): object | null { + return null; // === no filters' + } + + getItemId(item: T): string { + return (item as { id: string }).id; + } + + matchesFilter(item: T): boolean { + const filters = this.buildFilters(); + + // no filters => accept all + if (filters == null) return true; + + return itemMatchesFilter(item, filters, { + resolvers: this._filterFieldToDataResolvers, + }); + } + + ingestItem(ingestedItem: T): boolean { + const items = this.items ?? []; + const id = this.getItemId(ingestedItem); + + // If it doesn't match this paginator's filters, remove if present and exit. + const existingIndex = items.findIndex((ch) => this.getItemId(ch) === id); + if (!this.matchesFilter(ingestedItem)) { + if (existingIndex >= 0) { + const next = items.slice(); + next.splice(existingIndex, 1); + this.state.partialNext({ items: next }); + return true; // list changed (item removed) + } + return false; // no change + } + + // Build comparator once per call (you can cache it when sort changes). 
+ + const next = items.slice(); + + if (existingIndex >= 0) { + // Update existing: remove then re-insert at the correct position + next.splice(existingIndex, 1); + } + + // Find insertion index via binary search: first index where existing > ingestionItem + const insertAt = binarySearchInsertIndex({ + needle: ingestedItem, + sortedArray: next, + compare: this.sortComparator, + }); + + next.splice(insertAt, 0, ingestedItem); + this.state.partialNext({ items: next }); + return true; // list changed (added or repositioned) + } + + /** + * Removes item from the paginator's state. + * It is preferable to provide item for better search performance. + * @param id + * @param item + */ + removeItem({ id, item }: { id?: string; item?: T }): boolean { + if (!id && !item) return false; + let index: number; + if (item) { + const location = this.locateByItem(item); + index = location.index; + } else { + index = this.items?.findIndex((i) => this.getItemId(i) === id) ?? -1; + } + + if (index === -1) return false; + const newItems = [...(this.items ?? [])]; + newItems.splice(index, 1); + this.state.partialNext({ items: newItems }); + return true; + } + + contains(item: T): boolean { + return !!this.items?.find((i) => this.getItemId(i) === this.getItemId(item)); + } + + /** + * Find the exact index of `needle` by ID (via getItemId) under the current sortComparator. + * Returns: + * - `index`: actual index if found, otherwise -1 + * - `insertionIndex`: lower-bound position where `needle` would be inserted + * to preserve order (always defined). + * + * Time: O(log n) + O(k) for a tie plateau of size k (unless comparator has ID tiebreaker). + * + * ### Usage examples + * + * ```ts + * const { index, insertionIndex } = paginator.locateByItem(channel); + * + * if (index > -1) { + * // Found -> e.g. 
remove the item + * items.splice(index, 1); + * } else { + * // Insert new at the right position + * items.splice(insertionIndex, 0, channel); + * } + * ``` + */ + public locateByItem( + needle: T, + options?: { alternatePlateauScan?: boolean }, + ): { index: number; insertionIndex: number } { + const items = this.items ?? []; + if (items.length === 0) return { index: -1, insertionIndex: 0 }; + + const insertionIndex = binarySearchInsertIndex({ + needle, + sortedArray: items, + compare: this.sortComparator, + }); + + // quick neighbor checks + const id = this.getItemId(needle); + const left = insertionIndex - 1; + if (left >= 0 && this.sortComparator(items[left], needle) === 0) { + if (this.getItemId(items[left]) === id) return { index: left, insertionIndex }; + } + if ( + insertionIndex < items.length && + this.sortComparator(items[insertionIndex], needle) === 0 + ) { + if (this.getItemId(items[insertionIndex]) === id) + return { index: insertionIndex, insertionIndex }; + } + + // plateau scan + const index = + (options?.alternatePlateauScan ?? true) + ? locateOnPlateauAlternating( + items, + needle, + this.sortComparator, + this.getItemId.bind(this), + insertionIndex, + ) + : locateOnPlateauScanOneSide( + items, + needle, + this.sortComparator, + this.getItemId.bind(this), + insertionIndex, + ); + + return { index, insertionIndex }; + } + + findItem(needle: T, options?: { alternatePlateauScan?: boolean }): T | undefined { + const { index } = this.locateByItem(needle, options); + return index > -1 ? (this.items ?? 
[])[index] : undefined; + } + + setFilterResolvers(resolvers: FieldToDataResolver[]) { + this._filterFieldToDataResolvers = resolvers; + } + + addFilterResolvers(resolvers: FieldToDataResolver[]) { + this._filterFieldToDataResolvers.push(...resolvers); + } + setDebounceOptions = ({ debounceMs }: PaginatorDebounceOptions) => { this._executeQueryDebounced = debounce(this.executeQuery.bind(this), debounceMs); }; diff --git a/src/pagination/ChannelPaginator.ts b/src/pagination/ChannelPaginator.ts new file mode 100644 index 0000000000..559e7b31c2 --- /dev/null +++ b/src/pagination/ChannelPaginator.ts @@ -0,0 +1,206 @@ +import type { + PaginationQueryParams, + PaginationQueryReturnValue, + PaginatorOptions, + PaginatorState, +} from './BasePaginator'; +import { BasePaginator } from './BasePaginator'; +import type { FilterBuilderOptions } from './FilterBuilder'; +import { FilterBuilder } from './FilterBuilder'; +import { makeComparator } from './sortCompiler'; +import { generateUUIDv4 } from '../utils'; +import type { StreamChat } from '../client'; +import type { Channel } from '../channel'; +import type { ChannelFilters, ChannelOptions, ChannelSort } from '../types'; +import type { FieldToDataResolver, PathResolver } from './types.normalization'; +import { resolveDotPathValue } from './utility.normalization'; + +const DEFAULT_BACKEND_SORT: ChannelSort = { last_message_at: -1, updated_at: -1 }; // {last_updated: -1} + +export type ChannelPaginatorState = PaginatorState; + +export type ChannelPaginatorRequestOptions = Partial< + Omit +>; + +export type ChannelPaginatorOptions = { + client: StreamChat; + filterBuilderOptions?: FilterBuilderOptions; + filters?: ChannelFilters; + id?: string; + paginatorOptions?: PaginatorOptions; + requestOptions?: ChannelPaginatorRequestOptions; + sort?: ChannelSort | ChannelSort[]; +}; + +const pinnedFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'pinned', + resolve: (channel) => 
!!channel.state.membership.pinned_at, +}; + +const membersFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'members', + resolve: (channel) => + channel.state.members + ? Object.values(channel.state.members).reduce((ids, member) => { + if (member.user?.id) { + ids.push(member.user?.id); + } + return ids; + }, []) + : [], +}; + +const memberUserNameFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'member.user.name', + resolve: (channel) => + channel.state.members + ? Object.values(channel.state.members).reduce((names, member) => { + if (member.user?.name) { + names.push(member.user.name); + } + return names; + }, []) + : [], +}; + +const dataFieldFilterResolver: FieldToDataResolver = { + matchesField: () => true, + resolve: (channel, path) => resolveDotPathValue(channel.data, path), +}; + +// very, very unfortunately channel data is dispersed btw Channel.data and Channel.state +const channelSortPathResolver: PathResolver = (channel, path) => { + switch (path) { + case 'last_message_at': + return channel.state.last_message_at; + case 'has_unread': { + const userId = channel.getClient().user?.id; + return !!(userId && channel.state.read[userId].unread_messages); + } + case 'last_updated': { + // combination of last_message_at and updated_at + const lastMessageAt = channel.state.last_message_at?.getTime() ?? 0; + const updatedAt = channel.data?.updated_at + ? new Date(channel.data?.updated_at).getTime() + : 0; + return lastMessageAt >= updatedAt ? lastMessageAt : updatedAt; + } + case 'pinned_at': + return channel.state.membership.pinned_at; + case 'unread_count': { + const userId = channel.getClient().user?.id; + return userId ? 
channel.state.read[userId].unread_messages : 0; + } + default: + return resolveDotPathValue(channel.data, path); + } +}; + +// todo: maybe items could be just an array of {cid: string} and the data would be retrieved from client.activeChannels +// todo: maybe we should introduce client._cache.channels that would be reactive and orchestrator would subscribe to client._cache.channels state to keep all the dependent state in sync +export class ChannelPaginator extends BasePaginator { + // state: StateStore; + private client: StreamChat; + protected _filters: ChannelFilters | undefined; + protected _sort: ChannelSort | ChannelSort[] | undefined; + protected _options: ChannelPaginatorRequestOptions | undefined; + private _id: string; + sortComparator: (a: Channel, b: Channel) => number; + filterBuilder: FilterBuilder; + + constructor({ + client, + id, + filterBuilderOptions, + filters, + paginatorOptions, + requestOptions, + sort, + }: ChannelPaginatorOptions) { + super(paginatorOptions); + const definedSort = sort ?? DEFAULT_BACKEND_SORT; + this.client = client; + this._id = id ?? `channel-paginator-${generateUUIDv4()}`; + this._sort = definedSort; + this._filters = filters; + this._options = requestOptions; + this.filterBuilder = new FilterBuilder(filterBuilderOptions); + this.sortComparator = makeComparator({ + sort: definedSort, + resolvePathValue: channelSortPathResolver, + tiebreaker: (l, r) => { + const leftId = this.getItemId(l); + const rightId = this.getItemId(r); + return leftId < rightId ? -1 : leftId > rightId ? 
1 : 0; + }, + }); + this.setFilterResolvers([ + pinnedFilterResolver, + membersFilterResolver, + memberUserNameFilterResolver, + dataFieldFilterResolver, + ]); + } + + get id() { + return this._id; + } + + get filters(): ChannelFilters | undefined { + return this._filters; + } + + get sort(): ChannelSort | undefined { + return this._sort; + } + + get options(): ChannelOptions | undefined { + return this._options; + } + + set filters(filters: ChannelFilters | undefined) { + this._filters = filters; + this.resetState(); + } + + set sort(sort: ChannelSort | ChannelSort[] | undefined) { + this._sort = sort; + this.sortComparator = makeComparator({ + sort: this.sort ?? DEFAULT_BACKEND_SORT, + }); + this.resetState(); + } + + set options(options: ChannelPaginatorRequestOptions | undefined) { + this._options = options; + this.resetState(); + } + + getItemId(item: Channel): string { + return item.cid; + } + + buildFilters = (): ChannelFilters => + this.filterBuilder.buildFilters({ + baseFilters: { ...this.filters }, + }); + + query = async ({ direction }: PaginationQueryParams = {}): Promise< + PaginationQueryReturnValue + > => { + if (direction) { + console.warn('Direction is not supported with channel pagination.'); + } + const filters = this.buildFilters(); + const options: ChannelOptions = { + ...this.options, + limit: this.pageSize, + offset: this.offset, + }; + const items = await this.client.queryChannels(filters, this.sort, options); + return { items }; + }; + + filterQueryResults = (items: Channel[]) => items; +} diff --git a/src/pagination/FilterBuilder.ts b/src/pagination/FilterBuilder.ts index 9945dc9a29..53182a2c94 100644 --- a/src/pagination/FilterBuilder.ts +++ b/src/pagination/FilterBuilder.ts @@ -31,7 +31,10 @@ export type FilterBuilderGenerators< }; }; -export type FilterBuilderOptions> = { +export type FilterBuilderOptions< + TFilters, + TContext extends Record = Record, +> = { initialFilterConfig?: FilterBuilderGenerators; initialContext?: TContext; }; 
diff --git a/src/pagination/ReminderPaginator.ts b/src/pagination/ReminderPaginator.ts index ff81b5dc91..789354cd8c 100644 --- a/src/pagination/ReminderPaginator.ts +++ b/src/pagination/ReminderPaginator.ts @@ -37,7 +37,9 @@ export class ReminderPaginator extends BasePaginator { query = async ({ direction, - }: PaginationQueryParams): Promise> => { + }: Required): Promise< + PaginationQueryReturnValue + > => { const cursor = this.cursor?.[direction]; const { reminders: items, diff --git a/src/pagination/filterCompiler.ts b/src/pagination/filterCompiler.ts new file mode 100644 index 0000000000..a60f745167 --- /dev/null +++ b/src/pagination/filterCompiler.ts @@ -0,0 +1,192 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { + arraysEqualAsSets, + asArray, + compare, + isIterableButNotString, + normalizeComparedValues, + resolveDotPathValue, + toIterableArray, + tokenize, +} from './utility.normalization'; +import type { FieldToDataResolver } from './types.normalization'; +import type { QueryFilters } from '../types'; + +export type ItemMatchesFilterOptions = { + /** Custom resolvers to extract values from an item given a path */ + resolvers?: ReadonlyArray>; +}; + +export function itemMatchesFilter( + item: T, + filter: QueryFilters, + options: ItemMatchesFilterOptions, +): boolean { + const resolvers = options.resolvers ?? []; + const resolverValueCache = new Map(); + + const resolveOnce = (field: string) => { + if (resolverValueCache.has(field)) return resolverValueCache.get(field); + const resolver = resolvers?.find((resolver) => resolver.matchesField(field)) ?? 
{ + resolve: resolveDotPathValue, + }; + const value = resolver.resolve(item, field); + resolverValueCache.set(field, value); + return value; + }; + + const matches = (filterNode: QueryFilters): boolean => { + if (!filterNode || typeof filterNode !== 'object') return true; + + if (filterNode.$and) return filterNode.$and.every((n) => matches(n)); + if (filterNode.$or) return filterNode.$or.some((n) => matches(n)); + if (filterNode.$nor) return !filterNode.$nor.some((n) => matches(n)); + + for (const [field, condition] of Object.entries(filterNode)) { + const itemPropertyValue = resolveOnce(field); + + if ( + typeof condition !== 'object' || + condition === null || + Array.isArray(condition) + ) { + if (!equalsOp(itemPropertyValue, condition)) return false; + continue; + } + + for (const [op, filterValue] of Object.entries(condition)) { + switch (op) { + case '$eq': + if (!equalsOp(itemPropertyValue, filterValue)) return false; + break; + case '$ne': + if (equalsOp(itemPropertyValue, filterValue)) return false; + break; + + case '$in': + if (!inSetOp(itemPropertyValue, asArray(filterValue))) return false; + break; + case '$nin': + if (inSetOp(itemPropertyValue, asArray(filterValue))) return false; + break; + + case '$gt': + if (!orderedCompareOp(itemPropertyValue, filterValue, (c) => c > 0)) + return false; + break; + case '$gte': + if (!orderedCompareOp(itemPropertyValue, filterValue, (c) => c >= 0)) + return false; + break; + case '$lt': + if (!orderedCompareOp(itemPropertyValue, filterValue, (c) => c < 0)) + return false; + break; + case '$lte': + if (!orderedCompareOp(itemPropertyValue, filterValue, (c) => c <= 0)) + return false; + break; + + case '$exists': + if (!!itemPropertyValue !== !!filterValue) return false; + break; + case '$contains': + if (!containsOp(itemPropertyValue, filterValue)) return false; + break; + case '$autocomplete': + if (!autoCompleteOp(itemPropertyValue, filterValue)) return false; + break; + default: + return false; + } + } + } + 
return true; + }; + return matches(filter); +} + +/** + * Duplicates ignored for array–array equality: ['a','a','b'] equals ['b','a']. + * + * Empty arrays: [] equals []; a scalar never equals []. + * + * This reuses your normalizeComparedValues so '1' equals 1, ISO dates compare correctly, etc. + * + * $gt/$gte/$lt/$lte remain scalar-only (return false if either side is iterable), as you wanted. + * + * $in/$nin left may be scalar or iterable; the right is a list. + * @param a + * @param b + * @param ok + */ +function orderedCompareOp(a: any, b: any, ok: (c: number) => boolean): boolean { + if (isIterableButNotString(a) || isIterableButNotString(b)) return false; + const n = normalizeComparedValues(a, b); + if (n.kind === 'incomparable') return false; + return ok(compare(n.a, n.b)); +} + +function equalsOp(left: any, right: any): boolean { + const leftIsIter = isIterableButNotString(left); + const rightIsIter = isIterableButNotString(right); + + if (!leftIsIter && !rightIsIter) { + // scalar vs scalar + const n = normalizeComparedValues(left, right); + if (n.kind === 'incomparable') return Object.is(left, right); + return n.a === n.b; + } + + if (leftIsIter && rightIsIter) { + // array vs array → set equality (order-insensitive) + const a = toIterableArray(left); + const b = toIterableArray(right); + return arraysEqualAsSets(a, b); + } + + // one side scalar, the other iterable → membership + if (leftIsIter) { + const a = toIterableArray(left); + return a.some((elem) => equalsOp(elem, right)); + } else { + const b = toIterableArray(right); + return b.some((elem) => equalsOp(left, elem)); + } +} + +function inSetOp(a: any, arr: any[]): boolean { + return arr.some((b) => equalsOp(a, b)); +} + +function containsOp(value: any, needle: any): boolean { + if (Array.isArray(value)) return value.includes(needle); + if (typeof value === 'string' && typeof needle === 'string') + return value.includes(needle); + return false; +} + +/** + * A value matches an autocomplete 
query if: + * - value is string: every query token is a prefix of some token in the value + * - value is string[]: any element matches as above + * - query can be string (tokenized) or string[] + */ +function autoCompleteOp(value: any, query: any): boolean { + if (value == null || query == null) return false; + + const queryTokens: string[] = Array.isArray(query) + ? query.map(String).flatMap(tokenize) + : tokenize(String(query)); + if (queryTokens.length === 0) return false; + + const matchOneString = (s: string): boolean => { + const valTokens = tokenize(s); + return queryTokens.every((qt) => valTokens.some((vt) => vt.includes(qt))); + }; + + if (typeof value === 'string') return matchOneString(value); + if (Array.isArray(value)) + return value.some((v) => typeof v === 'string' && matchOneString(v)); + return false; +} diff --git a/src/pagination/index.ts b/src/pagination/index.ts index 19e2a53b80..733c5efe8c 100644 --- a/src/pagination/index.ts +++ b/src/pagination/index.ts @@ -1,3 +1,4 @@ export * from './BasePaginator'; +export * from './ChannelPaginator'; export * from './FilterBuilder'; export * from './ReminderPaginator'; diff --git a/src/pagination/sortCompiler.ts b/src/pagination/sortCompiler.ts new file mode 100644 index 0000000000..b56e9cd13f --- /dev/null +++ b/src/pagination/sortCompiler.ts @@ -0,0 +1,97 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ + +import { + compare, + resolveDotPathValue as defaultResolvePathValue, + normalizeComparedValues, +} from './utility.normalization'; +import { normalizeQuerySort } from '../utils'; +import type { AscDesc } from '../types'; +import type { Comparator, PathResolver } from './types.normalization'; + +export function binarySearchInsertIndex({ + compare, + needle, + sortedArray, +}: { + sortedArray: T[]; + needle: T; + compare: Comparator; +}): number { + let low = 0; + let high = sortedArray.length; + + while (low < high) { + const middle = (low + high) >>> 1; // fast floor((low+high)/2) + const 
comparisonResult = compare(sortedArray[middle], needle); + + // We want the first position where existing > needle to insert before it + if (comparisonResult > 0) { + high = middle; + } else { + low = middle + 1; + } + } + + return low; +} + +/** + * Negative number (< 0) → a comes before b + * + * Zero (0) → leave a and b unchanged relative to each other + * (but they can still move relative to others — sort in JS is not guaranteed stable in older engines, though modern V8/Node/Chrome/Firefox make it stable) + * + * Positive number (> 0) → a comes after b + * @param sort + * @param resolvePathValue + * @param tiebreaker + */ +export function makeComparator< + T, + S extends Record | Record[], +>({ + sort, + resolvePathValue = defaultResolvePathValue, + tiebreaker = (a, b) => compare((a as any).cid, (b as any).cid), +}: { + sort: S; + resolvePathValue?: PathResolver; + tiebreaker?: Comparator; +}): Comparator { + const terms = normalizeQuerySort(sort); + + return (a: T, b: T) => { + for (const { field: path, direction } of terms) { + const leftValue = resolvePathValue(a, path); + const rightValue = resolvePathValue(b, path); + const normalized = normalizeComparedValues(leftValue, rightValue); + let comparison: number; + switch (normalized.kind) { + case 'date': + case 'number': + case 'string': + case 'boolean': + comparison = compare(normalized.a, normalized.b); + break; + default: + // deterministic fallback: null/undefined last; else string compare + if (leftValue == null && rightValue == null) comparison = 0; + else if (leftValue == null) comparison = 1; + else if (rightValue == null) comparison = -1; + else { + const stringLeftValue = String(leftValue), + stringRightValue = String(rightValue); + comparison = + stringLeftValue === stringRightValue + ? 0 + : stringLeftValue < stringRightValue + ? -1 + : 1; + } + } + if (comparison !== 0) return direction === 1 ? comparison : -comparison; + } + return tiebreaker ? 
tiebreaker(a, b) : 0; + }; +} diff --git a/src/pagination/types.normalization.ts b/src/pagination/types.normalization.ts new file mode 100644 index 0000000000..1932a5bc73 --- /dev/null +++ b/src/pagination/types.normalization.ts @@ -0,0 +1,7 @@ +export type PathResolver = (item: DataSource, field: string) => unknown; +export type Comparator = (left: T, right: T) => number; + +export type FieldToDataResolver = { + matchesField: (field: string) => boolean; + resolve: PathResolver; +}; diff --git a/src/pagination/utility.normalization.ts b/src/pagination/utility.normalization.ts new file mode 100644 index 0000000000..c7df10aef9 --- /dev/null +++ b/src/pagination/utility.normalization.ts @@ -0,0 +1,108 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ + +export function asArray(v: any): any[] { + return Array.isArray(v) ? v : [v]; +} + +export function isISODateString(x: any): x is string { + return typeof x === 'string' && x.includes('T') && !Number.isNaN(Date.parse(x)); +} + +export function toEpochMillis(x: any): number | null { + if (x instanceof Date) return x.getTime(); + if (typeof x === 'number' && Number.isFinite(x)) return x; // treat as epoch ms + if (isISODateString(x)) return Date.parse(x); + return null; +} + +export function toNumberLike(x: any): number | null { + if (typeof x === 'number' && Number.isFinite(x)) return x; + if (typeof x === 'string' && x.trim() !== '') { + const n = Number(x); + if (Number.isFinite(n)) return n; + } + return null; +} + +export function normalizeComparedValues(a: any, b: any) { + const Ad = toEpochMillis(a), + Bd = toEpochMillis(b); + if (Ad !== null && Bd !== null) return { kind: 'date', a: Ad, b: Bd }; + + const An = toNumberLike(a), + Bn = toNumberLike(b); + if (An !== null && Bn !== null) return { kind: 'number', a: An, b: Bn }; + + if (typeof a === 'string' && typeof b === 'string') return { kind: 'string', a, b }; + if (typeof a === 'boolean' && typeof b === 'boolean') return { kind: 'boolean', a, b }; + + 
return { kind: 'incomparable', a, b }; +} + +export function normKey(x: unknown): string { + // Use your normalizeComparedValues to coerce pairs; here we need a unary form. + // We can piggyback by normalizing x against itself: + const n = normalizeComparedValues(x, x); + switch (n.kind) { + case 'date': + case 'number': + case 'string': + case 'boolean': + return `${n.kind}:${String(n.a)}`; + default: + // fallback: use JSON-like string with type tag for determinism + return `other:${String(x)}`; + } +} + +export function compare(a: any, b: any): number { + if (a === b) return 0; + return a < b ? -1 : 1; +} + +export function arraysEqualAsSets(aList: unknown[], bList: unknown[]): boolean { + // de-duplicate by normalized key + const aKeys = new Set(aList.map(normKey)); + const bKeys = new Set(bList.map(normKey)); + if (aKeys.size !== bKeys.size) return false; + for (const k of aKeys) if (!bKeys.has(k)) return false; + return true; +} + +export function normalizeString(s: string): string { + return s.normalize('NFKC').toLowerCase().trim(); +} + +export function normalizeStringAccentInsensitive(s: string): string { + return s + .normalize('NFKD') + .replace(/[\u0300-\u036f]/g, '') + .toLowerCase() + .trim(); +} + +export function tokenize(s: string): string[] { + // split on whitespace; keep simple & deterministic + return normalizeString(s).split(/\s+/).filter(Boolean); +} + +// dot-path accessor +export function resolveDotPathValue(obj: any, path: string): unknown[] { + return path + .split('.') + .reduce((reduced, key) => (!reduced ? 
undefined : reduced[key]), obj); +} + +export function isIterableButNotString(v: unknown): v is Iterable { + return ( + v != null && + typeof v !== 'string' && + typeof (v as any)[Symbol.iterator] === 'function' + ); +} + +export function toIterableArray(v: unknown): unknown[] { + if (Array.isArray(v)) return v; + if (isIterableButNotString(v)) return Array.from(v as Iterable); + return [v]; // scalar as a single-element list +} diff --git a/src/pagination/utility.queryChannel.ts b/src/pagination/utility.queryChannel.ts new file mode 100644 index 0000000000..2a2fedd9b0 --- /dev/null +++ b/src/pagination/utility.queryChannel.ts @@ -0,0 +1,77 @@ +import type { ChannelQueryOptions, QueryChannelAPIResponse } from '../types'; +import type { StreamChat } from '../client'; +import type { Channel } from '../channel'; +import { generateChannelTempCid } from '../utils'; + +/** + * prevent from duplicate invocation of channel.watch() + * when events 'notification.message_new' and 'notification.added_to_channel' arrive at the same time + */ +const WATCH_QUERY_IN_PROGRESS_FOR_CHANNEL: Record< + string, + Promise | undefined +> = {}; + +type GetChannelParams = { + client: StreamChat; + channel?: Channel; + id?: string; + members?: string[]; + options?: ChannelQueryOptions; + type?: string; +}; +/** + * Watches a channel, coalescing concurrent invocations for the same CID. + * If a watch is already in flight, this call waits for it to settle instead of + * issuing another network request. 
+ * @param client + * @param members + * @param options + * @param type + * @param id + * @param channel + */ +export const getChannel = async ({ + channel, + client, + id, + members, + options, + type, +}: GetChannelParams) => { + if (!channel && !type) { + throw new Error('Channel or channel type have to be provided to query a channel.'); + } + + // unfortunately typescript is not able to infer that if (!channel && !type) === false, then channel or type has to be truthy + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const theChannel = channel || client.channel(type!, id, { members }); + + // need to keep as with call to channel.watch the id can be changed from undefined to an actual ID generated server-side + const originalCid = theChannel?.id + ? theChannel.cid + : members && members.length + ? generateChannelTempCid(theChannel.type, members) + : undefined; + + if (!originalCid) { + throw new Error( + 'Channel ID or channel members array have to be provided to query a channel.', + ); + } + + const queryPromise = WATCH_QUERY_IN_PROGRESS_FOR_CHANNEL[originalCid]; + + if (queryPromise) { + await queryPromise; + } else { + try { + WATCH_QUERY_IN_PROGRESS_FOR_CHANNEL[originalCid] = theChannel.watch(options); + await WATCH_QUERY_IN_PROGRESS_FOR_CHANNEL[originalCid]; + } finally { + delete WATCH_QUERY_IN_PROGRESS_FOR_CHANNEL[originalCid]; + } + } + + return theChannel; +}; diff --git a/src/pagination/utility.search.ts b/src/pagination/utility.search.ts new file mode 100644 index 0000000000..4d1cec4195 --- /dev/null +++ b/src/pagination/utility.search.ts @@ -0,0 +1,56 @@ +export function locateOnPlateauAlternating( + items: readonly T[], + needle: T, + compare: (left: T, right: T) => number, + getItemId: (x: T) => string, + insertionIndex: number, +): number { + const targetId = getItemId(needle); + let leftIndex = insertionIndex - 1; + let rightIndex = insertionIndex; + + for (let step = 0; ; step++) { + const searchRight = step % 2 === 0; + + 
if (searchRight) { + if (rightIndex < items.length && compare(items[rightIndex], needle) === 0) { + if (getItemId(items[rightIndex]) === targetId) return rightIndex; + rightIndex++; + continue; + } + } else { + if (leftIndex >= 0 && compare(items[leftIndex], needle) === 0) { + if (getItemId(items[leftIndex]) === targetId) return leftIndex; + leftIndex--; + continue; + } + } + + const rightOut = + rightIndex >= items.length || compare(items[rightIndex], needle) !== 0; + const leftOut = leftIndex < 0 || compare(items[leftIndex], needle) !== 0; + if (rightOut && leftOut) break; // plateau exhausted + } + + return -1; +} + +export function locateOnPlateauScanOneSide( + items: readonly T[], + needle: T, + compare: (left: T, right: T) => number, + getItemId: (x: T) => string, + insertionIndex: number, +): number { + const targetId = getItemId(needle); + + // scan left + for (let i = insertionIndex - 1; i >= 0 && compare(items[i], needle) === 0; i--) { + if (getItemId(items[i]) === targetId) return i; + } + // scan right + for (let i = insertionIndex; i < items.length && compare(items[i], needle) === 0; i++) { + if (getItemId(items[i]) === targetId) return i; + } + return -1; +} diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts new file mode 100644 index 0000000000..e27ada86ec --- /dev/null +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -0,0 +1,580 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { getClientWithUser } from './test-utils/getClient'; +import { + Channel, + ChannelPaginator, + ChannelResponse, + EventTypes, + type StreamChat, +} from '../../src'; +import { ChannelPaginatorsOrchestrator } from '../../src/ChannelPaginatorsOrchestrator'; +vi.mock('../../src/pagination/utility.queryChannel', async () => { + return { + getChannel: vi.fn(async ({ client, id, type }) => { + return client.channel(type, id); + }), + }; +}); +import { getChannel as mockGetChannel } from 
'../../src/pagination/utility.queryChannel'; + +describe('ChannelPaginatorsOrchestrator', () => { + let client: StreamChat; + + beforeEach(() => { + client = getClientWithUser(); + vi.clearAllMocks(); + }); + + describe('constructor', () => { + it('initiates with default options', () => { + // @ts-expect-error accessing protected property + const defaultHandlers = ChannelPaginatorsOrchestrator.defaultEventHandlers; + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + expect(orchestrator.paginators).toHaveLength(0); + + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.size).toBe(Object.keys(defaultHandlers).length); + }); + + it('initiates with custom options', () => { + const paginator = new ChannelPaginator({ client }); + const customChannelVisibleHandler = vi.fn(); + const customChannelDeletedHandler = vi.fn(); + const customEventHandler = vi.fn(); + + // @ts-expect-error accessing protected property + const defaultHandlers = ChannelPaginatorsOrchestrator.defaultEventHandlers; + const eventHandlers = ChannelPaginatorsOrchestrator.getDefaultHandlers(); + + eventHandlers['channel.visible'] = [ + ...(eventHandlers['channel.visible'] ?? 
[]), + { + id: 'channel.visible:custom', + handle: customChannelVisibleHandler, + }, + ]; + + eventHandlers['channel.deleted'] = [ + { + id: 'channel.deleted:custom', + handle: customChannelDeletedHandler, + }, + ]; + + eventHandlers['custom.event'] = [ + { + id: 'custom.event', + handle: customEventHandler, + }, + ]; + + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + eventHandlers, + paginators: [paginator], + }); + expect(orchestrator.paginators).toHaveLength(1); + expect(orchestrator.getPaginatorById(paginator.id)).toStrictEqual(paginator); + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.size).toBe(Object.keys(defaultHandlers).length + 1); + + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('channel.visible').size).toBe(2); + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('channel.visible').handlers[0].id).toBe( + eventHandlers['channel.visible'][0].id, + ); + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('channel.visible').handlers[1].id).toBe( + eventHandlers['channel.visible'][1].id, + ); + + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('channel.deleted').size).toBe(1); + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('channel.deleted').handlers[0].id).toBe( + eventHandlers['channel.deleted'][0].id, + ); + + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('custom.event').size).toBe(1); + // @ts-expect-error accessing protected property + expect(orchestrator.pipelines.get('custom.event').handlers[0].id).toBe( + eventHandlers['custom.event'][0].id, + ); + }); + }); + + describe('registerSubscriptions', () => { + it('subscribes only once', async () => { + const onSpy = vi.spyOn(client, 'on'); + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + 
orchestrator.registerSubscriptions(); + orchestrator.registerSubscriptions(); + expect(onSpy).toHaveBeenCalledTimes(1); + }); + + it('routes events to correct pipelines', async () => { + const customChannelDeletedHandler = vi.fn(); + const customEventHandler = vi.fn(); + + const eventHandlers = ChannelPaginatorsOrchestrator.getDefaultHandlers(); + + eventHandlers['channel.deleted'] = [ + { + id: 'channel.deleted:custom', + handle: customChannelDeletedHandler, + }, + ]; + + eventHandlers['custom.event'] = [ + { + id: 'custom.event', + handle: customEventHandler, + }, + ]; + + const orchestrator = new ChannelPaginatorsOrchestrator({ client, eventHandlers }); + orchestrator.registerSubscriptions(); + + const channelDeletedEvent = { type: 'channel.deleted', cid: 'x' } as const; + + client.dispatchEvent(channelDeletedEvent); + + await vi.waitFor(() => { + expect(customChannelDeletedHandler).toHaveBeenCalledTimes(1); + expect(customChannelDeletedHandler).toHaveBeenCalledWith( + expect.objectContaining({ + ctx: { orchestrator }, + event: channelDeletedEvent, + }), + ); + }); + + const customEvent = { type: 'custom.event' as EventTypes, x: 'abc' } as const; + + client.dispatchEvent(customEvent); + + await vi.waitFor(() => { + expect(customEventHandler).toHaveBeenCalledTimes(1); + expect(customEventHandler).toHaveBeenCalledWith( + expect.objectContaining({ + ctx: { orchestrator }, + event: customEvent, + }), + ); + }); + }); + }); + + describe('insertPaginator', () => { + it('appends when no index is provided', () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + + orchestrator.insertPaginator({ paginator: p1 }); + orchestrator.insertPaginator({ paginator: p2 }); + + expect(orchestrator.paginators.map((p) => p.id)).toEqual([p1.id, p2.id]); + }); + + it('inserts at specific index', () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client 
}); + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + const p3 = new ChannelPaginator({ client }); + + orchestrator.insertPaginator({ paginator: p1 }); + orchestrator.insertPaginator({ paginator: p3 }); + orchestrator.insertPaginator({ paginator: p2, index: 1 }); + + expect(orchestrator.paginators.map((p) => p.id)).toEqual([p1.id, p2.id, p3.id]); + }); + + it('moves existing paginator to new index', () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + const p3 = new ChannelPaginator({ client }); + + orchestrator.insertPaginator({ paginator: p1 }); + orchestrator.insertPaginator({ paginator: p2 }); + orchestrator.insertPaginator({ paginator: p3 }); + + // move p1 from 0 to 2 + orchestrator.insertPaginator({ paginator: p1, index: 2 }); + expect(orchestrator.paginators.map((p) => p.id)).toEqual([p2.id, p3.id, p1.id]); + }); + + it('clamps out-of-bounds index', () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + + orchestrator.insertPaginator({ paginator: p1, index: -10 }); // -> 0 + orchestrator.insertPaginator({ paginator: p2, index: 999 }); // -> end + + expect(orchestrator.paginators.map((p) => p.id)).toEqual([p1.id, p2.id]); + }); + }); + + describe('addEventHandler', () => { + it('registers a custom handler and can unsubscribe it', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const channelUpdatedHandler = vi.fn(); + const unsubscribe = orchestrator.addEventHandler({ + eventType: 'channel.updated', + id: 'custom', + handle: channelUpdatedHandler, + }); + + orchestrator.registerSubscriptions(); + const channelUpdatedEvent = { type: 'channel.updated', cid: 'x' } as const; + + client.dispatchEvent(channelUpdatedEvent); + // event listeners are 
executed async + await vi.waitFor(() => { + expect(channelUpdatedHandler).toHaveBeenCalledWith({ + ctx: { orchestrator }, + event: channelUpdatedEvent, + }); + }); + + // Unsubscribe the custom handler and ensure it no longer fires + unsubscribe(); + client.dispatchEvent(channelUpdatedEvent); + + // still 1 call total (did not increment) + expect(channelUpdatedHandler).toHaveBeenCalledTimes(1); + }); + }); + + describe('ensurePipeline', () => { + it('returns the same pipeline instance for the same event type', () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p1 = orchestrator.ensurePipeline('channel.updated'); + const p2 = orchestrator.ensurePipeline('channel.updated'); + expect(p1).toBe(p2); + }); + }); + + // Helper to create a minimal channel with needed state + function makeChannel(cid: string) { + const [type, id] = cid.split(':'); + return client.channel(type, id); + } + + describe('channel.deleted', () => { + it('removes the channel from all paginators', async () => { + const cid = 'messaging:1'; + const ch = makeChannel(cid); + + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + const r1 = vi.spyOn(p1, 'removeItem'); + const r2 = vi.spyOn(p2, 'removeItem'); + + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2], + }); + client.activeChannels[cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'channel.deleted', cid } as const); + + await vi.waitFor(() => { + // client.activeChannels does not contain the deleted channel, therefore the search is performed with id + expect(r1).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); + expect(r2).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); + }); + }); + + it('is a no-op when cid is missing', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 
'removeItem'); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: 'channel.deleted' } as const); // no cid + await vi.waitFor(() => { + expect(r).not.toHaveBeenCalled(); + }); + }); + + it('tries to remove non-existent channel from all paginators', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 'removeItem'); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: 'channel.deleted', cid: 'messaging:404' }); // no such channel + await vi.waitFor(() => { + expect(r).toHaveBeenCalledWith({ id: 'messaging:404', item: undefined }); + }); + }); + }); + + describe.each(['channel.hidden', 'notification.removed_from_channel'] as EventTypes[])( + '%s', + (eventType) => { + it('removes the channel from all paginators', async () => { + const cid = 'messaging:2'; + const ch = makeChannel(cid); + + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + const r1 = vi.spyOn(p1, 'removeItem'); + const r2 = vi.spyOn(p2, 'removeItem'); + + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2], + }); + client.activeChannels[cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: eventType, cid } as const); + + await vi.waitFor(() => { + // client.activeChannels contains the hidden channel, therefore the search is performed with item + expect(r1).toHaveBeenCalledWith({ id: ch.cid, item: ch }); + expect(r2).toHaveBeenCalledWith({ id: ch.cid, item: ch }); + }); + }); + + it('is a no-op when cid is missing', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 'removeItem'); + + orchestrator.insertPaginator({ paginator: p }); + 
orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: eventType } as const); // no cid + await vi.waitFor(() => { + expect(r).not.toHaveBeenCalled(); + }); + }); + + it('tries to remove non-existent channel from all paginators', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 'removeItem'); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: eventType, cid: 'messaging:404' }); // no such channel + await vi.waitFor(() => { + expect(r).toHaveBeenCalledWith({ id: 'messaging:404', item: undefined }); + }); + }); + }, + ); + + describe.each(['channel.updated', 'channel.truncated'] as EventTypes[])( + '%s', + (eventType) => { + it('re-emits item lists for paginators that already contain the channel', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:3'); + client.activeChannels[ch.cid] = ch; + + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + p1.state.partialNext({ items: [ch] }); + vi.spyOn(p1, 'findItem').mockReturnValue(ch); + vi.spyOn(p2, 'findItem').mockReturnValue(undefined); + const partialNextSpy1 = vi.spyOn(p1.state, 'partialNext'); + const partialNextSpy2 = vi.spyOn(p2.state, 'partialNext'); + + orchestrator.insertPaginator({ paginator: p1 }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: eventType, cid: ch.cid }); + await vi.waitFor(() => { + expect(partialNextSpy2).toHaveBeenCalledTimes(0); + expect(partialNextSpy1).toHaveBeenCalledTimes(1); + const last = partialNextSpy1.mock.calls.at(-1)![0]; + expect(last.items!.length).toBe(1); + expect(last.items![0]).toStrictEqual(ch); + }); + }); + }, + ); + + describe.each([ + 'channel.visible', + 'member.updated', + 'message.new', + 'notification.added_to_channel', + 
'notification.message_new', + ] as EventTypes[])('%s', (eventType) => { + it('ingests when matchesFilter, removes when not', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:5'); + client.activeChannels[ch.cid] = ch; + + const p = new ChannelPaginator({ client }); + const matchesFilterSpy = vi.spyOn(p, 'matchesFilter').mockReturnValue(true); + const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); + const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: eventType, cid: ch.cid }); + await vi.waitFor(() => { + expect(matchesFilterSpy).toHaveBeenCalledWith(ch); + expect(ingestItemSpy).toHaveBeenCalledWith(ch); + expect(removeItemSpy).not.toHaveBeenCalled(); + }); + + matchesFilterSpy.mockReturnValue(false); + client.dispatchEvent({ type: eventType, cid: 'messaging:5' }); + + await vi.waitFor(() => { + expect(removeItemSpy).toHaveBeenCalledWith({ item: ch }); + expect(ingestItemSpy).toHaveBeenCalledTimes(1); + }); + }); + + it('loads channel by (type,id) when not in activeChannels', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + + const p = new ChannelPaginator({ client }); + const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); + vi.spyOn(p, 'matchesFilter').mockReturnValue(true); + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ + type: eventType, + channel_type: 'messaging', + channel_id: '6', + }); + + await vi.waitFor(() => { + expect(mockGetChannel).toHaveBeenCalledWith({ + client, + id: '6', + type: 'messaging', + }); + const ch = makeChannel('messaging:6'); + expect(ingestItemSpy).toHaveBeenCalledWith(ch); + 
expect(removeItemSpy).not.toHaveBeenCalled(); + }); + }); + + it('uses event.channel if provided', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:7'); + client.activeChannels[ch.cid] = ch; + + const p = new ChannelPaginator({ client }); + + const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); + vi.spyOn(p, 'matchesFilter').mockReturnValue(true); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ + type: eventType, + channel: { cid: 'messaging:7' } as ChannelResponse, + }); + await vi.waitFor(() => { + expect(ingestItemSpy).toHaveBeenCalledWith(ch); + expect(removeItemSpy).not.toHaveBeenCalled(); + }); + }); + + it('removes channel if does not match the filter anymore', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:7'); + client.activeChannels[ch.cid] = ch; + + const p = new ChannelPaginator({ client }); + + const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); + vi.spyOn(p, 'matchesFilter').mockReturnValue(false); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ + type: eventType, + channel: { cid: 'messaging:7' } as ChannelResponse, + }); + await vi.waitFor(() => { + expect(ingestItemSpy).not.toHaveBeenCalled(); + expect(removeItemSpy).toHaveBeenCalledWith({ item: ch }); + }); + }); + }); + + describe('user.presence.changed', () => { + it('updates user on channels where the user is a member and re-emits lists', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + + const ch1 = makeChannel('messaging:13'); + ch1.state.members = { + u1: { user: { id: 'u1', name: 'Old' } }, + 
u3: { user: { id: 'u3', name: 'Old3' } }, + }; + ch1.state.membership = { user: { id: 'u1', name: 'Old' } }; + + const ch2 = makeChannel('messaging:14'); + ch2.state.members = { + u1: { user: { id: 'u1', name: 'Old' } }, + u2: { user: { id: 'u2', name: 'Old2' } }, + u3: { user: { id: 'u3', name: 'Old3' } }, + }; + ch2.state.membership = { user: { id: 'u1', name: 'Old' } }; + + client.activeChannels[ch1.cid] = ch1; + client.activeChannels[ch2.cid] = ch2; + + const p = new ChannelPaginator({ client }); + p.state.partialNext({ items: [ch1, ch2] }); + const partialNextSpy = vi.spyOn(p.state, 'partialNext'); + + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); + + // user u1 presence changed + client.dispatchEvent({ + type: 'user.presence.changed', + user: { id: 'u1', name: 'NewName' }, + }); + + await vi.waitFor(() => { + expect(ch1.state.members['u1'].user?.name).toBe('NewName'); + expect(ch1.state.members['u3'].user?.name).toBe('Old3'); + + expect(ch2.state.members['u1'].user?.name).toBe('NewName'); + expect(ch2.state.members['u2'].user?.name).toBe('Old2'); + expect(ch2.state.members['u3'].user?.name).toBe('Old3'); + + expect(ch1.state.membership.user?.name).toBe('NewName'); + expect(ch2.state.membership.user?.name).toBe('NewName'); + expect(partialNextSpy).toHaveBeenCalledTimes(1); + expect(partialNextSpy).toHaveBeenCalledWith({ items: [ch1, ch2] }); + }); + + // Now user without id → ignored + partialNextSpy.mockClear(); + client.dispatchEvent({ type: 'user.presence.changed', user: {} as any }); + expect(partialNextSpy).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/test/unit/EventHandlerPipeline.test.ts b/test/unit/EventHandlerPipeline.test.ts new file mode 100644 index 0000000000..67a0ce934e --- /dev/null +++ b/test/unit/EventHandlerPipeline.test.ts @@ -0,0 +1,525 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { + EventHandlerPipeline, + type LabeledEventHandler, +} from 
'../../src/EventHandlerPipeline'; + +type TestEvent = { type: string; payload?: any }; +type TestCtx = { tag: string }; + +const makeEvt = (type: string): TestEvent => ({ type }); +const ctx: TestCtx = { tag: 'ctx' }; + +describe('EventHandlerPipeline', () => { + let pipeline: EventHandlerPipeline; + let consoleErrorSpy: ReturnType; + + beforeEach(() => { + pipeline = new EventHandlerPipeline({ id: 'test-pipe' }); + consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); + }); + + afterEach(() => { + consoleErrorSpy.mockRestore(); + }); + + describe('constructor & size', () => { + it('initializes with id and zero handlers', () => { + expect(pipeline.id).toBe('test-pipe'); + expect(pipeline.size).toBe(0); + }); + }); + + describe('insert', () => { + it('appends by default when no index', async () => { + const calls: string[] = []; + const h1 = { + id: 'h1', + handle: () => { + calls.push('h1'); + }, + }; + const h2 = { + id: 'h2', + handle: () => { + calls.push('h2'); + }, + }; + + pipeline.insert(h1); + pipeline.insert(h2); + + expect(pipeline.size).toBe(2); + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('x'), ctx).then(() => { + expect(calls).toEqual(['h1', 'h2']); + }); + }); + + it('inserts at clamped index (negative -> 0, too large -> append)', () => { + const order: string[] = []; + const a = { + id: 'a', + handle: () => { + order.push('a'); + }, + }; + const b = { + id: 'b', + handle: () => { + order.push('b'); + }, + }; + const c = { + id: 'c', + handle: () => { + order.push('c'); + }, + }; + const d = { + id: 'd', + handle: () => { + order.push('d'); + }, + }; + + pipeline.insert(a); // [a] + pipeline.insert(b); // [a,b] + pipeline.insert({ ...c, index: -10 }); // clamp to 0 => [c,a,b] + pipeline.insert({ ...d, index: 999 }); // append => [c,a,b,d] + + expect(pipeline.size).toBe(4); + // @ts-expect-error passing custom event type + return pipeline.run(makeEvt('e'), ctx).then(() => { + 
expect(order).toEqual(['c', 'a', 'b', 'd']); + }); + }); + + it('replace=false inserts and unsubscribe removes only target handler', async () => { + const calls: string[] = []; + const a = { + id: 'a', + handle: () => { + calls.push('a'); + }, + }; + const b = { + id: 'b', + handle: () => { + calls.push('b'); + }, + }; + + const unsubA = pipeline.insert({ ...a, index: 0, replace: false }); + const unsubB = pipeline.insert({ ...b, index: 0, replace: false }); + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('x'), ctx); + expect(calls).toEqual(['b', 'a']); + + unsubB(); // remove only b + expect(pipeline.size).toBe(1); + + // reset the array contents + calls.length = 0; + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('y'), ctx); + expect(calls).toEqual(['a']); + + unsubA(); + expect(pipeline.size).toBe(0); + }); + + it('replace=true replaces existing handler and revertOnUnsubscribe restores it', async () => { + const calls: string[] = []; + const orig = { + id: 'orig', + handle: () => { + calls.push('orig'); + }, + }; + const repl = { + id: 'repl', + handle: () => { + calls.push('repl'); + }, + }; + + // seed + pipeline.insert({ ...orig, index: 0 }); + // replace at 0 with repl + const unsub = pipeline.insert({ + ...repl, + index: 0, + replace: true, + revertOnUnsubscribe: true, + }); + + // handlers: [repl] + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('1'), ctx); + expect(calls).toEqual(['repl']); + + // unsubscribe => remove repl and restore orig at index 0 + unsub(); + calls.length = 0; + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('2'), ctx); + expect(calls).toEqual(['orig']); + }); + + it('replace=true at index >= length behaves like insert (does not revert)', async () => { + const calls: string[] = []; + const a = { + id: 'a', + handle: () => { + calls.push('a'); + }, + }; + const repl = { + id: 'repl', + handle: () => { + 
calls.push('repl'); + }, + }; + + pipeline.insert(a); // [a] + const unsub = pipeline.insert({ + ...repl, + index: 5, + replace: true, + revertOnUnsubscribe: true, + }); //[a,repl] + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('x'), ctx); + expect(calls).toEqual(['a', 'repl']); // reverse exec + + unsub(); // should only remove repl; no original to restore + calls.length = 0; + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('y'), ctx); + expect(calls).toEqual(['a']); + }); + }); + + describe('remove', () => { + it('removes by handler object identity', async () => { + const out: string[] = []; + const h1: LabeledEventHandler = { + id: 'h1', + handle: () => { + out.push('h1'); + }, + }; + const h2: LabeledEventHandler = { + id: 'h2', + handle: () => { + out.push('h2'); + }, + }; + + pipeline.insert(h1); + pipeline.insert(h2); + pipeline.remove(h2); // remove by object + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('evt'), ctx); + expect(out).toEqual(['h1']); // reverse exec; only h1 left + }); + + it('removes by function reference', async () => { + const out: string[] = []; + const fn = () => { + out.push('fn'); + }; + const h1: LabeledEventHandler = { id: 'h1', handle: fn }; + pipeline.insert(h1); + pipeline.remove(fn); // remove by function ref + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('evt'), ctx); + expect(out).toEqual([]); // removed + }); + + it('no-op remove for unknown handler', async () => { + const out: string[] = []; + const fn = () => { + out.push('a'); + }; + pipeline.remove(fn); // nothing inserted yet + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('evt'), ctx); // no errors + expect(out).toEqual([]); + expect(pipeline.size).toBe(0); + }); + }); + + describe('replaceAll & clear', () => { + it('replaceAll swaps the entire handler list', async () => { + const out: string[] = []; + const a = { + 
id: 'a', + handle: () => { + out.push('a'); + }, + }; + const b = { + id: 'b', + handle: () => { + out.push('b'); + }, + }; + const c = { + id: 'c', + handle: () => { + out.push('c'); + }, + }; + + pipeline.insert(a); + pipeline.insert(b); + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e'), ctx); + expect(out).toEqual(['a', 'b']); + out.length = 0; + + pipeline.replaceAll([c]); + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e2'), ctx); + expect(out).toEqual(['c']); + expect(pipeline.size).toBe(1); + }); + + it('clear removes all handlers', async () => { + const out: string[] = []; + pipeline.insert({ + id: 'a', + handle: () => { + out.push('a'); + }, + }); + pipeline.insert({ + id: 'b', + handle: () => { + out.push('b'); + }, + }); + expect(pipeline.size).toBe(2); + + pipeline.clear(); + expect(pipeline.size).toBe(0); + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e'), ctx); + expect(out).toEqual([]); // nothing ran + }); + }); + + describe('run / drain / execution order', () => { + it('serializes events: second run waits for the first to finish', async () => { + const seen: string[] = []; + let hAsyncHandlerRunCount = 0; + let resolveRun1!: () => void; + const hAsync = { + id: 'async', + handle: () => + new Promise((res) => { + if (hAsyncHandlerRunCount === 0) { + resolveRun1 = () => { + seen.push('A-done'); + res(); + }; + ++hAsyncHandlerRunCount; + } else { + setTimeout(() => { + seen.push('A-done'); + res(); + }, 0); + } + seen.push('A-start'); + }), + }; + + const hSync = { + id: 'sync', + handle: () => { + seen.push('B-run'); + }, + }; + + pipeline.insert(hAsync); + pipeline.insert(hSync); + + // @ts-expect-error passing custom event type + const eventRun1 = pipeline.run(makeEvt('ev1'), ctx); + // @ts-expect-error passing custom event type + const eventRun2 = pipeline.run(makeEvt('ev2'), ctx); + + // At this point, first run has started (A-start), + // but the hSync 
is not run until we resolveRun1 and then eventRun1 can be resolved + await Promise.resolve(); // tick microtasks + expect(seen).toEqual(['A-start']); + + resolveRun1(); + await eventRun1; + expect(seen).toEqual(['A-start', 'A-done', 'B-run']); + + // Now second event runs + await eventRun2; + + // total should be 6 entries + expect(seen).toEqual(['A-start', 'A-done', 'B-run', 'A-start', 'A-done', 'B-run']); + }); + + it('drain waits for the last queued event to finish', async () => { + const marks: string[] = []; + let handlerRunCount = 0; + let resolveLater!: () => void; + + pipeline.insert({ + id: 'hold', + handle: () => + new Promise((res) => { + if (handlerRunCount === 0) { + resolveLater = () => { + marks.push('released'); + res(); + }; + ++handlerRunCount; + } else { + setTimeout(() => { + marks.push('released'); + res(); + }, 0); + } + marks.push('held'); + }), + }); + + // @ts-expect-error passing custom event type + pipeline.run(makeEvt('e1'), ctx); + // @ts-expect-error passing custom event type + pipeline.run(makeEvt('e2'), ctx); + const drained = pipeline.drain(); + + await Promise.resolve(); + expect(marks).toEqual(['held']); // first event started + + resolveLater(); // finish first; second starts then finishes too + expect(marks).toEqual(['held', 'released']); // first event started + await drained; + expect(marks).toEqual(['held', 'released', 'held', 'released']); + }); + + it('stop action halts remaining handlers for that event only', async () => { + const order: string[] = []; + pipeline.insert({ + id: 'a', + handle: () => { + order.push('a'); + }, + }); + pipeline.insert({ + id: 'stopper', + handle: () => { + order.push('stopper'); + return { action: 'stop' }; + }, + }); + pipeline.insert({ + id: 'c', + handle: () => { + order.push('c'); + }, + }); + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e'), ctx); + expect(order).toEqual(['a', 'stopper']); + }); + + it('handler exceptions are logged but do not break 
processing', async () => { + const order: string[] = []; + const before = { + id: 'before', + handle: () => { + order.push('before'); + }, + }; + + const boom = { + id: 'boom', + handle: () => { + order.push('boom'); + throw new Error('fail'); + }, + }; + + const after = { + id: 'after', + handle: () => { + order.push('after'); + }, + }; + + pipeline.insert(before); + pipeline.insert(boom); + pipeline.insert(after); + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e'), ctx); + // reverse exec: after -> boom -> before; boom throws but processing continues + expect(order).toEqual(['before', 'boom', 'after']); + expect(consoleErrorSpy).toHaveBeenCalled(); // logged + }); + + it('snapshot isolation: handlers added during a run do not affect the current event', async () => { + const order: string[] = []; + + const late = { + id: 'late', + handle: () => { + order.push('late'); + }, + }; + const head = { + id: 'head', + handle: () => { + order.push('head'); + }, + }; + const inserter = { + id: 'inserter', + handle: () => { + order.push('inserter'); + // insert a new handler while processing this event + pipeline.insert(late); + }, + }; + const tail = { + id: 'tail', + handle: () => { + order.push('tail'); + }, + }; + + pipeline.insert(head); + pipeline.insert(inserter); + pipeline.insert(tail); + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e1'), ctx); + // 'late' must NOT run for e1 + expect(order).toEqual(['head', 'inserter', 'tail']); + + order.length = 0; + + // @ts-expect-error passing custom event type + await pipeline.run(makeEvt('e2'), ctx); + // For the next event, late is present + expect(order).toEqual(['head', 'inserter', 'tail', 'late']); + }); + }); +}); diff --git a/test/unit/LiveLocationManager.test.ts b/test/unit/LiveLocationManager.test.ts index 1148106375..922c55fe77 100644 --- a/test/unit/LiveLocationManager.test.ts +++ b/test/unit/LiveLocationManager.test.ts @@ -74,7 +74,9 @@ 
describe('LiveLocationManager', () => { watchLocation, }); expect(manager.deviceId).toEqual(deviceId); + // @ts-expect-error accessing private property expect(manager.getDeviceId).toEqual(getDeviceId); + // @ts-expect-error accessing private property expect(manager.watchLocation).toEqual(watchLocation); expect(manager.state.getLatestValue()).toEqual({ messages: new Map(), diff --git a/test/unit/pagination/BasePaginator.test.ts b/test/unit/pagination/BasePaginator.test.ts index 1f988e22e2..30bd6bfd16 100644 --- a/test/unit/pagination/BasePaginator.test.ts +++ b/test/unit/pagination/BasePaginator.test.ts @@ -1,23 +1,34 @@ import { describe, expect, it, vi } from 'vitest'; import { + AscDesc, BasePaginator, DEFAULT_PAGINATION_OPTIONS, PaginationQueryParams, PaginationQueryReturnValue, type PaginatorOptions, -} from '../../../src/pagination'; + QueryFilters, +} from '../../../src'; import { sleep } from '../../../src/utils'; +import { makeComparator } from '../../../src/pagination/sortCompiler'; const toNextTick = async () => { const sleepPromise = sleep(0); vi.advanceTimersByTime(0); await sleepPromise; }; + type TestItem = { id: string; + name?: string; + teams?: string[]; + blocked?: boolean; + createdAt?: string; // date string + age?: number; }; class Paginator extends BasePaginator { + sort: QueryFilters | undefined; + sortComparator: (a: TestItem, b: TestItem) => number = vi.fn(); queryResolve: Function = vi.fn(); queryReject: Function = vi.fn(); queryPromise: Promise> | null = null; @@ -26,6 +37,7 @@ class Paginator extends BasePaginator { constructor(options: PaginatorOptions = {}) { super(options); } + query(params: PaginationQueryParams): Promise> { const promise = new Promise>( (queryResolve, queryReject) => { @@ -57,7 +69,10 @@ describe('BasePaginator', () => { cursor: undefined, offset: 0, }); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); }); + it('initiates with custom options', () => 
{ const paginator = new Paginator({ pageSize: 1 }); expect(paginator.pageSize).not.toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); @@ -225,4 +240,454 @@ describe('BasePaginator', () => { expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); }); }); + describe('item management', () => { + const item: TestItem = { + id: 'id1', + name: 'test', + age: 100, + teams: ['abc', 'efg'], + }; + + const item2 = { + ...item, + id: 'id2', + name: 'test2', + age: 101, + }; + + const item3 = { + ...item, + id: 'id3', + name: 'test3', + age: 102, + }; + + describe('matchesFilter', () => { + it('returns true if no filter is provided', async () => { + const paginator = new Paginator(); + expect(paginator.matchesFilter(item)).toBeTruthy(); + }); + it('returns false if does not match the filter', async () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + name: { $eq: 'test1' }, + }); + expect(paginator.matchesFilter(item)).toBeFalsy(); + }); + it('returns true if item matches the filter', async () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + $or: [{ name: { $eq: 'test1' } }, { teams: { $contains: 'abc' } }], + }); + expect(paginator.matchesFilter(item)).toBeTruthy(); + }); + }); + + describe('ingestItem', () => { + it('exists but does not match the filter anymore removes the item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item2, item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams + }); + + const adjustedItem = { + ...item, + teams: ['efg'], // removed from the team abc + }; + + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item removed + expect(paginator.items).toHaveLength(2); + }); + + it('exists and matches the filter 
updates the item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item, item2, item3], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + paginator.sort = { age: 1 }; + + const adjustedItem = { + ...item, + age: 103, + }; + + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated + expect(paginator.items).toHaveLength(3); + expect(paginator.items![0]).toStrictEqual(item2); + expect(paginator.items![1]).toStrictEqual(item3); + expect(paginator.items![2]).toStrictEqual(adjustedItem); + }); + + it('does not exist and does not match the filter results in no action', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem = { + ...item, + id: 'id2', + name: 'test2', + }; + + expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action + expect(paginator.items).toHaveLength(1); + expect(paginator.items![0]).toStrictEqual(item); + }); + + it('does not exist and matches the filter inserts according to default sort order (append)', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toHaveLength(3); + expect(paginator.items![0]).toStrictEqual(item3); + expect(paginator.items![1]).toStrictEqual(item); + expect(paginator.items![2]).toStrictEqual(item2); + }); + + it('does not exist and matches the filter inserts according to sort order', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item], + }); + + // @ts-expect-error accessing protected property + 
paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toHaveLength(3); + expect(paginator.items![0]).toStrictEqual(item3); + expect(paginator.items![1]).toStrictEqual(item2); + expect(paginator.items![2]).toStrictEqual(item); + }); + }); + + describe('removeItem', () => { + it('removes existing item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item2, item], + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + expect(paginator.removeItem({ item: item3 })).toBeTruthy(); + expect(paginator.items).toHaveLength(2); + expect(paginator.items![0]).toStrictEqual(item2); + expect(paginator.items![1]).toStrictEqual(item); + }); + + it('results in no action for non-existent item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item2, item], + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + expect(paginator.removeItem({ item: item3 })).toBeFalsy(); + expect(paginator.items).toHaveLength(2); + expect(paginator.items![0]).toStrictEqual(item2); + expect(paginator.items![1]).toStrictEqual(item); + }); + }); + + describe('contains', () => { + it('returns true if the item exists', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item2, item], + }); + expect(paginator.contains(item3)).toBeTruthy(); + }); + + it('returns false if the items does not exist', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item2, item], + }); + expect(paginator.contains(item3)).toBeFalsy(); + }); + }); + + describe('locateByItem', () => { + const a: TestItem = { id: 'a', age: 30, name: 'A' }; + const b: TestItem = { id: 'b', age: 
25, name: 'B' }; + const c: TestItem = { id: 'c', age: 25, name: 'C' }; + const d: TestItem = { id: 'd', age: 20, name: 'D' }; + + const tieBreakerById = (l: TestItem, r: TestItem) => + l.id < r.id ? -1 : l.id > r.id ? 1 : 0; + + it('returns {index:-1, insertionIndex:0} for empty list', () => { + const paginator = new Paginator(); + const res = paginator.locateByItem(a); + expect(res).toEqual({ index: -1, insertionIndex: 0 }); + }); + + it('finds an existing item on a tie plateau (no ID tiebreaker)', () => { + const paginator = new Paginator(); + // comparator: age desc only (ties produce a plateau) + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + // items are already sorted by age desc + paginator.state.partialNext({ items: [a, b, c, d] }); + + const res = paginator.locateByItem(c); + expect(res.index).toBe(2); // c is at index 2 in [a, b, c, d] + // insertionIndex for identical key (age 25) is after the plateau + expect(res.insertionIndex).toBe(3); + }); + + it('returns insertion index when not found on a tie plateau (no ID tiebreaker)', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + // same sort keys as b/c but different id; not present + const x: TestItem = { id: 'x', age: 25, name: 'X' }; + const res = paginator.locateByItem(x); + // insertion point should be after the 25-plateau (after c at index 2) + expect(res.index).toBe(-1); + expect(res.insertionIndex).toBe(3); + }); + + it('finds exact index with ID tiebreaker in comparator (pure O(log n))', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + // tie-breaker on id asc guarantees a total order + tiebreaker: tieBreakerById, + }); + + // With tiebreaker, the order within age==25 is by id asc: b (id 'b'), then 
c (id 'c') + paginator.state.partialNext({ items: [a, b, c, d] }); + + const res = paginator.locateByItem(c); + expect(res.index).toBe(2); + // In this setting the insertionIndex is deterministic but not strictly needed when found + expect(res.insertionIndex).toBeGreaterThanOrEqual(2); + }); + + it('computes insertion at the beginning when needle sorts before all items', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + const z: TestItem = { id: 'z', age: 40, name: 'Z' }; // highest age → goes to front + const res = paginator.locateByItem(z); + expect(res.index).toBe(-1); + expect(res.insertionIndex).toBe(0); + }); + + it('computes insertion at the end when needle sorts after all items', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + const z: TestItem = { id: 'z', age: 10, name: 'Z' }; // lowest age → goes to end + const res = paginator.locateByItem(z); + expect(res.index).toBe(-1); + expect(res.insertionIndex).toBe(4); + }); + + it('checks both immediate neighbors before plateau scan (fast path)', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + // needle equal to left neighbor of insertionIndex + const resLeftNeighbor = paginator.locateByItem(c); + expect(resLeftNeighbor.index).toBe(2); + + // needle equal to right neighbor (craft by duplicating c’s sort but different id not present) + const y: TestItem = { id: 'y', age: 25, name: 'Y' }; + const resRightNeighbor = paginator.locateByItem(y); + expect(resRightNeighbor.index).toBe(-1); + 
expect(resRightNeighbor.insertionIndex).toBe(3); + }); + }); + + describe('findItem', () => { + const a: TestItem = { id: 'a', age: 30 }; + const b: TestItem = { id: 'b', age: 25 }; + const c: TestItem = { id: 'c', age: 25 }; + const d: TestItem = { id: 'd', age: 20 }; + + it('returns the exact item instance when present', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + // Same identity object: + expect(paginator.findItem(c)).toBe(c); + + // Same identity by id but different object reference still matches by locateByItem: + const cClone = { ...c }; + expect(paginator.findItem(cClone)).toBe(c); + }); + + it('returns undefined when not present', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.state.partialNext({ items: [a, b, d] }); + + const needle: TestItem = { id: 'x', age: 25 }; + expect(paginator.findItem(needle)).toBeUndefined(); + }); + + it('works with an ID tie-breaker comparator as well', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: (l: TestItem, r: TestItem) => + l.id < r.id ? -1 : l.id > r.id ? 
1 : 0, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + expect(paginator.findItem(c)).toBe(c); + const x: TestItem = { id: 'x', age: 25 }; + expect(paginator.findItem(x)).toBeUndefined(); + }); + + it('handles empty list', () => { + const paginator = new Paginator(); + expect(paginator.findItem({ id: 'z' })).toBeUndefined(); + }); + }); + + describe('filter resolvers', () => { + const resolvers1 = [{ matchesField: () => true, resolve: () => 'abc' }]; + const resolvers2 = [ + { matchesField: () => false, resolve: () => 'efg' }, + { matchesField: () => true, resolve: () => 'hij' }, + ]; + it('get overridden with setFilterResolvers', () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + + paginator.setFilterResolvers(resolvers1); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers1.length); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); + + paginator.setFilterResolvers(resolvers2); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers2.length); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers2); + + paginator.setFilterResolvers([]); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + }); + + it('get expanded with addFilterResolvers', () => { + const paginator = new Paginator(); + paginator.addFilterResolvers(resolvers1); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); + + paginator.addFilterResolvers(resolvers2); + + // @ts-expect-error accessing protected property + 
expect(paginator._filterFieldToDataResolvers).toStrictEqual([ + ...resolvers1, + ...resolvers2, + ]); + + paginator.addFilterResolvers([]); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual([ + ...resolvers1, + ...resolvers2, + ]); + }); + }); + }); }); diff --git a/test/unit/pagination/ChannelPaginator.test.ts b/test/unit/pagination/ChannelPaginator.test.ts new file mode 100644 index 0000000000..b33b18a8bc --- /dev/null +++ b/test/unit/pagination/ChannelPaginator.test.ts @@ -0,0 +1,441 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { + Channel, + type ChannelFilters, + ChannelOptions, + ChannelPaginator, + ChannelSort, + DEFAULT_PAGINATION_OPTIONS, + type FilterBuilderGenerators, + type StreamChat, +} from '../../../src'; +import { getClientWithUser } from '../test-utils/getClient'; +import type { FieldToDataResolver } from '../../../src/pagination/types.normalization'; + +const user = { id: 'custom-id' }; + +describe('ChannelPaginator', () => { + let client: StreamChat; + let channel1: Channel; + let channel2: Channel; + + beforeEach(() => { + client = getClientWithUser(user); + + channel1 = new Channel(client, 'type', 'id1', {}); + channel1.state.last_message_at = new Date('1972-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1972-01-01T08:39:35.235Z'; + + channel2 = new Channel(client, 'type', 'id1', {}); + channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + }); + + it('initiates with defaults', () => { + const paginator = new ChannelPaginator({ client }); + expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); + expect(paginator.state.getLatestValue()).toEqual({ + hasNext: true, + hasPrev: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: undefined, + offset: 0, + }); + expect(paginator.id.startsWith('channel-paginator')).toBeTruthy(); + 
expect(paginator.sortComparator).toBeDefined(); + + channel1.state.last_message_at = new Date('1970-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; + + channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + + expect(paginator.sortComparator(channel1, channel2)).toBe(1); // channel2 comes before channel1 + expect(paginator.filterBuilder.buildFilters()).toStrictEqual({}); + expect( + paginator.filterBuilder.buildFilters({ baseFilters: paginator.filters }), + ).toStrictEqual({}); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + }); + + it('initiates with options', () => { + const customId = 'custom-id'; + const filterGenerators: FilterBuilderGenerators = { + custom: { + enabled: true, + generate: (context) => context, + }, + }; + const initialFilterBuilderContext = { x: 'y' }; + + channel1.data!.created_at = '1970-01-01T08:39:35.235Z'; + channel2.data!.created_at = '1971-01-01T08:39:35.235Z'; + + const paginator = new ChannelPaginator({ + client, + id: customId, + filterBuilderOptions: { + initialContext: initialFilterBuilderContext, + initialFilterConfig: filterGenerators, + }, + filters: { type: 'type' }, + paginatorOptions: { pageSize: 2 }, + requestOptions: { member_limit: 5 }, + sort: { created_at: 1 }, + }); + expect(paginator.pageSize).toBe(2); + expect(paginator.state.getLatestValue()).toEqual({ + hasNext: true, + hasPrev: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: undefined, + offset: 0, + }); + expect(paginator.id.startsWith(customId)).toBeTruthy(); + + expect(paginator.sortComparator(channel1, channel2)).toBe(-1); // channel1 comes before channel2 + expect(paginator.filterBuilder.buildFilters()).toStrictEqual({ + ...initialFilterBuilderContext, + }); + expect( + paginator.filterBuilder.buildFilters({ baseFilters: paginator.filters }), + 
).toStrictEqual({ + type: 'type', + ...initialFilterBuilderContext, + }); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + }); + + describe('sortComparator', () => { + const changeOrder = 1; + const keepOrder = -1; + it('should sort be default sort', () => { + const paginator = new ChannelPaginator({ client }); + expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + + channel1.state.last_message_at = new Date('1970-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; + + channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + }); + + it('should sort by non-existent attribute', () => { + const paginator = new ChannelPaginator({ client, sort: { created_at: 1 } }); + expect(paginator.sortComparator(channel1, channel2)).toBe(0); + }); + + it('should sort by attribute with the same values', () => { + const paginator = new ChannelPaginator({ client, sort: { created_at: 1 } }); + channel1.data!.created_at = '1971-01-01T08:39:35.235Z'; + channel2.data!.created_at = '1971-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(0); + }); + + it('should sort by created_at', () => { + const paginator = new ChannelPaginator({ client, sort: { created_at: 1 } }); + channel1.data!.created_at = '1972-01-01T08:39:35.235Z'; + channel2.data!.created_at = '1971-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + }); + it('should sort by has_unread', () => { + const paginator = new ChannelPaginator({ client, sort: { has_unread: 1 } }); + channel1.state.read[user.id] = { + last_read: new Date('1972-01-01T08:39:35.235Z'), + unread_messages: 10, + user, + }; + channel2.state.read[user.id] = { + last_read: new Date('1972-01-01T08:39:35.235Z'), + 
unread_messages: 0, + user, + }; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + }); + it('should sort by last_message_at', () => { + const paginator = new ChannelPaginator({ client, sort: { last_message_at: 1 } }); + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + }); + it('should sort by last_updated', () => { + const paginator = new ChannelPaginator({ client, sort: { last_updated: 1 } }); + + // compares channel1.state.last_message_at with channel2.data!.updated_at + channel1.state.last_message_at = new Date('1975-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; + channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1973-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + + // compares channel2.state.last_message_at with channel1.data!.updated_at + channel1.state.last_message_at = new Date('1975-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1976-01-01T08:39:35.235Z'; + channel2.state.last_message_at = new Date('1978-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1973-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + }); + it('should sort by member_count', () => { + const paginator = new ChannelPaginator({ client, sort: { member_count: 1 } }); + channel1.data!.member_count = 2; + channel2.data!.member_count = 1; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + }); + it('should sort by pinned_at', () => { + const paginator = new ChannelPaginator({ client, sort: { pinned_at: 1 } }); + channel1.state.membership = { pinned_at: '1972-01-01T08:39:35.235Z' }; + channel2.state.membership = { pinned_at: '1971-01-01T08:39:35.235Z' }; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + + channel1.state.membership = { pinned_at: '1970-01-01T08:39:35.235Z' }; + channel2.state.membership = 
{ pinned_at: '1971-01-01T08:39:35.235Z' }; + expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + }); + it('should sort by unread_count', () => { + const paginator = new ChannelPaginator({ client, sort: { unread_count: 1 } }); + channel1.state.read[user.id] = { + last_read: new Date(), + unread_messages: 10, + user, + }; + channel2.state.read[user.id] = { + last_read: new Date(), + unread_messages: 0, + user, + }; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + + channel1.state.read[user.id] = { + last_read: new Date(), + unread_messages: 10, + user, + }; + channel2.state.read[user.id] = { + last_read: new Date(), + unread_messages: 11, + user, + }; + expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + }); + it('should sort by updated_at', () => { + const paginator = new ChannelPaginator({ client, sort: { updated_at: 1 } }); + + channel1.data!.updated_at = '1972-01-01T08:39:35.235Z'; + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + + channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + }); + it('should sort by custom field', () => { + // @ts-expect-error using field not declared among CustomChannelData + const paginator = new ChannelPaginator({ client, sort: { customField: 1 } }); + + // @ts-expect-error using field not declared among CustomChannelData + channel1.data!.customField = 'B'; + // @ts-expect-error using field not declared among CustomChannelData + channel2.data!.customField = 'A'; + expect(paginator.sortComparator(channel1, channel2)).toBe(changeOrder); + + // @ts-expect-error using field not declared among CustomChannelData + channel1.data!.customField = 'A'; + // @ts-expect-error using field not declared among CustomChannelData + channel2.data!.customField = 'B'; + 
expect(paginator.sortComparator(channel1, channel2)).toBe(keepOrder); + }); + }); + + describe('filter resolvers', () => { + it('resolves "pinned" field', () => { + const paginator = new ChannelPaginator({ + client, + filters: { members: { $in: [user.id] }, pinned: true }, + }); + + channel1.state.members = { + [user.id]: { user }, + ['other-member']: { user: { id: 'other-member' } }, + }; + + channel1.state.membership = { + user, + pinned_at: '2025-09-03T12:19:39.101089Z', + }; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.state.membership = { + user, + pinned_at: undefined, + }; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it('resolves "members" field', () => { + const paginator = new ChannelPaginator({ + client, + filters: { members: { $in: [user.id] } }, + }); + channel1.state.members = { + [user.id]: { user }, + ['other-member']: { user: { id: 'other-member' } }, + }; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.state.members = { + ['other-member']: { user: { id: 'other-member' } }, + }; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it('resolves "member.user.name" field', () => { + const paginator = new ChannelPaginator({ + client, + filters: { 'member.user.name': { $autocomplete: '-' } }, + }); + channel1.state.members = { + [user.id]: { user: { ...user, name: 'name' } }, + ['other-member']: { user: { id: 'other-member', name: 'na-me' } }, + }; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.state.members = { + [user.id]: { user: { ...user, name: 'name' } }, + }; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it('resolves ChannelResponse fields', () => { + const paginator = new ChannelPaginator({ client, filters: { blocked: true } }); + channel1.data!.blocked = true; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.data!.blocked = false; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + 
it('resolves custom fields stored in channel.data', () => { + const paginator = new ChannelPaginator({ + client, + // @ts-expect-error declaring custom property field in filter + filters: { x: { $contains: 'specific' } }, + }); + // @ts-expect-error using undeclared custom property + channel1.data!.x = ['a', 'b', 'specific']; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + // @ts-expect-error using undeclared custom property + channel1.data!.x = undefined; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it('overrides filter resolvers', () => { + const resolver: FieldToDataResolver = { + matchesField: (field) => field === 'custom.nested', + resolve: (item, field) => { + // @ts-expect-error accessing undeclared custom property + return item.data!.custom?.nested; + }, + }; + + const paginator = new ChannelPaginator({ + client, + // @ts-expect-error using undeclared custom property + filters: { 'custom.nested': { $eq: 'x' } }, + }); + paginator.setFilterResolvers([resolver]); + + // @ts-expect-error using undeclared custom property + channel1.data!.custom = { nested: 'x' }; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + // @ts-expect-error using undeclared custom property + channel1.data!.custom = { nested: 'y' }; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + }); + + describe('setters', () => { + const stateAfterQuery = { + items: [channel1, channel2], + hasNext: false, + hasPrev: false, + offset: 10, + isLoading: false, + lastQueryError: undefined, + cursor: undefined, + }; + it('filters reset state', () => { + const paginator = new ChannelPaginator({ client }); + paginator.state.partialNext(stateAfterQuery); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + paginator.filters = {}; + expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + }); + it('sort reset state', () => { + const paginator = new ChannelPaginator({ client }); + 
paginator.state.partialNext(stateAfterQuery); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + paginator.sort = {}; + expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + }); + it('options reset state', () => { + const paginator = new ChannelPaginator({ client }); + paginator.state.partialNext(stateAfterQuery); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + paginator.options = {}; + expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + }); + }); + + describe('query', () => { + it('is called with correct parameters', async () => { + const queryChannelsSpy = vi.spyOn(client, 'queryChannels').mockResolvedValue([]); + const filters: ChannelFilters = { name: 'A' }; + const sort: ChannelSort = { has_unread: -1 }; + const requestOptions: ChannelOptions = { message_limit: 3 }; + const paginator = new ChannelPaginator({ + client, + filters, + sort, + requestOptions, + filterBuilderOptions: { + initialFilterConfig: { + custom: { + enabled: true, + generate: (context: { num?: number }) => ({ + muted: { $eq: !!context.num }, + }), + }, + }, + initialContext: { num: 5 }, + }, + paginatorOptions: { pageSize: 22 }, + }); + + await paginator.query(); + expect(queryChannelsSpy).toHaveBeenCalledWith( + { + muted: { + $eq: true, + }, + name: 'A', + }, + { + has_unread: -1, + }, + { + limit: 22, + message_limit: 3, + offset: 0, + }, + ); + }); + }); +}); diff --git a/test/unit/pagination/FilterBuilder.test.ts b/test/unit/pagination/FilterBuilder.test.ts index 7be4dfb3fe..2935b4bf15 100644 --- a/test/unit/pagination/FilterBuilder.test.ts +++ b/test/unit/pagination/FilterBuilder.test.ts @@ -4,7 +4,7 @@ import { FilterBuilderGenerators, ExtendedQueryFilter, ExtendedQueryFilters, -} from '../../../src/pagination/FilterBuilder'; +} from '../../../src'; type BasicFilterFieldsSchema = { name: ExtendedQueryFilter; diff --git a/test/unit/pagination/filterCompiler.test.ts 
b/test/unit/pagination/filterCompiler.test.ts new file mode 100644 index 0000000000..38f96b6e11 --- /dev/null +++ b/test/unit/pagination/filterCompiler.test.ts @@ -0,0 +1,368 @@ +import { describe, expect, it } from 'vitest'; +import { + ChannelData, + ChannelMemberResponse, + ChannelResponse, + ContainsOperator, + PrimitiveFilter, + QueryFilter, + QueryFilters, + RequireOnlyOne, +} from '../../../src'; +import { + itemMatchesFilter, + ItemMatchesFilterOptions, +} from '../../../src/pagination/filterCompiler'; +import { resolveDotPathValue } from '../../../src/pagination/utility.normalization'; + +type CustomChannelData = { + custom1?: string[]; + custom2?: string; + custom3?: number; + custom4?: boolean; + custom5?: string; + data?: { + members: ChannelMemberResponse[]; + }; + name?: string; +}; +type CustomChannelFilters = QueryFilters< + ContainsOperator> & { + archived?: boolean; + 'member.user.name'?: + | RequireOnlyOne<{ + $autocomplete?: string; + $eq?: string; + }> + | string; + + members?: + | RequireOnlyOne, '$in'>> + | RequireOnlyOne, '$eq'>> + | PrimitiveFilter; + name?: + | RequireOnlyOne< + { + $autocomplete?: string; + } & QueryFilter + > + | PrimitiveFilter; + pinned?: boolean; + } & { + [Key in keyof Omit]: + | RequireOnlyOne> + | PrimitiveFilter; + } +>; + +type TestChannel = ChannelData & CustomChannelData; + +const filter: CustomChannelFilters = { + $or: [ + { + $and: [ + { custom1: { $contains: 'a' } }, + { custom2: { $eq: '5' } }, + { custom3: { $lt: 10 } }, + { custom4: { $eq: true } }, + ], + }, + { + $and: [ + { custom1: { $contains: 'b' } }, + { custom2: { $eq: '15' } }, + { custom3: { $lt: 10 } }, + { custom4: { $eq: false } }, + ], + }, + { + $or: [ + { name: { $autocomplete: 'ith' } }, + { name: { $autocomplete: 'Sm' } }, + { 'member.user.name': { $autocomplete: 'ack' } }, + { blocked: true }, + { custom2: { $eq: '5' } }, + { custom2: { $lt: '2020-08-26T11:09:07.814Z' } }, + { custom2: { $gt: '2022-08-26T11:09:07.814Z' } }, + { custom3: 
{ $gt: 10 } }, + { custom4: { $exists: true } }, + { custom1: { $contains: 'b' } }, + { custom5: { $in: ['Rob', 'Bob'] } }, + ], + }, + ], +}; + +const options: ItemMatchesFilterOptions = { + resolvers: [ + { + matchesField: () => true, + resolve: (item, path) => resolveDotPathValue(item, path), + }, + ], +}; + +describe('itemMatchesFilter', () => { + it('determines that data do not match the filter', () => { + const item: TestChannel = {}; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match a primitive filter', () => { + const item: TestChannel = { blocked: true }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match a primitive filter', () => { + const item: TestChannel = { blocked: undefined }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $eq filter', () => { + const item: TestChannel = { custom2: '5' }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the $eq filter', () => { + const item: TestChannel = { custom2: '55' }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data match the $ne filter', () => { + const item: TestChannel = {}; + expect( + itemMatchesFilter(item, { name: { $ne: 'Channel Bob' } }, options), + ).toBeTruthy(); + }); + + it('determines that data do not match the $ne filter', () => { + const item: TestChannel = { name: 'Channel Bob' }; + expect( + itemMatchesFilter(item, { name: { $ne: 'Channel Bob' } }, options), + ).toBeFalsy(); + }); + + it('determines that data match the number comparison filter', () => { + const item: TestChannel = { custom3: 11 }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the number comparison filter', () => { + const item: TestChannel = { custom3: 10 }; + 
expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the date comparison filter', () => { + const item: TestChannel = { custom2: '2020-08-26T11:09:07.714Z' }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the date comparison filter', () => { + const item: TestChannel = { custom2: '2021-08-26T11:09:07.714Z' }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $exists filter', () => { + // @ts-expect-error custom4 does not match the TestChannel definition + const item: TestChannel = { custom4: ['a', '5'] }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the $exists filter', () => { + // @ts-expect-error custom3 does not match the TestChannel definition + const item: TestChannel = { custom3: ['a', 5] }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $autocomplete filter', () => { + const item: TestChannel = { name: 'Smith' }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the $autocomplete filter', () => { + const item: TestChannel = { name: 'it' }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $contains filter', () => { + const item: TestChannel = { custom1: ['a', 'b', 'c'] }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the $contains filter', () => { + const item: TestChannel = { custom1: ['a', 'bb', 'c'] }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $in filter', () => { + const item: TestChannel = { custom5: 'Rob' }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines 
that data do not match the $in filter', () => { + const item: TestChannel = { custom5: 'Ro' }; + expect(itemMatchesFilter(item, filter, options)).toBeFalsy(); + }); + + it('determines that data match the $nin filter', () => { + const item: TestChannel = { custom5: 'Ro' }; + expect( + itemMatchesFilter( + item, + { custom5: { $nin: ['Rob', 'Bob'] } }, + options, + ), + ).toBeTruthy(); + }); + + it('determines that data do not match the $nin filter', () => { + const item: TestChannel = { custom5: 'Rob' }; + expect( + itemMatchesFilter( + item, + { custom5: { $nin: ['Rob', 'Bob'] } }, + options, + ), + ).toBeFalsy(); + }); + + it('determines that data match the $and filter', () => { + const item: TestChannel = { + custom1: ['x', 'b', 'y'], + custom2: '15', + custom3: 9, + custom4: false, + }; + expect(itemMatchesFilter(item, filter, options)).toBeTruthy(); + }); + + it('determines that data do not match the $and filter', () => { + const item: TestChannel = { + custom1: ['x', 'b', 'y'], + custom2: '15', + custom3: 10, + custom4: false, + }; + const andFilters = filter.$or!.slice(0, 2); + // @ts-ignore + expect( + itemMatchesFilter(item, { $or: andFilters }, options), + ).toBeFalsy(); + }); + + it('determines that data match the $nor filter', () => { + const item: TestChannel = { + custom1: ['x', 'y'], + // @ts-expect-error custom2 does not match the TestChannel definition + custom2: { a: 'b' }, + // @ts-expect-error custom3 does not match the TestChannel definition + custom3: true, + custom4: false, + }; + expect( + itemMatchesFilter(item, { $nor: filter.$or }, options), + ).toBeTruthy(); + }); + + it('determines that data do not match the $nor filter', () => { + // matches the 2nd $and + const item: TestChannel = { + custom1: ['x', 'b', 'y'], + custom2: '15', + custom3: 9, + custom4: false, + }; + expect( + itemMatchesFilter(item, { $nor: filter.$or }, options), + ).toBeFalsy(); + }); + + it('determines that data match filter by property dot path', () => { + const 
item: TestChannel = { + data: { + members: [ + { user: { id: '1', name: 'Jack' } }, + { user: { id: '2', name: 'Bob' } }, + { user: { id: '3', name: 'Mark' } }, + ], + }, + }; + + expect( + itemMatchesFilter( + item, + { 'member.user.name': { $autocomplete: 'rk' } }, + { + resolvers: [ + { + matchesField: (field) => field === 'member.user.name', + resolve: (item) => { + return item.data?.members.map(({ user }) => user?.name) ?? []; + }, + }, + ], + }, + ), + ).toBeTruthy(); + }); + + it('determines that data match filter by $eq: array', () => { + const item: TestChannel = { + data: { + members: [ + { user: { id: '123', name: 'Jack' } }, + { user: { id: '234', name: 'Bob' } }, + { user: { id: '345', name: 'Mark' } }, + ], + }, + }; + + // has to match all the ids + expect( + itemMatchesFilter( + item, + { members: { $eq: ['345', '123', '234'] } }, + { + resolvers: [ + { + matchesField: (field) => field === 'members', + resolve: (item) => { + return item.data?.members.map(({ user }) => user?.id) ?? []; + }, + }, + ], + }, + ), + ).toBeTruthy(); + }); + + it('determines that data do not match filter by $eq: array', () => { + const item: TestChannel = { + data: { + members: [ + { user: { id: '123', name: 'Jack' } }, + { user: { id: '234', name: 'Bob' } }, + { user: { id: '345', name: 'Mark' } }, + ], + }, + }; + + // one id is missing + expect( + itemMatchesFilter( + item, + { members: { $eq: ['123', '234'] } }, + { + resolvers: [ + { + matchesField: (field) => field === 'members', + resolve: (item) => { + return item.data?.members.map(({ user }) => user?.id) ?? 
[]; + }, + }, + ], + }, + ), + ).toBeFalsy(); + }); +}); diff --git a/test/unit/pagination/sortCompiler.test.ts b/test/unit/pagination/sortCompiler.test.ts new file mode 100644 index 0000000000..500ab3eafd --- /dev/null +++ b/test/unit/pagination/sortCompiler.test.ts @@ -0,0 +1,267 @@ +// sortCompiler.spec.ts +import { describe, it, expect } from 'vitest'; +import { + binarySearchInsertIndex, + makeComparator, +} from '../../../src/pagination/sortCompiler'; +import { resolveDotPathValue as defaultResolvePathValue } from '../../../src/pagination/utility.normalization'; +import type { AscDesc } from '../../../src'; + +// Minimal item type for tests +type Item = { + cid: string; // tie-breaker field (default tiebreak compares by cid) + v?: unknown; // primary field for many tests + nested?: { x?: unknown }; // nested field for dot-path tests +}; + +// Small utility: sort a shallow copy and return cids to verify ordering +function orderByComparator(items: Item[], cmp: (a: Item, b: Item) => number): string[] { + return [...items].sort(cmp).map((i) => i.cid); +} + +/** + * Helper to build a comparator with optional resolvePathValue override. 
+ */ +function toComparator( + sort: Record | Array>, + resolvePathValue = defaultResolvePathValue, +) { + return makeComparator | Array>>({ + sort, + resolvePathValue, + }); +} + +describe('makeComparator', () => { + it('sorts numbers ascending/descending', () => { + const items: Item[] = [ + { cid: 'c', v: 10 }, + { cid: 'a', v: 2 }, + { cid: 'b', v: 2 }, // equal to test tie-breaker by cid + { cid: 'd', v: 100 }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['a', 'b', 'c', 'd']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['d', 'c', 'a', 'b']); + }); + + it('sorts strings ascending/descending with tie-break on cid', () => { + const items: Item[] = [ + { cid: '2', v: 'beta' }, + { cid: '1', v: 'alpha' }, + { cid: '4', v: 'alpha' }, // same string as cid=1; tie-break by cid + { cid: '3', v: 'gamma' }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['1', '4', '2', '3']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['3', '2', '1', '4']); + }); + + it('sorts booleans (false < true)', () => { + const items: Item[] = [ + { cid: 'c', v: true }, + { cid: 'a', v: false }, + { cid: 'b', v: false }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['a', 'b', 'c']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['c', 'a', 'b']); + }); + + it('sorts dates (Date objects) descending', () => { + const items: Item[] = [ + { cid: 'a', v: new Date('2023-01-01T00:00:00Z') }, + { cid: 'b', v: new Date('2024-01-01T00:00:00Z') }, + { cid: 'c', v: new Date('2022-06-15T00:00:00Z') }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['c', 'a', 'b']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['b', 'a', 'c']); + }); + + 
it('sorts dates given as ISO strings equivalently to Date objects', () => { + const items: Item[] = [ + { cid: 'a', v: '2023-01-01T00:00:00Z' }, + { cid: 'b', v: '2024-01-01T00:00:00Z' }, + { cid: 'c', v: '2022-06-15T00:00:00Z' }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['c', 'a', 'b']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['b', 'a', 'c']); + }); + + it('sorts dates given as epoch ms (numbers) equivalently', () => { + const items: Item[] = [ + { cid: 'a', v: Date.parse('2023-01-01T00:00:00Z') }, + { cid: 'b', v: Date.parse('2024-01-01T00:00:00Z') }, + { cid: 'c', v: Date.parse('2022-06-15T00:00:00Z') }, + ]; + + const asc = toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['c', 'a', 'b']); + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['b', 'a', 'c']); + }); + + it('uses resolvePathValue for nested paths', () => { + const items: Item[] = [ + { cid: 'a', nested: { x: 100 } }, + { cid: 'b', nested: { x: 50 } }, + { cid: 'c', nested: { x: 75 } }, + ]; + + const cmp = toComparator({ 'nested.x': 1 }); + expect(orderByComparator(items, cmp)).toEqual(['b', 'c', 'a']); + }); + + it('applies multi-field sorting in order (then uses cid tiebreaker)', () => { + const items: Item[] = [ + { cid: '3', v: 1, nested: { x: 5 } }, + { cid: '1', v: 1, nested: { x: 10 } }, + { cid: '2', v: 1, nested: { x: 10 } }, + { cid: '4', v: 2, nested: { x: 0 } }, + ]; + + // First by v asc, then nested.x desc; if both equal, tie-break by cid asc + const cmp = toComparator([{ v: 1 }, { 'nested.x': -1 }]); + expect(orderByComparator(items, cmp)).toEqual(['1', '2', '3', '4']); + }); + + it('fallback ordering: null/undefined come last (ascending) and first (descending)', () => { + const items: Item[] = [ + { cid: 'a', v: 10 }, + { cid: 'b', v: undefined }, + { cid: 'c', v: null }, + { cid: 'd', v: 5 }, + ]; + + const asc = 
toComparator({ v: 1 }); + expect(orderByComparator(items, asc)).toEqual(['d', 'a', 'b', 'c']); // null/undefined last + + const desc = toComparator({ v: -1 }); + expect(orderByComparator(items, desc)).toEqual(['b', 'c', 'a', 'd']); // null/undefined first + }); + + it('applies custom tiebreaker when provided', () => { + const items: Item[] = [ + { cid: 'b', v: 1 }, + { cid: 'a', v: 1 }, + { cid: 'c', v: 1 }, + ]; + + const customTiebreaker = (l: Item, r: Item) => r.cid.localeCompare(l.cid); + + const cmp = makeComparator>({ + sort: { v: 1 }, // all v equal + resolvePathValue: defaultResolvePathValue, + tiebreaker: customTiebreaker, + }); + + expect(orderByComparator(items, cmp)).toEqual(['c', 'b', 'a']); + }); + + it('accepts array sort spec and object sort spec equivalently', () => { + const items: Item[] = [ + { cid: '3', v: 2 }, + { cid: '1', v: 1 }, + { cid: '2', v: 1 }, + ]; + + const arrayBasedComparator = toComparator([{ v: 1 }]); + const objectBasedComparator = toComparator({ v: 1 }); + + expect(orderByComparator(items, arrayBasedComparator)).toEqual(['1', '2', '3']); + expect(orderByComparator(items, objectBasedComparator)).toEqual(['1', '2', '3']); + }); +}); + +describe('binarySearchInsertIndex', () => { + it('inserts at beginning, middle, and end as expected', () => { + const items: Item[] = [ + { cid: 'a', v: 10 }, + { cid: 'b', v: 20 }, + { cid: 'c', v: 30 }, + { cid: 'd', v: 40 }, + ]; + const cmp = toComparator({ v: 1 }); + + // Insert before all + let index = binarySearchInsertIndex({ + sortedArray: items, + needle: { cid: 'x', v: 5 }, + compare: cmp, + }); + expect(index).toBe(0); + + // Insert in the middle + index = binarySearchInsertIndex({ + sortedArray: items, + needle: { cid: 'y', v: 25 }, + compare: cmp, + }); + expect(index).toBe(2); // between 20 and 30 + + // Insert after all + index = binarySearchInsertIndex({ + sortedArray: items, + needle: { cid: 'z', v: 50 }, + compare: cmp, + }); + expect(index).toBe(4); + }); + + it('inserts after 
equal values block (stable position after equals)', () => { + const items: Item[] = [ + { cid: 'a', v: 10 }, + { cid: 'b', v: 10 }, + { cid: 'c', v: 10 }, + ]; + const cmp = toComparator({ v: 1 }); + + const index = binarySearchInsertIndex({ + sortedArray: items, + needle: { cid: 'x', v: 10 }, + compare: cmp, + }); + + // By design, our binary search returns the first position where existing > needle. + // For equals, it advances to the right of the equal block. + expect(index).toBe(3); + }); + + it('respects multi-field comparator (e.g., secondary key decides insertion point)', () => { + const items: Item[] = [ + { cid: '2', v: 1, nested: { x: 5 } }, + { cid: '1', v: 1, nested: { x: 10 } }, // comes earlier due to nested.x desc + { cid: '3', v: 2, nested: { x: 0 } }, + ]; + const cmp = toComparator([{ v: 1 }, { 'nested.x': -1 }]); + + // Needle with same v=1 but nested.x=7 should go between cid=1 (x=10) and cid=2 (x=5) + const index = binarySearchInsertIndex({ + sortedArray: orderByComparator(items, cmp).map( + (cid) => items.find((i) => i.cid === cid)!, + ) as Item[], + needle: { cid: 'x', v: 1, nested: { x: 7 } }, + compare: cmp, + }); + + expect(index).toBe(1); // after the 10, before the 5 + }); +}); From 65fd6cded496d14eaa462ca60103844c754b28d0 Mon Sep 17 00:00:00 2001 From: martincupela Date: Mon, 15 Sep 2025 16:09:24 +0200 Subject: [PATCH 02/31] feat: allow to boost paginator items and lock item order --- src/ChannelPaginatorsOrchestrator.ts | 74 +-- src/pagination/BasePaginator.ts | 128 ++++- .../ChannelPaginatorsOrchestrator.test.ts | 122 ++++- test/unit/pagination/BasePaginator.test.ts | 459 ++++++++++++++++-- 4 files changed, 680 insertions(+), 103 deletions(-) diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index 481eba2029..6eceb7b1a7 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -13,7 +13,7 @@ import type { import { getChannel } from 
'./pagination/utility.queryChannel'; import type { Channel } from './channel'; -type ChannelPaginatorsOrchestratorEventHandlerContext = { +export type ChannelPaginatorsOrchestratorEventHandlerContext = { orchestrator: ChannelPaginatorsOrchestrator; }; @@ -73,12 +73,20 @@ const updateLists: EventHandlerPipelineHandler< if (!channel) return; - // todo: can these state updates be made atomic across all the paginators? - // maybe we could add to state store API that would allow to queue changes and then commit? orchestrator.paginators.forEach((paginator) => { if (paginator.matchesFilter(channel)) { - // todo: does it make sense to move channel at the top of the items array (original implementation) - // if items are supposed to be ordered by the sort object? + const channelBoost = paginator.getBoost(channel.cid); + if ( + [ + 'message.new', + 'notification.message_new', + 'notification.added_to_channel', + 'channel.visible', + ].includes(event.type) && + (!channelBoost || channelBoost.seq < paginator.maxBoostSeq) + ) { + paginator.boost(channel.cid, { seq: paginator.maxBoostSeq + 1 }); + } paginator.ingestItem(channel); } else { // remove if it does not match the filter anymore @@ -87,20 +95,13 @@ const updateLists: EventHandlerPipelineHandler< }); }; -// todo: we have to make sure that client.activeChannels is always up-to-date +// we have to make sure that client.activeChannels is always up-to-date const channelDeletedHandler: LabeledEventHandler = { handle: removeItem, id: 'ChannelPaginatorsOrchestrator:default-handler:channel.deleted', }; -// fixme: is it ok, remove item just because its property hidden is switched to hidden: true? What about offset cursor, should we update it? 
-const channelHiddenHandler: LabeledEventHandler = - { - handle: removeItem, - id: 'ChannelPaginatorsOrchestrator:default-handler:channel.hidden', - }; - // fixme: this handler should not be handled by the orchestrator but as Channel does not have reactive state, // we need to re-emit the whole list to reflect the changes const channelUpdatedHandler: LabeledEventHandler = @@ -188,7 +189,7 @@ export type ChannelPaginatorsOrchestratorState = { paginators: ChannelPaginator[]; }; -type EventHandlers = Partial< +export type ChannelPaginatorsOrchestratorEventHandlers = Partial< Record< SupportedEventType, LabeledEventHandler[] @@ -198,7 +199,7 @@ type EventHandlers = Partial< export type ChannelPaginatorsOrchestratorOptions = { client: StreamChat; paginators?: ChannelPaginator[]; - eventHandlers?: EventHandlers; + eventHandlers?: ChannelPaginatorsOrchestratorEventHandlers; }; export class ChannelPaginatorsOrchestrator extends WithSubscriptions { @@ -209,19 +210,19 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { EventHandlerPipeline >(); - protected static readonly defaultEventHandlers: EventHandlers = { - 'channel.deleted': [channelDeletedHandler], - 'channel.hidden': [channelHiddenHandler], - 'channel.updated': [channelUpdatedHandler], - 'channel.truncated': [channelTruncatedHandler], - 'channel.visible': [channelVisibleHandler], - 'member.updated': [memberUpdatedHandler], - 'message.new': [messageNewHandler], - 'notification.added_to_channel': [notificationAddedToChannelHandler], - 'notification.message_new': [notificationMessageNewHandler], - 'notification.removed_from_channel': [notificationRemovedFromChannelHandler], - 'user.presence.changed': [userPresenceChangedHandler], - }; + protected static readonly defaultEventHandlers: ChannelPaginatorsOrchestratorEventHandlers = + { + 'channel.deleted': [channelDeletedHandler], + 'channel.updated': [channelUpdatedHandler], + 'channel.truncated': [channelTruncatedHandler], + 'channel.visible': 
[channelVisibleHandler], + 'member.updated': [memberUpdatedHandler], + 'message.new': [messageNewHandler], + 'notification.added_to_channel': [notificationAddedToChannelHandler], + 'notification.message_new': [notificationMessageNewHandler], + 'notification.removed_from_channel': [notificationRemovedFromChannelHandler], + 'user.presence.changed': [userPresenceChangedHandler], + }; constructor({ client, @@ -242,13 +243,17 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { return this.state.getLatestValue().paginators; } + private get ctx(): ChannelPaginatorsOrchestratorEventHandlerContext { + return { orchestrator: this }; + } + /** * Returns deep copy of default handlers mapping. * The defaults can be enriched with custom handlers or the custom handlers can be replaced. */ - static getDefaultHandlers(): EventHandlers { + static getDefaultHandlers(): ChannelPaginatorsOrchestratorEventHandlers { const src = ChannelPaginatorsOrchestrator.defaultEventHandlers; - const out: EventHandlers = {}; + const out: ChannelPaginatorsOrchestratorEventHandlers = {}; for (const [type, handlers] of Object.entries(src)) { if (!handlers) continue; out[type as SupportedEventType] = [...handlers]; @@ -321,7 +326,10 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { return pipe; } - private get ctx(): ChannelPaginatorsOrchestratorEventHandlerContext { - return { orchestrator: this }; - } + reload = async () => + await Promise.allSettled( + this.paginators.map(async (paginator) => { + await paginator.reload(); + }), + ); } diff --git a/src/pagination/BasePaginator.ts b/src/pagination/BasePaginator.ts index 8c40930f0f..18e96f0877 100644 --- a/src/pagination/BasePaginator.ts +++ b/src/pagination/BasePaginator.ts @@ -35,16 +35,19 @@ export type PaginatorState = { export type PaginatorOptions = { /** The number of milliseconds to debounce the search query. The default interval is 300ms. 
*/ debounceMs?: number; + /** Will prevent changing the index of existing items */ + lockItemOrder?: boolean; pageSize?: number; }; export const DEFAULT_PAGINATION_OPTIONS: Required = { debounceMs: 300, + lockItemOrder: false, pageSize: 10, } as const; export abstract class BasePaginator { state: StateStore>; - pageSize: number; + config: Required; protected _executeQueryDebounced!: DebouncedExecQueryFunction; protected _isCursorPagination = false; /** @@ -72,10 +75,16 @@ export abstract class BasePaginator { * @protected */ protected _filterFieldToDataResolvers: FieldToDataResolver[]; + /** + * Ephemeral priority for attention UX without breaking sort invariants + * @protected + */ + protected boosts = new Map(); + protected _maxBoostSeq: number = 0; protected constructor(options?: PaginatorOptions) { - const { debounceMs, pageSize } = { ...DEFAULT_PAGINATION_OPTIONS, ...options }; - this.pageSize = pageSize; + this.config = { ...DEFAULT_PAGINATION_OPTIONS, ...options }; + const { debounceMs } = this.config; this.state = new StateStore>(this.initialState); this.setDebounceOptions({ debounceMs }); this.sortComparator = noOrderChange; @@ -126,6 +135,19 @@ export abstract class BasePaginator { return this.state.getLatestValue().offset; } + get pageSize() { + return this.config.pageSize; + } + + /** Single point of truth: always use the effective comparator */ + get effectiveComparator() { + return this.boostComparator; + } + + get maxBoostSeq() { + return this._maxBoostSeq; + } + abstract query(params: PaginationQueryParams): Promise>; abstract filterQueryResults(items: T[]): T[] | Promise; @@ -149,15 +171,77 @@ export abstract class BasePaginator { }); } + protected clearExpiredBoosts(now = Date.now()) { + for (const [id, b] of this.boosts) if (now > b.until) this.boosts.delete(id); + this._maxBoostSeq = Math.max( + ...Array.from(this.boosts.values()).map((boost) => boost.seq), + 0, + ); + } + + /** Comparator that consults boosts first, then falls back to 
sortComparator */ + protected boostComparator = (a: T, b: T): number => { + const now = Date.now(); + this.clearExpiredBoosts(now); + + const idA = this.getItemId(a); + const idB = this.getItemId(b); + const boostA = this.getBoost(idA); + const boostB = this.getBoost(idB); + + const aIsBoosted = !!(boostA && now <= boostA.until); + const bIsBoosted = !!(boostB && now <= boostB.until); + + if (aIsBoosted && !bIsBoosted) return -1; + if (!aIsBoosted && bIsBoosted) return 1; + + if (aIsBoosted && bIsBoosted) { + // higher seq wins + const seqDistance = (boostB.seq ?? 0) - (boostA.seq ?? 0); + if (seqDistance !== 0) return seqDistance > 0 ? 1 : -1; + // fall through to normal comparator for stability + } + return this.sortComparator(a, b); + }; + + /** Public API to manage boosts */ + boost(id: string, opts?: { ttlMs?: number; until?: number; seq?: number }) { + const now = Date.now(); + const until = opts?.until ?? (opts?.ttlMs != null ? now + opts.ttlMs : now + 15000); // default 15s + + if (typeof opts?.seq === 'number' && opts.seq > this._maxBoostSeq) { + this._maxBoostSeq = opts.seq; + } + + const seq = opts?.seq ?? 0; + this.boosts.set(id, { until, seq }); + } + + getBoost(id: string) { + return this.boosts.get(id); + } + + removeBoost(id: string) { + this.boosts.delete(id); + this._maxBoostSeq = Math.max( + ...Array.from(this.boosts.values()).map((boost) => boost.seq), + 0, + ); + } + + isBoosted(id: string) { + const boost = this.getBoost(id); + return !!(boost && Date.now() <= boost.until); + } + ingestItem(ingestedItem: T): boolean { const items = this.items ?? []; const id = this.getItemId(ingestedItem); - + const next = items.slice(); // If it doesn't match this paginator's filters, remove if present and exit. 
const existingIndex = items.findIndex((ch) => this.getItemId(ch) === id); if (!this.matchesFilter(ingestedItem)) { if (existingIndex >= 0) { - const next = items.slice(); next.splice(existingIndex, 1); this.state.partialNext({ items: next }); return true; // list changed (item removed) @@ -165,21 +249,20 @@ export abstract class BasePaginator { return false; // no change } - // Build comparator once per call (you can cache it when sort changes). - - const next = items.slice(); - if (existingIndex >= 0) { // Update existing: remove then re-insert at the correct position next.splice(existingIndex, 1); } - // Find insertion index via binary search: first index where existing > ingestionItem - const insertAt = binarySearchInsertIndex({ - needle: ingestedItem, - sortedArray: next, - compare: this.sortComparator, - }); + const insertAt = + this.config.lockItemOrder && existingIndex >= 0 + ? existingIndex + : // Find insertion index via binary search: first index where existing > ingestionItem + binarySearchInsertIndex({ + needle: ingestedItem, + sortedArray: next, + compare: this.effectiveComparator, + }); next.splice(insertAt, 0, ingestedItem); this.state.partialNext({ items: next }); @@ -246,18 +329,18 @@ export abstract class BasePaginator { const insertionIndex = binarySearchInsertIndex({ needle, sortedArray: items, - compare: this.sortComparator, + compare: this.effectiveComparator, }); // quick neighbor checks const id = this.getItemId(needle); const left = insertionIndex - 1; - if (left >= 0 && this.sortComparator(items[left], needle) === 0) { + if (left >= 0 && this.effectiveComparator(items[left], needle) === 0) { if (this.getItemId(items[left]) === id) return { index: left, insertionIndex }; } if ( insertionIndex < items.length && - this.sortComparator(items[insertionIndex], needle) === 0 + this.effectiveComparator(items[insertionIndex], needle) === 0 ) { if (this.getItemId(items[insertionIndex]) === id) return { index: insertionIndex, insertionIndex }; @@ 
-269,14 +352,14 @@ export abstract class BasePaginator { ? locateOnPlateauAlternating( items, needle, - this.sortComparator, + this.effectiveComparator, this.getItemId.bind(this), insertionIndex, ) : locateOnPlateauScanOneSide( items, needle, - this.sortComparator, + this.effectiveComparator, this.getItemId.bind(this), insertionIndex, ); @@ -381,4 +464,9 @@ export abstract class BasePaginator { prevDebounced = () => { this._executeQueryDebounced({ direction: 'prev' }); }; + + reload = async () => { + this.resetState(); + await this.next(); + }; } diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts index e27ada86ec..0d29234e95 100644 --- a/test/unit/ChannelPaginatorsOrchestrator.test.ts +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -1,7 +1,6 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'; import { getClientWithUser } from './test-utils/getClient'; import { - Channel, ChannelPaginator, ChannelResponse, EventTypes, @@ -259,6 +258,22 @@ describe('ChannelPaginatorsOrchestrator', () => { }); }); + describe('reload', () => { + it('calls reload on all the paginators', async () => { + const paginator1 = new ChannelPaginator({ client }); + const paginator2 = new ChannelPaginator({ client }); + vi.spyOn(paginator1, 'reload').mockResolvedValue(); + vi.spyOn(paginator2, 'reload').mockResolvedValue(); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [paginator1, paginator2], + }); + await orchestrator.reload(); + expect(paginator1.reload).toHaveBeenCalledTimes(1); + expect(paginator2.reload).toHaveBeenCalledTimes(1); + }); + }); + // Helper to create a minimal channel with needed state function makeChannel(cid: string) { const [type, id] = cid.split(':'); @@ -320,7 +335,7 @@ describe('ChannelPaginatorsOrchestrator', () => { }); }); - describe.each(['channel.hidden', 'notification.removed_from_channel'] as EventTypes[])( + 
describe.each(['notification.removed_from_channel'] as EventTypes[])( '%s', (eventType) => { it('removes the channel from all paginators', async () => { @@ -522,6 +537,109 @@ describe('ChannelPaginatorsOrchestrator', () => { }); }); + it.each([ + 'message.new', + 'notification.message_new', + 'notification.added_to_channel', + 'channel.visible', + ] as EventTypes[])( + 'boosts ingested channel on %s if the item is not already boosted at the top', + async (eventType) => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + const nowSpy = vi.spyOn(Date, 'now').mockReturnValue(now.getTime()); + + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:5'); + client.activeChannels[ch.cid] = ch; + + const paginator = new ChannelPaginator({ client }); + const matchesFilterSpy = vi.spyOn(paginator, 'matchesFilter').mockReturnValue(true); + + orchestrator.insertPaginator({ paginator }); + orchestrator.registerSubscriptions(); + + // @ts-expect-error accessing protected property + expect(paginator.boosts.size).toBe(0); + + client.dispatchEvent({ type: eventType, cid: ch.cid }); + + await vi.waitFor(() => { + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.entries())).toEqual([ + [ch.cid, { seq: 1, until: now.getTime() + 15000 }], + ]); + }); + + client.dispatchEvent({ type: eventType, cid: ch.cid }); + await vi.waitFor(() => { + // already at the top + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.entries())).toEqual([ + [ch.cid, { seq: 1, until: now.getTime() + 15000 }], + ]); + }); + + matchesFilterSpy.mockReturnValue(false); + client.dispatchEvent({ type: eventType, cid: ch.cid }); + + await vi.waitFor(() => { + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.entries())).toEqual([ + [ch.cid, { seq: 1, until: now.getTime() + 15000 }], + ]); + }); + + 
matchesFilterSpy.mockReturnValue(true); + // @ts-expect-error accessing protected property + paginator._maxBoostSeq = 1000; + client.dispatchEvent({ type: eventType, cid: ch.cid }); + await vi.waitFor(() => { + // some other channel has a higher boost + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.entries())).toEqual([ + [ch.cid, { seq: 1001, until: now.getTime() + 15000 }], + ]); + }); + + nowSpy.mockRestore(); + vi.useRealTimers(); + }, + ); + + it.each([ + 'channel.updated', + 'channel.truncated', + 'member.updated', + 'user.presence.changed', + ] as EventTypes[])('does not boost ingested channel on %s', async (eventType) => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + const nowSpy = vi.spyOn(Date, 'now').mockReturnValue(now.getTime()); + + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const ch = makeChannel('messaging:5'); + client.activeChannels[ch.cid] = ch; + + const paginator = new ChannelPaginator({ client }); + const matchesFilterSpy = vi.spyOn(paginator, 'matchesFilter').mockReturnValue(true); + + orchestrator.insertPaginator({ paginator }); + orchestrator.registerSubscriptions(); + + // @ts-expect-error accessing protected property + expect(paginator.boosts.size).toBe(0); + + client.dispatchEvent({ type: eventType, cid: ch.cid }); + + await vi.waitFor(() => { + // @ts-expect-error accessing protected property + expect(paginator.boosts.size).toBe(0); + }); + }); + describe('user.presence.changed', () => { it('updates user on channels where the user is a member and re-emits lists', async () => { const orchestrator = new ChannelPaginatorsOrchestrator({ client }); diff --git a/test/unit/pagination/BasePaginator.test.ts b/test/unit/pagination/BasePaginator.test.ts index 30bd6bfd16..bf4aa43e84 100644 --- a/test/unit/pagination/BasePaginator.test.ts +++ b/test/unit/pagination/BasePaginator.test.ts @@ -88,6 +88,7 @@ describe('BasePaginator', () 
=> { }); }); }); + describe('pagination API', () => { it('paginates to next pages', async () => { const paginator = new Paginator(); @@ -240,6 +241,7 @@ describe('BasePaginator', () => { expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); }); }); + describe('item management', () => { const item: TestItem = { id: 'id1', @@ -286,55 +288,159 @@ describe('BasePaginator', () => { }); describe('ingestItem', () => { - it('exists but does not match the filter anymore removes the item', () => { - const paginator = new Paginator(); + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'exists but does not match the filter anymore removes the item %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item3, item2, item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams + }); + + const adjustedItem = { + ...item, + teams: ['efg'], // removed from the team abc + }; + + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item removed + expect(paginator.items).toHaveLength(2); + }, + ); + + it.each([ + [' adjusts the order on lockItemOrder: false', false], + [' does not adjust the order on lockItemOrder: true', true], + ])('exists and matches the filter updates the item and %s', (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); paginator.state.partialNext({ - items: [item3, item2, item], + items: [item, item2, item3], }); // @ts-expect-error accessing protected property paginator.buildFilters = () => ({ - teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams + age: { $gt: 100 }, }); + paginator.sort = { age: 1 }; + const adjustedItem = { ...item, - teams: ['efg'], // removed from the team abc + age: 103, }; - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item 
removed - expect(paginator.items).toHaveLength(2); + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated + expect(paginator.items).toHaveLength(3); + + if (lockItemOrder) { + expect(paginator.items).toStrictEqual([adjustedItem, item2, item3]); + } else { + expect(paginator.items).toStrictEqual([item2, item3, adjustedItem]); + } }); - it('exists and matches the filter updates the item', () => { + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and does not match the filter results in no action %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem = { + ...item, + id: 'id2', + name: 'test2', + }; + + expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action + expect(paginator.items).toStrictEqual([item]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and matches the filter inserts according to default sort order (append) %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item3, item], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item, item2]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and matches the filter inserts according to sort order %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item3, item], + }); + + // @ts-expect-error accessing protected property + 
paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toHaveLength(3); + expect(paginator.items![0]).toStrictEqual(item3); + expect(paginator.items![1]).toStrictEqual(item2); + expect(paginator.items![2]).toStrictEqual(item); + }, + ); + + it('reflects the boost priority on lockItemOrder: false for newly ingested items', () => { const paginator = new Paginator(); paginator.state.partialNext({ - items: [item, item2, item3], + items: [item3, item], }); // @ts-expect-error accessing protected property paginator.buildFilters = () => ({ - age: { $gt: 100 }, + teams: { $contains: 'abc' }, }); - paginator.sort = { age: 1 }; - - const adjustedItem = { - ...item, - age: 103, - }; - - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated - expect(paginator.items).toHaveLength(3); - expect(paginator.items![0]).toStrictEqual(item2); - expect(paginator.items![1]).toStrictEqual(item3); - expect(paginator.items![2]).toStrictEqual(adjustedItem); + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item2, item3, item]); }); - it('does not exist and does not match the filter results in no action', () => { + it('reflects the boost priority on lockItemOrder: false for existing items recently boosted', () => { const paginator = new Paginator(); paginator.state.partialNext({ - items: [item], + items: [item, item2, item3], }); // @ts-expect-error accessing protected property @@ -342,37 +448,45 @@ describe('BasePaginator', () => { age: { $gt: 100 }, }); + paginator.sort = { age: 1 }; + const adjustedItem = { - ...item, - id: 'id2', - name: 'test2', + ...item2, + age: 103, }; + paginator.boost(item2.id); + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated + 
expect(paginator.items).toHaveLength(3); - expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action - expect(paginator.items).toHaveLength(1); - expect(paginator.items![0]).toStrictEqual(item); + expect(paginator.items).toStrictEqual([adjustedItem, item, item3]); }); - it('does not exist and matches the filter inserts according to default sort order (append)', () => { - const paginator = new Paginator(); + it('does not reflect the boost priority on lockItemOrder: true', () => { + const paginator = new Paginator({ lockItemOrder: true }); paginator.state.partialNext({ - items: [item3, item], + items: [item, item2, item3], }); // @ts-expect-error accessing protected property paginator.buildFilters = () => ({ - teams: { $contains: 'abc' }, + age: { $gt: 100 }, }); - expect(paginator.ingestItem(item2)).toBeTruthy(); + paginator.sort = { age: 1 }; + + const adjustedItem = { + ...item2, + age: 103, + }; + paginator.boost(item2.id); + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated expect(paginator.items).toHaveLength(3); - expect(paginator.items![0]).toStrictEqual(item3); - expect(paginator.items![1]).toStrictEqual(item); - expect(paginator.items![2]).toStrictEqual(item2); + + expect(paginator.items).toStrictEqual([item, adjustedItem, item3]); }); - it('does not exist and matches the filter inserts according to sort order', () => { - const paginator = new Paginator(); + it('reflects the boost priority on lockItemOrder: true when ingesting a new item', () => { + const paginator = new Paginator({ lockItemOrder: true }); paginator.state.partialNext({ items: [item3, item], }); @@ -381,16 +495,10 @@ describe('BasePaginator', () => { paginator.buildFilters = () => ({ teams: { $contains: 'abc' }, }); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ sort: { age: -1 } }); + paginator.boost(item2.id); expect(paginator.ingestItem(item2)).toBeTruthy(); - expect(paginator.items).toHaveLength(3); - 
expect(paginator.items![0]).toStrictEqual(item3); - expect(paginator.items![1]).toStrictEqual(item2); - expect(paginator.items![2]).toStrictEqual(item); + expect(paginator.items).toStrictEqual([item2, item3, item]); }); }); @@ -430,6 +538,29 @@ describe('BasePaginator', () => { }); }); + describe('reload', () => { + it('starts the pagination from the beginning', async () => { + const a: TestItem = { id: 'a', age: 30 }; + const b: TestItem = { id: 'b', age: 25 }; + const c: TestItem = { id: 'c', age: 25 }; + const d: TestItem = { id: 'd', age: 20 }; + + const paginator = new Paginator(); + const nextSpy = vi.spyOn(paginator, 'next').mockResolvedValue(); + paginator.state.next({ + hasNext: false, + hasPrev: false, + isLoading: false, + items: [{ id: 'a' }, { id: 'b' }, { id: 'c' }, { id: 'd' }], + offset: 4, + }); + await paginator.reload(); + expect(nextSpy).toHaveBeenCalledTimes(1); + expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + nextSpy.mockRestore(); + }); + }); + describe('contains', () => { it('returns true if the item exists', () => { const paginator = new Paginator(); @@ -689,5 +820,237 @@ describe('BasePaginator', () => { ]); }); }); + + describe('item boosting', () => { + const a = { id: 'a', age: 10, name: 'A' } as TestItem; + const b = { id: 'b', age: 20, name: 'B' } as TestItem; + const c = { id: 'c', age: 30, name: 'C' } as TestItem; + + const byIdAsc = (l: TestItem, r: TestItem) => + l.id < r.id ? -1 : l.id > r.id ? 
1 : 0; + + describe('clearExpiredBoosts', () => { + it('removes expired boosts and updates maxBoostSeq', () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.boosts.clear(); + const now = 1000000; + + paginator.boost('fresh', { until: now + 1000, seq: 1 }); + paginator.boost('stale', { until: now - 1, seq: 5 }); + + // @ts-expect-error accessing protected method + paginator.clearExpiredBoosts(now); + + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.keys())).toEqual(['fresh']); + expect(paginator.maxBoostSeq).toBe(1); + }); + + it('sets maxBoostSeq to 0 when no boosts remain', () => { + const paginator = new Paginator(); + // two expired boosts at "now" + paginator.boost('x', { until: 1000, seq: 1 }); + paginator.boost('y', { until: 1500, seq: 3 }); + + // @ts-expect-error accessing protected method + paginator.clearExpiredBoosts(10000); + + // @ts-expect-error accessing protected property + expect(paginator.boosts.size).toBe(0); + expect(paginator.maxBoostSeq).toBe(0); + }); + }); + + describe('boostComparator', () => { + it('prioritizes boosted over non-boosted', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + paginator.sortComparator = byIdAsc; + + // Boost only "a" + paginator.boost('b', { ttlMs: 10000, seq: 0 }); + + // @ts-expect-error: protected method + expect(paginator.boostComparator(a, b)).toBe(1); // a after b + // @ts-expect-error + expect(paginator.boostComparator(b, a)).toBe(-1); // b stays before a + + // Let boost expire + vi.setSystemTime(new Date(now.getTime() + 11000)); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); // fallback to byIdAsc + vi.useRealTimers(); + }); + + it('when both boosted, higher seq comes first; ties fall back to sortComparator', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + 
vi.setSystemTime(now); + + const paginator = new Paginator(); + // Fallback comparator id asc + paginator.sortComparator = byIdAsc; + + paginator.boost('a', { ttlMs: 60000, seq: 1 }); + paginator.boost('b', { ttlMs: 60000, seq: 3 }); + + // b has higher seq → should come first → comparator(a,b) > 0 + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(1); + // reverse check + // @ts-expect-error + expect(paginator.boostComparator(b, a)).toBe(-1); + + // Equal seq → fall back to sortComparator (id asc => a before b) + paginator.boost('a', { ttlMs: 60000, seq: 2 }); + paginator.boost('b', { ttlMs: 60000, seq: 2 }); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); + + vi.useRealTimers(); + }); + + it('ignores expired boosts automatically during comparison', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + paginator.sortComparator = byIdAsc; + + paginator.boost('b', { ttlMs: 5000, seq: 10 }); + // Initially boosted + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(1); + + // Advance beyond TTL so boost is expired; comparator should fall back + vi.setSystemTime(new Date(now.getTime() + 6000)); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); // byIdAsc, not boost + vi.useRealTimers(); + }); + }); + + describe('boost', () => { + it('assigns default TTL (15s) and default seq=0; updates maxBoostSeq only upward', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + + paginator.boost('k'); // default 15s, seq 0 + const b1 = paginator.getBoost('k')!; + expect(b1.seq).toBe(0); + expect(b1.until).toBe(now.getTime() + 15000); + expect(paginator.maxBoostSeq).toBe(0); + + // Raise max seq + paginator.boost('m', { ttlMs: 1000, seq: 5 }); + expect(paginator.maxBoostSeq).toBe(5); + + // Lower seq should NOT decrease 
maxBoostSeq + paginator.boost('n', { ttlMs: 1000, seq: 2 }); + expect(paginator.maxBoostSeq).toBe(5); + + vi.useRealTimers(); + }); + + it('accepts explicit until and seq', () => { + const paginator = new Paginator(); + paginator.boost('z', { until: 42, seq: 7 }); + const b = paginator.getBoost('z')!; + expect(b.until).toBe(42); + expect(b.seq).toBe(7); + expect(paginator.maxBoostSeq).toBe(7); + }); + }); + + describe('getBoost', () => { + it('returns the boost record when present; otherwise undefined', () => { + const paginator = new Paginator(); + expect(paginator.getBoost('missing')).toBeUndefined(); + paginator.boost('a', { ttlMs: 1000, seq: 1 }); + const b = paginator.getBoost('a'); + expect(b).toBeDefined(); + expect(b!.seq).toBe(1); + }); + }); + + describe('removeBoost', () => { + it('removes a boost and recalculates maxBoostSeq', () => { + const paginator = new Paginator(); + paginator.boost('a', { ttlMs: 60000, seq: 1 }); + paginator.boost('b', { ttlMs: 60000, seq: 5 }); + paginator.boost('c', { ttlMs: 60000, seq: 2 }); + expect(paginator.maxBoostSeq).toBe(5); + + paginator.removeBoost('b'); // remove current max + expect(paginator.getBoost('b')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(2); + + paginator.removeBoost('c'); + expect(paginator.getBoost('c')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(1); + + paginator.removeBoost('a'); + expect(paginator.getBoost('a')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(0); + }); + }); + + describe('isBoosted', () => { + it('returns true when boost exists and now <= until; false otherwise', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + expect(paginator.isBoosted('x')).toBe(false); + + paginator.boost('x', { ttlMs: 5000, seq: 0 }); + expect(paginator.isBoosted('x')).toBe(true); + + // Exactly at until is still considered boosted per <= check + vi.setSystemTime(new Date(now.getTime() + 
5000)); + expect(paginator.isBoosted('x')).toBe(true); + + // After until → false + vi.setSystemTime(new Date(now.getTime() + 5001)); + expect(paginator.isBoosted('x')).toBe(false); + + vi.useRealTimers(); + }); + }); + + describe('integration: ingestion respects boostComparator implicitly', () => { + it('newly ingested boosted items float above non-boosted regardless of fallback sort', () => { + vi.useFakeTimers(); + vi.setSystemTime(new Date('2025-01-01T00:00:00Z')); + + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: 1 }, // ascending age (so normally a < b < c by age) + }); + paginator.state.partialNext({ items: [a, b] }); + + // Boost "c" before ingest → it should be placed ahead of non-boosted even though age is highest + paginator.boost('c', { ttlMs: 60000, seq: 1 }); + expect(paginator.ingestItem(c)).toBeTruthy(); + + // c should be first due to boost, then a, then b (fallback sort would place c last otherwise) + expect(paginator.items!.map((i) => i.id)).toEqual(['c', 'a', 'b']); + + vi.useRealTimers(); + }); + }); + }); }); }); From 69b421f8d385a910a0ba796a3a6ce4a2370e3467 Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 7 Nov 2025 14:23:04 +0100 Subject: [PATCH 03/31] feat: support missing ChannelManager features in BasePaginator Support setting paginator items directly, optional request retries, offline DB in ChannelPaginator, identification of pagination restart based on query shape change. 
--- src/ChannelPaginatorsOrchestrator.ts | 260 +++++---- src/EventHandlerPipeline.ts | 53 +- src/pagination/BasePaginator.ts | 353 ++++++++++-- src/pagination/ChannelPaginator.ts | 207 ++++++- src/pagination/ReminderPaginator.ts | 41 +- src/utils.ts | 2 +- src/utils/mergeWith/mergeWithCore.ts | 304 +++++----- .../ChannelPaginatorsOrchestrator.test.ts | 207 +++++-- test/unit/EventHandlerPipeline.test.ts | 68 ++- .../MessageReceiptsTracker.test.ts | 45 ++ test/unit/pagination/BasePaginator.test.ts | 538 +++++++++++++++++- test/unit/pagination/ChannelPaginator.test.ts | 242 +++++--- test/unit/utils/mergeWith.test.ts | 151 ++++- 13 files changed, 1919 insertions(+), 552 deletions(-) diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index 6eceb7b1a7..b3aecabc9e 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -7,6 +7,7 @@ import type { Unsubscribe } from './store'; import { StateStore } from './store'; import type { EventHandlerPipelineHandler, + FindEventHandlerParams, InsertEventHandlerPayload, LabeledEventHandler, } from './EventHandlerPipeline'; @@ -17,11 +18,32 @@ export type ChannelPaginatorsOrchestratorEventHandlerContext = { orchestrator: ChannelPaginatorsOrchestrator; }; +type EventHandlerContext = ChannelPaginatorsOrchestratorEventHandlerContext; + type SupportedEventType = EventTypes | (string & {}); -const reEmit: EventHandlerPipelineHandler< - ChannelPaginatorsOrchestratorEventHandlerContext -> = ({ event, ctx: { orchestrator } }) => { +const getCachedChannelFromEvent = ( + event: Event, + cache: Record, +): Channel | undefined => { + let channel: Channel | undefined = undefined; + if (event.cid) { + channel = cache[event.cid]; + } else if (event.channel_id && event.channel_type) { + // todo: is there a central method to construct the cid from type and channel id? 
+ channel = cache[`${event.channel_type}:${event.channel_id}`]; + } else if (event.channel) { + channel = cache[event.channel.cid]; + } else { + return; + } + return channel; +}; + +const reEmit: EventHandlerPipelineHandler = ({ + event, + ctx: { orchestrator }, +}) => { if (!event.cid) return; const channel = orchestrator.client.activeChannels[event.cid]; if (!channel) return; @@ -33,9 +55,10 @@ const reEmit: EventHandlerPipelineHandler< }); }; -const removeItem: EventHandlerPipelineHandler< - ChannelPaginatorsOrchestratorEventHandlerContext -> = ({ event, ctx: { orchestrator } }) => { +const removeItem: EventHandlerPipelineHandler = ({ + event, + ctx: { orchestrator }, +}) => { if (!event.cid) return; const channel = orchestrator.client.activeChannels[event.cid]; orchestrator.paginators.forEach((paginator) => { @@ -43,21 +66,26 @@ const removeItem: EventHandlerPipelineHandler< }); }; -const updateLists: EventHandlerPipelineHandler< - ChannelPaginatorsOrchestratorEventHandlerContext -> = async ({ event, ctx: { orchestrator } }) => { - let channel: Channel | undefined = undefined; - if (event.cid) { - channel = orchestrator.client.activeChannels[event.cid]; - } else if (event.channel_id && event.channel_type) { - // todo: is there a central method to construct the cid from type and channel id? 
- channel = - orchestrator.client.activeChannels[`${event.channel_type}:${event.channel_id}`]; - } else if (event.channel) { - channel = orchestrator.client.activeChannels[event.channel.cid]; - } else { - return; - } +// todo: documentation: show how to implement allowNewMessagesFromUnfilteredChannels just by inserting event handler +// at the start of the handler pipeline and filter out events for unknown channels +export const ignoreEventsForUnknownChannels: EventHandlerPipelineHandler< + EventHandlerContext +> = ({ event, ctx: { orchestrator } }) => { + const channel: Channel | undefined = getCachedChannelFromEvent( + event, + orchestrator.client.activeChannels, + ); + if (!channel) return { action: 'stop' }; +}; + +const updateLists: EventHandlerPipelineHandler = async ({ + event, + ctx: { orchestrator }, +}) => { + let channel: Channel | undefined = getCachedChannelFromEvent( + event, + orchestrator.client.activeChannels, + ); if (!channel) { const [type, id] = event.cid @@ -96,104 +124,91 @@ const updateLists: EventHandlerPipelineHandler< }; // we have to make sure that client.activeChannels is always up-to-date -const channelDeletedHandler: LabeledEventHandler = - { - handle: removeItem, - id: 'ChannelPaginatorsOrchestrator:default-handler:channel.deleted', - }; +const channelDeletedHandler: LabeledEventHandler = { + handle: removeItem, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.deleted', +}; // fixme: this handler should not be handled by the orchestrator but as Channel does not have reactive state, // we need to re-emit the whole list to reflect the changes -const channelUpdatedHandler: LabeledEventHandler = - { - handle: reEmit, - id: 'ChannelPaginatorsOrchestrator:default-handler:channel.updated', - }; +const channelUpdatedHandler: LabeledEventHandler = { + handle: reEmit, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.updated', +}; // fixme: this handler should not be handled by the orchestrator but as Channel does not 
have reactive state, // we need to re-emit the whole list to reflect the changes -const channelTruncatedHandler: LabeledEventHandler = - { - handle: reEmit, - id: 'ChannelPaginatorsOrchestrator:default-handler:channel.truncated', - }; - -const channelVisibleHandler: LabeledEventHandler = - { - handle: updateLists, - id: 'ChannelPaginatorsOrchestrator:default-handler:channel.visible', - }; +const channelTruncatedHandler: LabeledEventHandler = { + handle: reEmit, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.truncated', +}; + +const channelVisibleHandler: LabeledEventHandler = { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:channel.visible', +}; // members filter - should not be impacted as id is stable - cannot be updated // member.user.name - can be impacted -const memberUpdatedHandler: LabeledEventHandler = - { - handle: updateLists, - id: 'ChannelPaginatorsOrchestrator:default-handler:member.updated', - }; - -const messageNewHandler: LabeledEventHandler = - { - handle: updateLists, - id: 'ChannelPaginatorsOrchestrator:default-handler:message.new', - }; - -const notificationAddedToChannelHandler: LabeledEventHandler = - { - handle: updateLists, - id: 'ChannelPaginatorsOrchestrator:default-handler:notification.added_to_channel', - }; - -const notificationMessageNewHandler: LabeledEventHandler = - { - handle: updateLists, - id: 'ChannelPaginatorsOrchestrator:default-handler:notification.message_new', - }; - -const notificationRemovedFromChannelHandler: LabeledEventHandler = - { - handle: removeItem, - id: 'ChannelPaginatorsOrchestrator:default-handler:notification.removed_from_channel', - }; +const memberUpdatedHandler: LabeledEventHandler = { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:member.updated', +}; + +const messageNewHandler: LabeledEventHandler = { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:message.new', +}; + +const 
notificationAddedToChannelHandler: LabeledEventHandler = { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.added_to_channel', +}; + +const notificationMessageNewHandler: LabeledEventHandler = { + handle: updateLists, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.message_new', +}; + +const notificationRemovedFromChannelHandler: LabeledEventHandler = { + handle: removeItem, + id: 'ChannelPaginatorsOrchestrator:default-handler:notification.removed_from_channel', +}; // fixme: updates users for member object in all the channels which are loaded with that member - normalization would be beneficial -const userPresenceChangedHandler: LabeledEventHandler = - { - handle: ({ event, ctx: { orchestrator } }) => { - const eventUser = event.user; - if (!eventUser?.id) return; - orchestrator.paginators.forEach((paginator) => { - const paginatorItems = paginator.items; - if (!paginatorItems) return; - let updated = false; - paginatorItems.forEach((channel) => { - if (channel.state.members[eventUser.id]) { - channel.state.members[eventUser.id].user = event.user; - updated = true; - } - if (channel.state.membership.user?.id === eventUser.id) { - channel.state.membership.user = eventUser; - updated = true; - } - }); - if (updated) { - // fixme: user is not reactive and so the whole list has to be re-rendered - paginator.state.partialNext({ items: [...paginatorItems] }); +const userPresenceChangedHandler: LabeledEventHandler = { + handle: ({ event, ctx: { orchestrator } }) => { + const eventUser = event.user; + if (!eventUser?.id) return; + orchestrator.paginators.forEach((paginator) => { + const paginatorItems = paginator.items; + if (!paginatorItems) return; + let updated = false; + paginatorItems.forEach((channel) => { + if (channel.state.members[eventUser.id]) { + channel.state.members[eventUser.id].user = event.user; + updated = true; + } + if (channel.state.membership.user?.id === eventUser.id) { + 
channel.state.membership.user = eventUser; + updated = true; } }); - }, - id: 'ChannelPaginatorsOrchestrator:default-handler:user.presence.changed', - }; + if (updated) { + // fixme: user is not reactive and so the whole list has to be re-rendered + paginator.state.partialNext({ items: [...paginatorItems] }); + } + }); + }, + id: 'ChannelPaginatorsOrchestrator:default-handler:user.presence.changed', +}; export type ChannelPaginatorsOrchestratorState = { paginators: ChannelPaginator[]; }; export type ChannelPaginatorsOrchestratorEventHandlers = Partial< - Record< - SupportedEventType, - LabeledEventHandler[] - > + Record[]> >; export type ChannelPaginatorsOrchestratorOptions = { @@ -205,14 +220,15 @@ export type ChannelPaginatorsOrchestratorOptions = { export class ChannelPaginatorsOrchestrator extends WithSubscriptions { client: StreamChat; state: StateStore; - protected pipelines = new Map< + protected _pipelines = new Map< SupportedEventType, - EventHandlerPipeline + EventHandlerPipeline >(); protected static readonly defaultEventHandlers: ChannelPaginatorsOrchestratorEventHandlers = { 'channel.deleted': [channelDeletedHandler], + 'channel.hidden': [channelDeletedHandler], 'channel.updated': [channelUpdatedHandler], 'channel.truncated': [channelTruncatedHandler], 'channel.visible': [channelVisibleHandler], @@ -243,7 +259,11 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { return this.state.getLatestValue().paginators; } - private get ctx(): ChannelPaginatorsOrchestratorEventHandlerContext { + get pipelines(): Map> { + return this._pipelines; + } + + private get ctx(): EventHandlerContext { return { orchestrator: this }; } @@ -291,17 +311,39 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { ...payload }: { eventType: SupportedEventType; - } & InsertEventHandlerPayload): Unsubscribe { + } & InsertEventHandlerPayload): Unsubscribe { return this.ensurePipeline(eventType).insert(payload); } + setEventHandlers({ + 
eventType, + handlers, + }: { + eventType: SupportedEventType; + handlers: LabeledEventHandler[]; + }) { + return this.ensurePipeline(eventType).replaceAll(handlers); + } + + removeEventHandlers({ + eventType, + handlers, + }: { + eventType: SupportedEventType; + handlers: FindEventHandlerParams[]; + }) { + const pipeline = this._pipelines.get(eventType); + if (!pipeline) return; + handlers.forEach((params) => pipeline.remove(params)); + } + /** Subscribe to WS (and more buses via attachBus) */ registerSubscriptions(): Unsubscribe { if (!this.hasSubscriptions) { this.addUnsubscribeFunction( // todo: maybe we should have a wrapper here to decide, whether the event is a LocalEventBus event or else supported by client this.client.on((event: Event) => { - const pipe = this.pipelines.get(event.type); + const pipe = this._pipelines.get(event.type); if (pipe) { pipe.run(event, this.ctx); } @@ -315,13 +357,13 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { ensurePipeline( eventType: SupportedEventType, - ): EventHandlerPipeline { - let pipe = this.pipelines.get(eventType); + ): EventHandlerPipeline { + let pipe = this._pipelines.get(eventType); if (!pipe) { - pipe = new EventHandlerPipeline({ + pipe = new EventHandlerPipeline({ id: `ChannelPaginatorsOrchestrator:${eventType}`, }); - this.pipelines.set(eventType, pipe); + this._pipelines.set(eventType, pipe); } return pipe; } diff --git a/src/EventHandlerPipeline.ts b/src/EventHandlerPipeline.ts index c2b63b976a..925e7c1e64 100644 --- a/src/EventHandlerPipeline.ts +++ b/src/EventHandlerPipeline.ts @@ -2,6 +2,12 @@ import { generateUUIDv4 } from './utils'; import type { Event } from './types'; import type { Unsubscribe } from './store'; +type MatchById = { id: string | RegExp; regexMatch?: boolean }; +export type FindEventHandlerParams> = { + handler?: LabeledEventHandler | EventHandlerPipelineHandler; + idMatch?: MatchById; +}; + export type EventHandlerResult = { action: 'stop' }; // event 
processing run will be cancelled export type InsertEventHandlerPayload> = { @@ -35,6 +41,27 @@ export class EventHandlerPipeline = {}> { return this.handlers.length; } + findIndex({ handler, idMatch }: FindEventHandlerParams): number { + let index = -1; + if (handler) { + index = this.handlers.findIndex((existingHandler) => + typeof (handler as LabeledEventHandler).handle === 'function' + ? (handler as LabeledEventHandler).handle === existingHandler.handle + : handler === existingHandler.handle, + ); + } + + if (idMatch) { + index = this.handlers.findIndex((h) => { + if (!h.id) return false; + if (idMatch.regexMatch || idMatch.id instanceof RegExp) + return !!h.id.match(idMatch.id); + return h.id === idMatch.id; + }); + } + return index; + } + /** * Insert a handler into the pipeline at the given index. * @@ -53,7 +80,6 @@ export class EventHandlerPipeline = {}> { * @param revertOnUnsubscribe If true, restore the replaced handler when unsubscribing. * @returns An unsubscribe function that removes (and optionally restores) the handler. */ - insert({ handle, id, @@ -74,22 +100,29 @@ export class EventHandlerPipeline = {}> { const old = this.handlers[validIndex]; this.handlers[validIndex] = handler; return () => { - this.remove(handler); + this.remove({ handler }); if (revertOnUnsubscribe) this.handlers.splice(validIndex, 0, old); }; } else { this.handlers.splice(validIndex, 0, handler); - return () => this.remove(handler); + return () => this.remove({ handler }); } } - remove(h: LabeledEventHandler | EventHandlerPipelineHandler): void { - const index = this.handlers.findIndex((handler) => - typeof (h as LabeledEventHandler).handle === 'function' - ? (h as LabeledEventHandler).handle === handler.handle - : h === handler.handle, - ); - if (index >= 0) this.handlers.splice(index, 1); + /** + * Remove handler by: + * - handler function identity or + * - by id that could be an exact match or + * - match by regexp. 
+ * @param params {FindEventHandlerParams} + */ + remove(params: FindEventHandlerParams): void { + let index = this.findIndex(params); + // need to perform n+1 searches in case the search is done by regex => there can be multiple matches + while (index > -1) { + this.handlers.splice(index, 1); + index = this.findIndex(params); + } } replaceAll(handlers: LabeledEventHandler[]): void { diff --git a/src/pagination/BasePaginator.ts b/src/pagination/BasePaginator.ts index 18e96f0877..6912554dfe 100644 --- a/src/pagination/BasePaginator.ts +++ b/src/pagination/BasePaginator.ts @@ -1,15 +1,33 @@ import { binarySearchInsertIndex } from './sortCompiler'; import { itemMatchesFilter } from './filterCompiler'; -import { StateStore } from '../store'; -import { debounce, type DebouncedFunc } from '../utils'; +import { isPatch, StateStore, type ValueOrPatch } from '../store'; +import { debounce, type DebouncedFunc, sleep } from '../utils'; import type { FieldToDataResolver } from './types.normalization'; import { locateOnPlateauAlternating, locateOnPlateauScanOneSide } from './utility.search'; +import { isEqual } from '../utils/mergeWith/mergeWithCore'; +import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../constants'; const noOrderChange = () => 0; type PaginationDirection = 'next' | 'prev'; -type Cursor = { next: string | null; prev: string | null }; -export type PaginationQueryParams = { direction?: PaginationDirection }; +export type PaginatorCursor = { next: string | null; prev: string | null }; +type StateResetPolicy = 'auto' | 'yes' | 'no' | (string & {}); + +export type PaginationQueryShapeChangeIdentifier = ( + prevQueryShape?: S, + nextQueryShape?: S, +) => boolean; + +export type PaginationQueryParams = { + direction?: PaginationDirection; + /** Data that define the query (filters, sort, ...) */ + queryShape?: Q; + /** Per-call override of the reset behavior. */ + reset?: StateResetPolicy; + /** Should retry the failed request given number of times. 
Default is 0. */ + retryCount?: number; +}; + export type PaginationQueryReturnValue = { items: T[] } & { next?: string; prev?: string; @@ -17,39 +35,76 @@ export type PaginationQueryReturnValue = { items: T[] } & { export type PaginatorDebounceOptions = { debounceMs: number; }; -type DebouncedExecQueryFunction = DebouncedFunc< - (params: { direction: PaginationDirection }) => Promise +type DebouncedExecQueryFunction = DebouncedFunc< + (params: PaginationQueryParams) => Promise >; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type PaginatorState = { +export type PaginatorState = { hasNext: boolean; hasPrev: boolean; isLoading: boolean; items: T[] | undefined; lastQueryError?: Error; - cursor?: Cursor; + cursor?: PaginatorCursor; offset?: number; }; -export type PaginatorOptions = { +export type PaginatorOptions = { /** The number of milliseconds to debounce the search query. The default interval is 300ms. */ debounceMs?: number; - /** Will prevent changing the index of existing items */ + /** + * Function containing custom logic that decides, whether the next pagination query to be executed should be considered the first page query. + * It makes sense to consider the next query as the first page query if filters, sort, options etc. (query params) excluding the page size have changed. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + hasPaginationQueryShapeChanged?: PaginationQueryShapeChangeIdentifier; + /** Custom function to retrieve items pages and optionally return a cursor in case of cursor pagination. */ + doRequest?: (queryParams: Q) => Promise<{ items: T[]; cursor?: PaginatorCursor }>; + /** In case of cursor pagination, specify the initial cursor value. */ + initialCursor?: PaginatorCursor; + /** In case of offset pagination, specify the initial offset value. */ + initialOffset?: number; + /** Will prevent changing the index of existing items. 
*/ lockItemOrder?: boolean; + /** The item page size to be requested from the server. */ pageSize?: number; + /** Prevent silencing the errors thrown during the pagination execution. Default is false. */ + throwErrors?: boolean; }; -export const DEFAULT_PAGINATION_OPTIONS: Required = { + +type OptionalPaginatorConfigFields = + | 'doRequest' + | 'initialCursor' + | 'initialOffset' + | 'throwErrors'; + +export type BasePaginatorConfig = Pick< + PaginatorOptions, + OptionalPaginatorConfigFields +> & + Required, OptionalPaginatorConfigFields>>; + +const baseHasPaginationQueryShapeChanged: PaginationQueryShapeChangeIdentifier< + unknown +> = (prevQueryShape, nextQueryShape) => !isEqual(prevQueryShape, nextQueryShape); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export const DEFAULT_PAGINATION_OPTIONS: BasePaginatorConfig = { debounceMs: 300, lockItemOrder: false, pageSize: 10, + hasPaginationQueryShapeChanged: baseHasPaginationQueryShapeChanged, + throwErrors: false, } as const; -export abstract class BasePaginator { +export abstract class BasePaginator { state: StateStore>; - config: Required; - protected _executeQueryDebounced!: DebouncedExecQueryFunction; + config: BasePaginatorConfig; + protected _executeQueryDebounced!: DebouncedExecQueryFunction; protected _isCursorPagination = false; + /** Last effective query shape produced by subclass for the most recent request. */ + protected _lastQueryShape?: Q; + protected _nextQueryShape?: Q; /** * Comparison function used to keep items in a paginator sorted. 
* @@ -82,10 +137,23 @@ export abstract class BasePaginator { protected boosts = new Map(); protected _maxBoostSeq: number = 0; - protected constructor(options?: PaginatorOptions) { - this.config = { ...DEFAULT_PAGINATION_OPTIONS, ...options }; + protected constructor({ + initialCursor, + initialOffset, + ...options + }: PaginatorOptions = {}) { + this.config = { + ...DEFAULT_PAGINATION_OPTIONS, + initialCursor, + initialOffset, + ...options, + }; const { debounceMs } = this.config; - this.state = new StateStore>(this.initialState); + this.state = new StateStore>({ + ...this.initialState, + cursor: initialCursor, + offset: initialOffset ?? 0, + }); this.setDebounceOptions({ debounceMs }); this.sortComparator = noOrderChange; this._filterFieldToDataResolvers = []; @@ -111,15 +179,24 @@ export abstract class BasePaginator { return this.state.getLatestValue().isLoading; } - get initialState(): PaginatorState { + /** Signals that the paginator has not performed any query so far */ + get isInitialized() { + return typeof this._lastQueryShape !== 'undefined'; + } + + get isOfflineSupportEnabled() { + return false; + } + + get initialState(): PaginatorState { return { hasNext: true, hasPrev: true, //todo: check if optimistic value does not cause problems in UI isLoading: false, - items: undefined, + items: undefined, // todo: maybe should be null? lastQueryError: undefined, - cursor: undefined, - offset: 0, + cursor: this.config.initialCursor, + offset: this.config.initialOffset ?? 
0, }; } @@ -139,6 +216,18 @@ export abstract class BasePaginator { return this.config.pageSize; } + set pageSize(size: number) { + this.config.pageSize = size; + } + + set initialCursor(cursor: PaginatorCursor) { + this.config.initialCursor = cursor; + } + + set initialOffset(offset: number) { + this.config.initialOffset = offset; + } + /** Single point of truth: always use the effective comparator */ get effectiveComparator() { return this.boostComparator; @@ -148,12 +237,40 @@ export abstract class BasePaginator { return this._maxBoostSeq; } - abstract query(params: PaginationQueryParams): Promise>; + abstract query( + params: PaginationQueryParams, + ): Promise>; abstract filterQueryResults(items: T[]): T[] | Promise; + /** + * Subclasses must return the query shape. + */ + protected getNextQueryShape({ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + direction, + }: Pick, 'direction'> = {}): Q { + throw new Error('Paginator.getNextQueryShape() is not implemented'); + } + + /** + * Decide whether a param change between queries requires a state reset. + * Default: deep inequality => reset. + * Subclasses can override to implement domain rules + * (e.g. ChannelPaginator filters {cid: { $in: string[]}} with different CIDs may be required not to lead to reset). + */ + protected shouldResetStateBeforeQuery( + prevQueryShape: unknown | undefined, + nextQueryShape: unknown | undefined, + ): boolean { + return ( + typeof prevQueryShape === 'undefined' || + this.config.hasPaginationQueryShapeChanged(prevQueryShape, nextQueryShape) + ); + } + protected buildFilters(): object | null { - return null; // === no filters' + return null; // === no filters } getItemId(item: T): string { @@ -372,6 +489,29 @@ export abstract class BasePaginator { return index > -1 ? (this.items ?? 
[])[index] : undefined; } + setItems(valueOrFactory: ValueOrPatch, cursor?: PaginatorCursor) { + this.state.next((current) => { + const { items: currentItems = [] } = current; + const newItems = isPatch(valueOrFactory) + ? valueOrFactory(currentItems) + : valueOrFactory; + + // If the references between the two values are the same, just return the + // current state; otherwise trigger a state change. + if (currentItems === newItems) { + return current; + } + const newState = { ...current, items: newItems }; + + if (cursor) { + newState.cursor = cursor; + } else { + newState.offset = newItems.length; + } + return newState; + }); + } + setFilterResolvers(resolvers: FieldToDataResolver[]) { this._filterFieldToDataResolvers = resolvers; } @@ -384,9 +524,24 @@ export abstract class BasePaginator { this._executeQueryDebounced = debounce(this.executeQuery.bind(this), debounceMs); }; - canExecuteQuery = (direction: PaginationDirection) => - (!this.isLoading && direction === 'next' && this.hasNext) || - (direction === 'prev' && this.hasPrev); + protected canExecuteQuery = ({ + direction, + reset, + }: { direction: PaginationDirection } & Pick, 'reset'>) => + !this.isLoading && + (reset === 'yes' || + (direction === 'next' && this.hasNext) || + (direction === 'prev' && this.hasPrev)); + + isFirstPageQuery = ( + params: { queryShape?: unknown } & Pick, 'reset'>, + ): boolean => { + if (typeof this.items === 'undefined') return true; + if (params.reset === 'yes') return true; + if (params.reset === 'no') return false; + + return this.shouldResetStateBeforeQuery(this._lastQueryShape, params.queryShape); + }; protected getStateBeforeFirstQuery(): PaginatorState { return { @@ -411,39 +566,112 @@ export abstract class BasePaginator { }; } - async executeQuery({ direction }: { direction: PaginationDirection }) { - if (!this.canExecuteQuery(direction)) return; - const isFirstPage = typeof this.items === 'undefined'; + preloadFirstPageFromOfflineDb = ( + // eslint-disable-next-line 
@typescript-eslint/no-unused-vars + params: PaginationQueryParams, + ): Promise | T[] | undefined => undefined; + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + populateOfflineDbAfterQuery = (params: { + items: T[] | undefined; + queryShape: Q | undefined; + }): Promise | T[] | undefined => undefined; + + protected async runQueryRetryable( + params: PaginationQueryParams = {}, + ): Promise | null> { + const { retryCount } = params; + try { + return await this.query(params); + } catch (e) { + // If the offline support is enabled, and there are items in the DB, we should not report the error. + const isOfflineSupportEnabledWithItems = + this.isOfflineSupportEnabled && (this.items ?? []).length > 0; + if (!isOfflineSupportEnabledWithItems) { + this.state.partialNext({ lastQueryError: e as Error }); + } + + const nextRetryCount = (retryCount ?? 0) - 1; + if (nextRetryCount > 0) { + // not swapping isLoading flag to false as the load has not finished yet + await sleep(DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES); + return await this.runQueryRetryable({ + ...params, + retryCount: nextRetryCount, + }); + } + if (this.config.throwErrors) { + this.state.partialNext({ isLoading: false }); + throw e; + } + return null; + } + } + + async executeQuery({ + direction = 'next', + queryShape: forcedQueryShape, // todo: remove it? + reset, + retryCount = 0, + }: PaginationQueryParams = {}) { + const queryShape = forcedQueryShape ?? 
this.getNextQueryShape({ direction }); + if (!this.canExecuteQuery({ direction, reset })) return; + + const isFirstPage = this.isFirstPageQuery({ queryShape, reset }); if (isFirstPage) { - this.state.next(this.getStateBeforeFirstQuery()); + const state = this.getStateBeforeFirstQuery(); + // preload from the offline DB only if no successful HTTP request has been run previously + let items: T[] | undefined = undefined; + if (!this.isInitialized) { + items = + (await this.preloadFirstPageFromOfflineDb({ + direction, + queryShape, + reset, + retryCount, + })) ?? state.items; + } + this.state.next({ ...state, items }); } else { this.state.partialNext({ isLoading: true }); } - const stateUpdate: Partial> = {}; - try { - const results = await this.query({ direction }); - if (!results) return; - const { items, next, prev } = results; - if (isFirstPage && (next || prev)) { - this._isCursorPagination = true; - } + this._nextQueryShape = queryShape; + const results = await this.runQueryRetryable({ + direction, + queryShape, + reset, + retryCount, + }); + this._lastQueryShape = this._nextQueryShape; + this._nextQueryShape = undefined; - if (this._isCursorPagination) { - stateUpdate.cursor = { next: next || null, prev: prev || null }; - stateUpdate.hasNext = !!next; - stateUpdate.hasPrev = !!prev; - } else { - stateUpdate.offset = (this.offset ?? 
0) + items.length; - stateUpdate.hasNext = items.length === this.pageSize; - } + // if the request failed the value is null, loading finished + if (!results) { + this.state.partialNext({ isLoading: false }); + return; + } - stateUpdate.items = await this.filterQueryResults(items); - } catch (e) { - stateUpdate.lastQueryError = e as Error; - } finally { - this.state.next(this.getStateAfterQuery(stateUpdate, isFirstPage)); + const stateUpdate: Partial> = { lastQueryError: undefined }; + + const { items, next, prev } = results; + if (isFirstPage && (next || prev)) { + this._isCursorPagination = true; } + + if (this._isCursorPagination) { + stateUpdate.cursor = { next: next || null, prev: prev || null }; + stateUpdate.hasNext = !!next; + stateUpdate.hasPrev = !!prev; + } else { + stateUpdate.offset = (this.offset ?? 0) + items.length; + stateUpdate.hasNext = items.length === this.pageSize; + } + + stateUpdate.items = await this.filterQueryResults(items); + const state = this.getStateAfterQuery(stateUpdate, isFirstPage); + this.state.next(state); + this.populateOfflineDbAfterQuery({ items: state.items, queryShape }); } cancelScheduledQuery() { @@ -454,19 +682,24 @@ export abstract class BasePaginator { this.state.next(this.initialState); } - next = () => this.executeQuery({ direction: 'next' }); + next = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.executeQuery({ direction: 'next', ...params }); - prev = () => this.executeQuery({ direction: 'prev' }); + prev = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.executeQuery({ direction: 'prev', ...params }); - nextDebounced = () => { - this._executeQueryDebounced({ direction: 'next' }); + nextDebounced = ( + params: Omit, 'direction' | 'queryShape'> = {}, + ) => { + this._executeQueryDebounced({ direction: 'next', ...params }); }; - prevDebounced = () => { - this._executeQueryDebounced({ direction: 'prev' }); + prevDebounced = ( + params: Omit, 'direction' | 'queryShape'> = {}, + ) => { + 
this._executeQueryDebounced({ direction: 'prev', ...params }); }; reload = async () => { - this.resetState(); - await this.next(); + await this.next({ reset: 'yes' }); }; } diff --git a/src/pagination/ChannelPaginator.ts b/src/pagination/ChannelPaginator.ts index 559e7b31c2..6609ff1d7a 100644 --- a/src/pagination/ChannelPaginator.ts +++ b/src/pagination/ChannelPaginator.ts @@ -1,6 +1,7 @@ import type { PaginationQueryParams, PaginationQueryReturnValue, + PaginationQueryShapeChangeIdentifier, PaginatorOptions, PaginatorState, } from './BasePaginator'; @@ -11,12 +12,26 @@ import { makeComparator } from './sortCompiler'; import { generateUUIDv4 } from '../utils'; import type { StreamChat } from '../client'; import type { Channel } from '../channel'; -import type { ChannelFilters, ChannelOptions, ChannelSort } from '../types'; +import type { + ChannelFilters, + ChannelOptions, + ChannelSort, + ChannelStateOptions, +} from '../types'; import type { FieldToDataResolver, PathResolver } from './types.normalization'; import { resolveDotPathValue } from './utility.normalization'; +import type { ValueOrPatch } from '../store'; +import { isEqual } from '../utils/mergeWith/mergeWithCore'; const DEFAULT_BACKEND_SORT: ChannelSort = { last_message_at: -1, updated_at: -1 }; // {last_updated: -1} +export type ChannelQueryShape = { + filters: ChannelFilters; + sort?: ChannelSort; + options?: ChannelOptions; + stateOptions?: ChannelStateOptions; +}; + export type ChannelPaginatorState = PaginatorState; export type ChannelPaginatorRequestOptions = Partial< @@ -25,14 +40,35 @@ export type ChannelPaginatorRequestOptions = Partial< export type ChannelPaginatorOptions = { client: StreamChat; + channelStateOptions?: ChannelStateOptions; filterBuilderOptions?: FilterBuilderOptions; filters?: ChannelFilters; id?: string; - paginatorOptions?: PaginatorOptions; + paginatorOptions?: PaginatorOptions; requestOptions?: ChannelPaginatorRequestOptions; sort?: ChannelSort | ChannelSort[]; }; +const 
getQueryShapeRelevantChannelOptions = (options: ChannelOptions) => { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + const { limit: _, member_limit: __, message_limit: ___, ...relevantShape } = options; + return relevantShape; +}; + +const hasPaginationQueryShapeChanged: PaginationQueryShapeChangeIdentifier< + ChannelQueryShape +> = (prevQueryShape, nextQueryShape) => + !isEqual( + { + ...prevQueryShape, + options: getQueryShapeRelevantChannelOptions(prevQueryShape?.options ?? {}), + }, + { + ...nextQueryShape, + options: getQueryShapeRelevantChannelOptions(nextQueryShape?.options ?? {}), + }, + ); + const pinnedFilterResolver: FieldToDataResolver = { matchesField: (field) => field === 'pinned', resolve: (channel) => !!channel.state.membership.pinned_at, @@ -99,17 +135,19 @@ const channelSortPathResolver: PathResolver = (channel, path) => { // todo: maybe items could be just an array of {cid: string} and the data would be retrieved from client.activeChannels // todo: maybe we should introduce client._cache.channels that would be reactive and orchestrator would subscribe to client._cache.channels state to keep all the dependent state in sync -export class ChannelPaginator extends BasePaginator { - // state: StateStore; +export class ChannelPaginator extends BasePaginator { + private readonly _id: string; private client: StreamChat; - protected _filters: ChannelFilters | undefined; + protected _staticFilters: ChannelFilters | undefined; protected _sort: ChannelSort | ChannelSort[] | undefined; protected _options: ChannelPaginatorRequestOptions | undefined; - private _id: string; + protected _channelStateOptions: ChannelStateOptions | undefined; + protected _nextQueryShape: ChannelQueryShape | undefined; sortComparator: (a: Channel, b: Channel) => number; filterBuilder: FilterBuilder; constructor({ + channelStateOptions, client, id, filterBuilderOptions, @@ -118,13 +156,14 @@ export class ChannelPaginator extends BasePaginator { requestOptions, sort, 
}: ChannelPaginatorOptions) { - super(paginatorOptions); + super({ hasPaginationQueryShapeChanged, ...paginatorOptions }); const definedSort = sort ?? DEFAULT_BACKEND_SORT; this.client = client; this._id = id ?? `channel-paginator-${generateUUIDv4()}`; this._sort = definedSort; - this._filters = filters; + this._staticFilters = filters; this._options = requestOptions; + this._channelStateOptions = channelStateOptions; this.filterBuilder = new FilterBuilder(filterBuilderOptions); this.sortComparator = makeComparator({ sort: definedSort, @@ -147,8 +186,12 @@ export class ChannelPaginator extends BasePaginator { return this._id; } - get filters(): ChannelFilters | undefined { - return this._filters; + get isOfflineSupportEnabled() { + return !!this.client.offlineDb; + } + + get staticFilters(): ChannelFilters | undefined { + return this._staticFilters; } get sort(): ChannelSort | undefined { @@ -159,9 +202,12 @@ export class ChannelPaginator extends BasePaginator { return this._options; } - set filters(filters: ChannelFilters | undefined) { - this._filters = filters; - this.resetState(); + get channelStateOptions(): ChannelStateOptions | undefined { + return this._channelStateOptions; + } + + set staticFilters(filters: ChannelFilters | undefined) { + this._staticFilters = filters; } set sort(sort: ChannelSort | ChannelSort[] | undefined) { @@ -169,12 +215,14 @@ export class ChannelPaginator extends BasePaginator { this.sortComparator = makeComparator({ sort: this.sort ?? 
DEFAULT_BACKEND_SORT, }); - this.resetState(); } set options(options: ChannelPaginatorRequestOptions | undefined) { this._options = options; - this.resetState(); + } + + set channelStateOptions(options: ChannelStateOptions | undefined) { + this._channelStateOptions = options; } getItemId(item: Channel): string { @@ -183,24 +231,127 @@ export class ChannelPaginator extends BasePaginator { buildFilters = (): ChannelFilters => this.filterBuilder.buildFilters({ - baseFilters: { ...this.filters }, + baseFilters: { ...this.staticFilters }, }); - query = async ({ direction }: PaginationQueryParams = {}): Promise< - PaginationQueryReturnValue - > => { - if (direction) { - console.warn('Direction is not supported with channel pagination.'); - } - const filters = this.buildFilters(); - const options: ChannelOptions = { - ...this.options, - limit: this.pageSize, - offset: this.offset, + // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; + protected getNextQueryShape(): ChannelQueryShape { + const shape: ChannelQueryShape = { + filters: this.buildFilters(), + options: { + ...this.options, + limit: this.pageSize, + offset: this.offset, + }, }; - const items = await this.client.queryChannels(filters, this.sort, options); + + if (this.sort) { + shape.sort = this.sort; + } + + if (this.channelStateOptions) { + shape.stateOptions = this.channelStateOptions; + } + return shape; + } + + preloadFirstPageFromOfflineDb = async ({ + direction, + queryShape, + reset, + }: PaginationQueryParams) => { + if ( + !this.client.offlineDb?.getChannelsForQuery || + !this.client.user?.id || + !queryShape + ) + return undefined; + + try { + const channelsFromDB = await this.client.offlineDb.getChannelsForQuery({ + userId: this.client.user.id, + filters: queryShape.filters, + sort: queryShape.sort, + }); + + if (channelsFromDB) { + const offlineChannels = this.client.hydrateActiveChannels(channelsFromDB, { + offlineMode: true, + skipInitialization: [], // passing empty 
array will clear out the existing messages from channel state, this removes the possibility of duplicate messages + }); + + return offlineChannels; + } + + if (!this.client.offlineDb.syncManager.syncStatus) { + this.client.offlineDb.syncManager.scheduleSyncStatusChangeCallback( + this.id, + async () => { + await this.executeQuery({ direction, queryShape, reset }); + }, + ); + return; + } + } catch (error) { + this.client.logger('error', (error as Error).message); + if (this.config.throwErrors) throw error; + } + return; + }; + + populateOfflineDbAfterQuery = ({ + items, + queryShape, + }: { + items?: Channel[]; + queryShape?: ChannelQueryShape; + }) => { + if (!items || !queryShape) return undefined; + + this.client.offlineDb?.executeQuerySafely( + (db) => + db.upsertCidsForQuery({ + cids: items.map((channel) => channel.cid), + filters: queryShape.filters, + sort: queryShape.sort, + }), + { method: 'upsertCidsForQuery' }, + ); + }; + + query = async (): Promise> => { + // get the params only if they were not generated previously + if (!this._nextQueryShape) { + this._nextQueryShape = this.getNextQueryShape(); + } + const { filters, sort, options, stateOptions } = this._nextQueryShape; + let items: Channel[]; + if (this.config.doRequest) { + items = (await this.config.doRequest(this._nextQueryShape)).items; + } else { + items = await this.client.queryChannels(filters, sort, options, stateOptions); + } return { items }; }; filterQueryResults = (items: Channel[]) => items; + + setItems(valueOrFactory: ValueOrPatch) { + super.setItems(valueOrFactory); + + if (!this.client.offlineDb) return; + + const { items: channels = [], sort } = this; + const filters = this.buildFilters(); + + this.client.offlineDb?.executeQuerySafely( + (db) => + db.upsertCidsForQuery({ + cids: channels.map((channel) => channel.cid), + filters, + sort, + }), + { method: 'upsertCidsForQuery' }, + ); + } } diff --git a/src/pagination/ReminderPaginator.ts b/src/pagination/ReminderPaginator.ts index 
789354cd8c..9bbf56c5ce 100644 --- a/src/pagination/ReminderPaginator.ts +++ b/src/pagination/ReminderPaginator.ts @@ -4,10 +4,18 @@ import type { PaginationQueryReturnValue, PaginatorOptions, } from './BasePaginator'; -import type { ReminderFilters, ReminderResponse, ReminderSort } from '../types'; +import type { + QueryRemindersOptions, + ReminderFilters, + ReminderResponse, + ReminderSort, +} from '../types'; import type { StreamChat } from '../client'; -export class ReminderPaginator extends BasePaginator { +export class ReminderPaginator extends BasePaginator< + ReminderResponse, + QueryRemindersOptions +> { private client: StreamChat; protected _filters: ReminderFilters | undefined; protected _sort: ReminderSort | undefined; @@ -30,27 +38,34 @@ export class ReminderPaginator extends BasePaginator { this.resetState(); } - constructor(client: StreamChat, options?: PaginatorOptions) { + constructor( + client: StreamChat, + options?: PaginatorOptions, + ) { super(options); this.client = client; } - query = async ({ + protected getNextQueryShape({ direction, - }: Required): Promise< - PaginationQueryReturnValue - > => { + }: Required< + Pick, 'direction'> + >): QueryRemindersOptions { const cursor = this.cursor?.[direction]; - const { - reminders: items, - next, - prev, - } = await this.client.queryReminders({ + return { filter: this.filters, sort: this.sort, limit: this.pageSize, [direction]: cursor, - }); + }; + } + + query = async ({ + queryShape, + }: PaginationQueryParams): Promise< + PaginationQueryReturnValue + > => { + const { reminders: items, next, prev } = await this.client.queryReminders(queryShape); return { items, next, prev }; }; diff --git a/src/utils.ts b/src/utils.ts index ef255ccb72..472b540586 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1327,7 +1327,7 @@ export const runDetached = ( onErrorCallback?: (error: Error) => void | Promise; }, ) => { - const { context, onSuccessCallback = () => undefined, onErrorCallback } = options ?? 
{}; + const { context, onSuccessCallback, onErrorCallback } = options ?? {}; const defaultOnError = (error: Error) => { console.log(`An error has occurred in context ${context}: ${error}`); }; diff --git a/src/utils/mergeWith/mergeWithCore.ts b/src/utils/mergeWith/mergeWithCore.ts index 234c9dec0e..0288b9d2f3 100644 --- a/src/utils/mergeWith/mergeWithCore.ts +++ b/src/utils/mergeWith/mergeWithCore.ts @@ -44,177 +44,162 @@ export const isClassInstance = (value: unknown): boolean => { return value.constructor && value.constructor !== Object; }; +type PairMemo = WeakMap>; + +function memoHasOrAdd(memo: PairMemo, a: object, b: object): boolean { + const set = memo.get(a); + if (set && set.has(b)) return true; + if (set) set.add(b); + else memo.set(a, new WeakSet([b])); + return false; +} + /** - * Performs a deep comparison between two values to determine if they are equivalent. - * This is similar to Lodash's isEqual implementation but simplified. + * Deep semantic equality with cycle safety and symbol-key support. 
+ * Keeps your existing semantics: + * - Dates/RegExps compared by value + * - "Class instances" are treated atomically (unequal unless ===) + * - NaN equals NaN; -0 equals 0 (same as ===) */ export const isEqual = ( value1: unknown, value2: unknown, - compareStack = new Set<[unknown, unknown]>(), - objectStack1 = new WeakSet(), - objectStack2 = new WeakSet(), + pairMemo: PairMemo = new WeakMap(), ): boolean => { - // Handle simple equality cases first - if (value1 === value2) return true; - - // If either is null/undefined, they're not equal (already checked ===) + if (value1 === value2) return true; // includes -0 === 0 if (value1 == null || value2 == null) return false; - // Get the type of both values - const type1 = typeof value1; - const type2 = typeof value2; + const t1 = typeof value1; + const t2 = typeof value2; + if (t1 !== t2) return false; - // Different types mean they're not equal - if (type1 !== type2) return false; - - // Handle non-object types that need special comparison - if (type1 !== 'object') { - // Special case for NaN + if (t1 !== 'object') { + // NaN handling // eslint-disable-next-line no-self-compare - if (value1 !== value1 && value2 !== value2) return true; - return value1 === value2; + return value1 !== value1 && value2 !== value2 ? 
true : value1 === value2; } - // At this point, both values are objects - const obj1 = value1 as object; - const obj2 = value2 as object; - - // Check for circular references in each object - if (objectStack1.has(obj1) || objectStack2.has(obj2)) { - // If either object has been seen before, consider them equal - // if they're both in a circular reference - return objectStack1.has(obj1) && objectStack2.has(obj2); - } + // Objects + const o1 = value1 as object; + const o2 = value2 as object; - // Add objects to their respective stacks - objectStack1.add(obj1); - objectStack2.add(obj2); + // Fast path for tag mismatch + const tag1 = Object.prototype.toString.call(o1); + const tag2 = Object.prototype.toString.call(o2); + if (tag1 !== tag2) return false; - // Handle Date objects - needs to be before the class instance check - if (value1 instanceof Date && value2 instanceof Date) { - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return value1.getTime() === value2.getTime(); + // Special cases before instance test + if (o1 instanceof Date && o2 instanceof Date) { + return (o1 as Date).getTime() === (o2 as Date).getTime(); } - - // Handle RegExp objects - needs to be before the class instance check - if (value1 instanceof RegExp && value2 instanceof RegExp) { - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return value1.toString() === value2.toString(); - } - - // If either is a class instance, use reference equality (already checked above) - if (isClassInstance(value1) || isClassInstance(value2)) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; + if (o1 instanceof RegExp && o2 instanceof RegExp) { + const r1 = o1 as RegExp, + r2 = o2 as RegExp; + return r1.source === r2.source && r1.flags === r2.flags; } - // Handle arrays - const isArray1 = Array.isArray(value1); - const isArray2 = Array.isArray(value2); + // Handle Set comparison + // Two sets are equal if they have the same size and + // 
every value in one has an equivalent value in the + // other (using deep equality). + // Cannot use the same item for multiple matches in another set. + if (value1 instanceof Set && value2 instanceof Set) { + if (value1.size !== value2.size) return false; + if (memoHasOrAdd(pairMemo, value1, value2)) return true; + + const unmatched = new Set(value2); + + for (const v1 of value1) { + let matched = false; + for (const v2 of unmatched) { + if (isEqual(v1, v2, pairMemo)) { + unmatched.delete(v2); // consume the match + matched = true; + break; + } + } + if (!matched) return false; + } - if (isArray1 !== isArray2) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; + return unmatched.size === 0; } - if (isArray1 && isArray2) { - const arr1 = value1 as unknown[]; - const arr2 = value2 as unknown[]; + // Handle Map comparison + if (value1 instanceof Map && value2 instanceof Map) { + if (value1.size !== value2.size) return false; - if (arr1.length !== arr2.length) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; - } + if (memoHasOrAdd(pairMemo, value1, value2)) return true; - // Check for circular references in the comparison context - const pairKey: [unknown, unknown] = [value1, value2]; - if (compareStack.has(pairKey)) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return true; - } - compareStack.add(pairKey); - - // Compare each element - for (let i = 0; i < arr1.length; i++) { - if (!isEqual(arr1[i], arr2[i], compareStack, objectStack1, objectStack2)) { - compareStack.delete(pairKey); - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; - } - } + const unmatched = new Set(value2); // tracks entries in map2 not yet matched - compareStack.delete(pairKey); - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return true; - } + for (const [k1, v1] of value1) 
{ + let matchedEntry: [unknown, unknown] | null = null; - // Handle plain objects - const plainObj1 = value1 as Record; - const plainObj2 = value2 as Record; + for (const entry of unmatched) { + const [k2, v2] = entry as [unknown, unknown]; + if (isEqual(k1, k2, pairMemo) && isEqual(v1, v2, pairMemo)) { + matchedEntry = entry; + break; + } + } - const keys1 = Object.keys(plainObj1); - const keys2 = Object.keys(plainObj2); + if (!matchedEntry) return false; // nothing matched this entry + unmatched.delete(matchedEntry); // consume it + } - // If key counts differ, objects aren't equal - if (keys1.length !== keys2.length) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; + return unmatched.size === 0; } - // Verify all keys in obj2 are in obj1 (we already checked counts, so this - // also ensures all keys in obj1 are in obj2) - for (const key of keys2) { - if (!Object.prototype.hasOwnProperty.call(plainObj1, key)) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; + // Treat non-plain instances atomically (your current rule) + if (isClassInstance(o1) || isClassInstance(o2)) return false; + + // Cycle guard (pairwise) + if (memoHasOrAdd(pairMemo, o1, o2)) return true; + + // Arrays (respect holes vs undefined) + if (Array.isArray(o1)) { + const a1 = value1 as unknown[], + a2 = value2 as unknown[]; + if (a1.length !== a2.length) return false; + for (let i = 0; i < a1.length; i++) { + const has1 = i in a1, + has2 = i in a2; + if (has1 !== has2) return false; + if (has1 && !isEqual(a1[i], a2[i], pairMemo)) return false; } - } - - // Check for circular references in the comparison context - const pairKey: [unknown, unknown] = [value1, value2]; - if (compareStack.has(pairKey)) { - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return true; - } - compareStack.add(pairKey); - - // Compare each property's value - for (const 
key of keys1) { - if ( - !isEqual(plainObj1[key], plainObj2[key], compareStack, objectStack1, objectStack2) - ) { - compareStack.delete(pairKey); - // Clean up before returning - objectStack1.delete(obj1); - objectStack2.delete(obj2); - return false; + // Compare enumerable non-index props as well (to align with objects) + const extraKeys1 = Reflect.ownKeys(o1) + .filter((k) => typeof k !== 'string' || isNaN(+k)) + .filter((k) => Object.prototype.propertyIsEnumerable.call(o1, k)); + const extraKeys2 = Reflect.ownKeys(o2) + .filter((k) => typeof k !== 'string' || isNaN(+k)) + .filter((k) => Object.prototype.propertyIsEnumerable.call(o2, k)); + if (extraKeys1.length !== extraKeys2.length) return false; + for (const k of extraKeys1) { + if (!Object.prototype.hasOwnProperty.call(o2, k)) return false; + // @ts-expect-error index signature + if (!isEqual(o1[k], o2[k], pairMemo)) return false; } + return true; } - compareStack.delete(pairKey); - // Clean up before returning successful comparison - objectStack1.delete(obj1); - objectStack2.delete(obj2); + // Plain objects (string + symbol enumerable own keys) + const keys1 = Reflect.ownKeys(o1).filter((k) => + Object.prototype.propertyIsEnumerable.call(o1, k), + ); + const keys2 = Reflect.ownKeys(o2).filter((k) => + Object.prototype.propertyIsEnumerable.call(o2, k), + ); + if (keys1.length !== keys2.length) return false; + + // enforce same prototype to avoid {} == Object.create(null, ...) 
+ if (Object.getPrototypeOf(o1) !== Object.getPrototypeOf(o2)) return false; + + for (const k of keys1) { + if (!Object.prototype.hasOwnProperty.call(o2, k)) return false; + // @ts-expect-error index signature + if (!isEqual(o1[k], o2[k], pairMemo)) return false; + } return true; }; @@ -241,13 +226,7 @@ function compareAndBuildDiff( modified: unknown, parentDiffNode: DiffNode, key?: string | symbol, - /** - * Tracks pairs of objects being compared - * - It stores pairs of values that are being compared `[original, modified]` - * - This helps detect when we're comparing the same pair of objects again - * - It prevents infinite recursion when comparing complex object structures - */ - compareStack = new Set<[unknown, unknown]>(), + pairMemo: PairMemo = new WeakMap(), /** * Tracks individual objects that are being processed in the current traversal path * - It's used to detect when we encounter the same object multiple times in a single traversal path @@ -257,9 +236,7 @@ function compareAndBuildDiff( objectStack = new Set(), ): void { // If values are equal, no diff to record - if (isEqual(original, modified, new Set(compareStack))) { - return; - } + if (isEqual(original, modified)) return; // Handle additions (value in modified but not in original) if (original === undefined || original === null) { @@ -335,16 +312,20 @@ function compareAndBuildDiff( parentDiffNode.children[String(key)] = currentDiffNode; } - // Check for circular references in comparison - const pairKey: [unknown, unknown] = [original, modified]; - if (compareStack.has(pairKey)) { - // Remove from object stack before returning - if (typeof original === 'object' && original !== null) { - objectStack.delete(original); + // Pairwise cycle check (prevents infinite recursion across the *pair*) + if ( + typeof original === 'object' && + original !== null && + typeof modified === 'object' && + modified !== null + ) { + if (memoHasOrAdd(pairMemo, original as object, modified as object)) { + // already 
visited this exact pair in this diff traversal + // (prevents infinite recursion), so stop here + if (typeof original === 'object') objectStack.delete(original); + return; } - return; } - compareStack.add(pairKey); // Process all keys from both objects const allKeys = new Set([ @@ -380,17 +361,12 @@ function compareAndBuildDiff( modifiedValue, currentDiffNode, childKey, - compareStack, + pairMemo, objectStack, ); } - compareStack.delete(pairKey); - - // Remove from object stack before returning - if (typeof original === 'object' && original !== null) { - objectStack.delete(original); - } + if (typeof original === 'object' && original !== null) objectStack.delete(original); } export function createMergeCore(options: { trackDiff?: boolean } = {}) { diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts index 0d29234e95..be4a09966b 100644 --- a/test/unit/ChannelPaginatorsOrchestrator.test.ts +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -31,7 +31,6 @@ describe('ChannelPaginatorsOrchestrator', () => { const orchestrator = new ChannelPaginatorsOrchestrator({ client }); expect(orchestrator.paginators).toHaveLength(0); - // @ts-expect-error accessing protected property expect(orchestrator.pipelines.size).toBe(Object.keys(defaultHandlers).length); }); @@ -74,17 +73,15 @@ describe('ChannelPaginatorsOrchestrator', () => { }); expect(orchestrator.paginators).toHaveLength(1); expect(orchestrator.getPaginatorById(paginator.id)).toStrictEqual(paginator); - // @ts-expect-error accessing protected property expect(orchestrator.pipelines.size).toBe(Object.keys(defaultHandlers).length + 1); + expect(orchestrator.pipelines.get('channel.visible')?.size).toBe(2); // @ts-expect-error accessing protected property - expect(orchestrator.pipelines.get('channel.visible').size).toBe(2); - // @ts-expect-error accessing protected property - expect(orchestrator.pipelines.get('channel.visible').handlers[0].id).toBe( + 
expect(orchestrator.pipelines.get('channel.visible')?.handlers[0].id).toBe( eventHandlers['channel.visible'][0].id, ); // @ts-expect-error accessing protected property - expect(orchestrator.pipelines.get('channel.visible').handlers[1].id).toBe( + expect(orchestrator.pipelines.get('channel.visible')?.handlers[1].id).toBe( eventHandlers['channel.visible'][1].id, ); @@ -249,6 +246,113 @@ describe('ChannelPaginatorsOrchestrator', () => { }); }); + describe('setEventHandler', () => { + it('replaces the existing handlers for a given event type', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const eventType = 'channel.updated'; + const channelUpdatedEvent = { type: eventType, cid: 'x' } as const; + const channelUpdatedHandler1 = vi.fn(); + const channelUpdatedHandler2 = vi.fn(); + const unsubscribe = orchestrator.addEventHandler({ + eventType, + id: 'custom', + handle: channelUpdatedHandler1, + }); + + orchestrator.registerSubscriptions(); + + client.dispatchEvent(channelUpdatedEvent); + // event listeners are executed async + await vi.waitFor(() => { + expect(channelUpdatedHandler1).toHaveBeenCalledWith({ + ctx: { orchestrator }, + event: channelUpdatedEvent, + }); + }); + expect(channelUpdatedHandler1).toHaveBeenCalledTimes(1); + expect(channelUpdatedHandler2).toHaveBeenCalledTimes(0); + + orchestrator.setEventHandlers({ + eventType, + handlers: [{ id: 'custom2', handle: channelUpdatedHandler2 }], + }); + + client.dispatchEvent(channelUpdatedEvent); + await vi.waitFor(() => { + expect(channelUpdatedHandler2).toHaveBeenCalledWith({ + ctx: { orchestrator }, + event: channelUpdatedEvent, + }); + }); + + // Unsubscribe the custom handler and ensure it no longer fires + unsubscribe(); + + // still 1 call total (did not increment) + expect(channelUpdatedHandler1).toHaveBeenCalledTimes(1); + expect(channelUpdatedHandler2).toHaveBeenCalledTimes(1); + }); + }); + + describe('removeEventHandler', () => { + it('does not create a pipeline for 
which the event type is removed', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const eventType = 'channel.updatedX'; + + expect(orchestrator.pipelines.get(eventType)).toBeUndefined(); + orchestrator.removeEventHandlers({ + eventType, + handlers: [{ idMatch: { id: 'XXX' } }], + }); + expect(orchestrator.pipelines.get(eventType)).toBeUndefined(); + }); + + it('removes the existing handlers for a given event type', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const eventType = 'channel.updated'; + const channelUpdatedEvent = { type: eventType, cid: 'x' } as const; + const channelUpdatedHandler1 = vi.fn(); + const channelUpdatedHandler2 = vi.fn(); + orchestrator.setEventHandlers({ + eventType, + handlers: [ + { + id: 'custom1', + handle: channelUpdatedHandler1, + }, + { + id: 'custom2', + handle: channelUpdatedHandler2, + }, + ], + }); + + orchestrator.registerSubscriptions(); + // @ts-expect-error accessing protected property handlers + expect(orchestrator.pipelines.get(eventType).handlers).toHaveLength(2); + + client.dispatchEvent(channelUpdatedEvent); + // wait for async handler execution + await vi.waitFor(() => { + expect(channelUpdatedHandler1).toHaveBeenCalledTimes(1); + expect(channelUpdatedHandler2).toHaveBeenCalledTimes(1); + }); + + orchestrator.removeEventHandlers({ + eventType, + handlers: [{ idMatch: { id: 'custom', regexMatch: true } }], + }); + client.dispatchEvent(channelUpdatedEvent); + // wait for async handler execution + await vi.waitFor(() => { + expect(channelUpdatedHandler1).toHaveBeenCalledTimes(1); + expect(channelUpdatedHandler2).toHaveBeenCalledTimes(1); + }); + // @ts-expect-error accessing protected property handlers + expect(orchestrator.pipelines.get(eventType).handlers).toHaveLength(0); + }); + }); + describe('ensurePipeline', () => { it('returns the same pipeline instance for the same event type', () => { const orchestrator = new 
ChannelPaginatorsOrchestrator({ client }); @@ -280,63 +384,66 @@ describe('ChannelPaginatorsOrchestrator', () => { return client.channel(type, id); } - describe('channel.deleted', () => { - it('removes the channel from all paginators', async () => { - const cid = 'messaging:1'; - const ch = makeChannel(cid); + describe.each(['channel.deleted', 'channel.hidden'] as EventTypes[])( + 'event %s', + (eventType) => { + it('removes the channel from all paginators', async () => { + const cid = 'messaging:1'; + const ch = makeChannel(cid); - const p1 = new ChannelPaginator({ client }); - const p2 = new ChannelPaginator({ client }); - const r1 = vi.spyOn(p1, 'removeItem'); - const r2 = vi.spyOn(p2, 'removeItem'); + const p1 = new ChannelPaginator({ client }); + const p2 = new ChannelPaginator({ client }); + const r1 = vi.spyOn(p1, 'removeItem'); + const r2 = vi.spyOn(p2, 'removeItem'); - const orchestrator = new ChannelPaginatorsOrchestrator({ - client, - paginators: [p1, p2], - }); - client.activeChannels[cid] = ch; + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2], + }); + client.activeChannels[cid] = ch; - orchestrator.registerSubscriptions(); - client.dispatchEvent({ type: 'channel.deleted', cid } as const); + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'channel.deleted', cid } as const); - await vi.waitFor(() => { - // client.activeChannels does not contain the deleted channel, therefore the search is performed with id - expect(r1).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); - expect(r2).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); + await vi.waitFor(() => { + // client.activeChannels does not contain the deleted channel, therefore the search is performed with id + expect(r1).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); + expect(r2).toHaveBeenCalledWith({ id: ch.cid, item: undefined }); + }); }); - }); - it('is a no-op when cid is missing', async () => { - const 
orchestrator = new ChannelPaginatorsOrchestrator({ client }); - const p = new ChannelPaginator({ client }); - const r = vi.spyOn(p, 'removeItem'); + it('is a no-op when cid is missing', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 'removeItem'); - orchestrator.insertPaginator({ paginator: p }); - orchestrator.registerSubscriptions(); + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); - client.dispatchEvent({ type: 'channel.deleted' } as const); // no cid - await vi.waitFor(() => { - expect(r).not.toHaveBeenCalled(); + client.dispatchEvent({ type: 'channel.deleted' } as const); // no cid + await vi.waitFor(() => { + expect(r).not.toHaveBeenCalled(); + }); }); - }); - it('tries to remove non-existent channel from all paginators', async () => { - const orchestrator = new ChannelPaginatorsOrchestrator({ client }); - const p = new ChannelPaginator({ client }); - const r = vi.spyOn(p, 'removeItem'); + it('tries to remove non-existent channel from all paginators', async () => { + const orchestrator = new ChannelPaginatorsOrchestrator({ client }); + const p = new ChannelPaginator({ client }); + const r = vi.spyOn(p, 'removeItem'); - orchestrator.insertPaginator({ paginator: p }); - orchestrator.registerSubscriptions(); + orchestrator.insertPaginator({ paginator: p }); + orchestrator.registerSubscriptions(); - client.dispatchEvent({ type: 'channel.deleted', cid: 'messaging:404' }); // no such channel - await vi.waitFor(() => { - expect(r).toHaveBeenCalledWith({ id: 'messaging:404', item: undefined }); + client.dispatchEvent({ type: 'channel.deleted', cid: 'messaging:404' }); // no such channel + await vi.waitFor(() => { + expect(r).toHaveBeenCalledWith({ id: 'messaging:404', item: undefined }); + }); }); - }); - }); + }, + ); describe.each(['notification.removed_from_channel'] as EventTypes[])( - '%s', + 'event %s', (eventType) 
=> { it('removes the channel from all paginators', async () => { const cid = 'messaging:2'; @@ -394,7 +501,7 @@ describe('ChannelPaginatorsOrchestrator', () => { ); describe.each(['channel.updated', 'channel.truncated'] as EventTypes[])( - '%s', + 'event %s', (eventType) => { it('re-emits item lists for paginators that already contain the channel', async () => { const orchestrator = new ChannelPaginatorsOrchestrator({ client }); @@ -430,7 +537,7 @@ describe('ChannelPaginatorsOrchestrator', () => { 'message.new', 'notification.added_to_channel', 'notification.message_new', - ] as EventTypes[])('%s', (eventType) => { + ] as EventTypes[])('event %s', (eventType) => { it('ingests when matchesFilter, removes when not', async () => { const orchestrator = new ChannelPaginatorsOrchestrator({ client }); const ch = makeChannel('messaging:5'); diff --git a/test/unit/EventHandlerPipeline.test.ts b/test/unit/EventHandlerPipeline.test.ts index 67a0ce934e..de47aaf6f3 100644 --- a/test/unit/EventHandlerPipeline.test.ts +++ b/test/unit/EventHandlerPipeline.test.ts @@ -203,6 +203,68 @@ describe('EventHandlerPipeline', () => { }); }); + describe('findIndex', () => { + const h1 = { + id: 'h1', + handle: () => { + console.log(1); + }, + }; + const h2 = { + id: 'h2', + handle: () => { + console.log(2); + }, + }; + + it('searches by handler function identity', () => { + const h3 = { + id: 'h2', + handle: () => { + console.log(2); + }, + }; + + pipeline.insert(h1); + pipeline.insert(h2); + expect(pipeline.findIndex({ handler: h1 })).toBe(0); + expect(pipeline.findIndex({ handler: h2 })).toBe(1); + expect(pipeline.findIndex({ handler: h3 })).toBe(-1); + }); + + it('searches by exact handler id match', () => { + const h3 = { + id: 'H2', + handle: () => { + console.log(2); + }, + }; + + pipeline.insert(h1); + pipeline.insert(h2); + expect(pipeline.findIndex({ idMatch: { id: h1.id } })).toBe(0); + expect(pipeline.findIndex({ idMatch: { id: h2.id } })).toBe(1); + expect(pipeline.findIndex({ 
idMatch: { id: h3.id } })).toBe(-1); + }); + + it('searches by handler id matching as regex', () => { + const h3 = { + id: 'H2', + handle: () => { + console.log(2); + }, + }; + + pipeline.insert(h1); + pipeline.insert(h2); + expect(pipeline.findIndex({ idMatch: { id: h1.id, regexMatch: true } })).toBe(0); + expect(pipeline.findIndex({ idMatch: { id: h2.id, regexMatch: true } })).toBe(1); + expect(pipeline.findIndex({ idMatch: { id: new RegExp(h3.id, 'i') } })).toBe(1); + expect(pipeline.findIndex({ idMatch: { id: h3.id, regexMatch: true } })).toBe(-1); + expect(pipeline.findIndex({ idMatch: { id: /h/ } })).toBe(0); + }); + }); + describe('remove', () => { it('removes by handler object identity', async () => { const out: string[] = []; @@ -221,7 +283,7 @@ describe('EventHandlerPipeline', () => { pipeline.insert(h1); pipeline.insert(h2); - pipeline.remove(h2); // remove by object + pipeline.remove({ handler: h2 }); // remove by object // @ts-expect-error passing custom event type await pipeline.run(makeEvt('evt'), ctx); @@ -235,7 +297,7 @@ describe('EventHandlerPipeline', () => { }; const h1: LabeledEventHandler = { id: 'h1', handle: fn }; pipeline.insert(h1); - pipeline.remove(fn); // remove by function ref + pipeline.remove({ handler: fn }); // remove by function ref // @ts-expect-error passing custom event type await pipeline.run(makeEvt('evt'), ctx); @@ -247,7 +309,7 @@ describe('EventHandlerPipeline', () => { const fn = () => { out.push('a'); }; - pipeline.remove(fn); // nothing inserted yet + pipeline.remove({ handler: fn }); // nothing inserted yet // @ts-expect-error passing custom event type await pipeline.run(makeEvt('evt'), ctx); // no errors diff --git a/test/unit/messageDelivery/MessageReceiptsTracker.test.ts b/test/unit/messageDelivery/MessageReceiptsTracker.test.ts index 380aad2d21..df903c0d5f 100644 --- a/test/unit/messageDelivery/MessageReceiptsTracker.test.ts +++ b/test/unit/messageDelivery/MessageReceiptsTracker.test.ts @@ -439,6 +439,51 @@ 
describe('MessageDeliveryReadTracker', () => { ).toEqual([]); }); }); + + describe('groupUsersByLastReadMessage / groupUsersByLastDeliveredMessage', () => { + it('returns users for whom the given message is their exact *last* read/delivered', () => { + const a = U('a'); + const b = U('b'); + const c = U('c'); + const d = U('d'); // will share timestamp with m3 but different msgId via direct id override + const e = U('e'); // same for delivered side + const f = U('f'); // same for delivered side + + tracker.onMessageDelivered({ + user: c, + deliveredAt: iso(2000), + lastDeliveredMessageId: '2000', + }); + tracker.onMessageDelivered({ + user: a, + deliveredAt: iso(2000), + lastDeliveredMessageId: '2000', + }); + tracker.onMessageDelivered({ + user: e, + deliveredAt: iso(3000), + lastDeliveredMessageId: '3000', + }); + tracker.onMessageDelivered({ + user: f, + deliveredAt: iso(3000), + lastDeliveredMessageId: '3000', + }); + + tracker.onMessageRead({ user: a, readAt: iso(1000), lastReadMessageId: '1000' }); + tracker.onMessageRead({ user: d, readAt: iso(3000), lastReadMessageId: '3000' }); + tracker.onMessageRead({ user: b, readAt: iso(3000), lastReadMessageId: '3000' }); + + expect(tracker.groupUsersByLastDeliveredMessage()).toStrictEqual({ + '2000': [c, a], + '3000': [e, f, d, b], + }); + expect(tracker.groupUsersByLastReadMessage()).toStrictEqual({ + '1000': [a], + '3000': [d, b], + }); + }); + }); }); describe('ordering & movement in sorted arrays', () => { diff --git a/test/unit/pagination/BasePaginator.test.ts b/test/unit/pagination/BasePaginator.test.ts index bf4aa43e84..f82516e31e 100644 --- a/test/unit/pagination/BasePaginator.test.ts +++ b/test/unit/pagination/BasePaginator.test.ts @@ -5,11 +5,17 @@ import { DEFAULT_PAGINATION_OPTIONS, PaginationQueryParams, PaginationQueryReturnValue, + PaginatorCursor, type PaginatorOptions, + PaginatorState, + PrimitiveFilter, + QueryFilter, QueryFilters, + RequireOnlyOne, } from '../../../src'; import { sleep } from 
'../../../src/utils'; import { makeComparator } from '../../../src/pagination/sortCompiler'; +import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../../src/constants'; const toNextTick = async () => { const sleepPromise = sleep(0); @@ -26,7 +32,16 @@ type TestItem = { age?: number; }; -class Paginator extends BasePaginator { +type QueryShape = { + filters: { + [Key in keyof TestItem]: + | RequireOnlyOne> + | PrimitiveFilter; + }; + sort: { [Key in keyof TestItem]?: AscDesc }; +}; + +class IncompletePaginator extends BasePaginator { sort: QueryFilters | undefined; sortComparator: (a: TestItem, b: TestItem) => number = vi.fn(); queryResolve: Function = vi.fn(); @@ -34,11 +49,13 @@ class Paginator extends BasePaginator { queryPromise: Promise> | null = null; mockClientQuery = vi.fn(); - constructor(options: PaginatorOptions = {}) { + constructor(options: PaginatorOptions = {}) { super(options); } - query(params: PaginationQueryParams): Promise> { + query( + params: PaginationQueryParams, + ): Promise> { const promise = new Promise>( (queryResolve, queryReject) => { this.queryResolve = queryResolve; @@ -55,11 +72,20 @@ class Paginator extends BasePaginator { } } +const defaultNextQueryShape: QueryShape = { filters: { id: 'test-id' }, sort: { id: 1 } }; + +class Paginator extends IncompletePaginator { + constructor(options: PaginatorOptions = {}) { + super(options); + } + + getNextQueryShape = vi.fn().mockReturnValue(defaultNextQueryShape); +} + describe('BasePaginator', () => { describe('constructor', () => { it('initiates with the defaults', () => { const paginator = new Paginator(); - expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); expect(paginator.state.getLatestValue()).toEqual({ hasNext: true, hasPrev: true, @@ -69,30 +95,126 @@ describe('BasePaginator', () => { cursor: undefined, offset: 0, }); + expect(paginator.isInitialized).toBe(false); // @ts-expect-error accessing protected property 
expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + expect(paginator.config.initialCursor).toBeUndefined(); + expect(paginator.config.initialOffset).toBeUndefined(); + expect(paginator.config.throwErrors).toBe(false); + expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); + expect(paginator.config.debounceMs).toBe(DEFAULT_PAGINATION_OPTIONS.debounceMs); + expect(paginator.config.lockItemOrder).toBe( + DEFAULT_PAGINATION_OPTIONS.lockItemOrder, + ); + expect(paginator.config.hasPaginationQueryShapeChanged).toBe( + DEFAULT_PAGINATION_OPTIONS.hasPaginationQueryShapeChanged, + ); }); it('initiates with custom options', () => { - const paginator = new Paginator({ pageSize: 1 }); - expect(paginator.pageSize).not.toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); - expect(paginator.pageSize).toBe(1); + const options: PaginatorOptions = { + debounceMs: DEFAULT_PAGINATION_OPTIONS.debounceMs - 100, + doRequest: () => Promise.resolve({ items: [{ id: 'test-id' }] }), + hasPaginationQueryShapeChanged: () => true, + initialCursor: { next: 'next', prev: 'prev' }, + initialOffset: 10, + lockItemOrder: !DEFAULT_PAGINATION_OPTIONS.lockItemOrder, + pageSize: DEFAULT_PAGINATION_OPTIONS.pageSize - 1, + throwErrors: true, + }; + const paginator = new Paginator(options); expect(paginator.state.getLatestValue()).toEqual({ hasNext: true, hasPrev: true, isLoading: false, items: undefined, lastQueryError: undefined, - cursor: undefined, - offset: 0, + cursor: options.initialCursor, + offset: options.initialOffset, }); + expect(paginator.isInitialized).toBe(false); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + expect(paginator.config.initialCursor).toStrictEqual(options.initialCursor); + expect(paginator.config.initialOffset).toStrictEqual(options.initialOffset); + expect(paginator.config.throwErrors).toBe(options.throwErrors); + expect(paginator.pageSize).toBe(options.pageSize); + 
expect(paginator.config.hasPaginationQueryShapeChanged).toStrictEqual( + options.hasPaginationQueryShapeChanged, + ); + expect(paginator.config.debounceMs).toBe(options.debounceMs); + expect(paginator.config.lockItemOrder).toBe(options.lockItemOrder); }); }); describe('pagination API', () => { - it('paginates to next pages', async () => { + it('throws is the paginator does implement own getNextQueryShape', () => { + const paginator = new IncompletePaginator(); + // @ts-expect-error accessing protected property + expect(paginator.getNextQueryShape).toThrow( + 'Paginator.getNextQueryShape() is not implemented', + ); + }); + + describe('shouldResetStateBeforeQuery', () => { + const stateBeforeQuery: PaginatorState = { + hasNext: true, + hasPrev: true, + isLoading: false, + items: [{ id: 'test-item' }], + lastQueryError: undefined, + cursor: { next: 'next', prev: 'prev' }, + offset: 10, + }; + + const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; + const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; + + it('resets the state before a query when querying the first page', () => { + const paginator = new Paginator(); + const initialState = { ...stateBeforeQuery, items: undefined }; + paginator.state.next(initialState); + expect(paginator.state.getLatestValue()).toEqual(initialState); + // @ts-expect-error accessing protected property + expect(paginator.shouldResetStateBeforeQuery()).toBe(true); + }); + + it('resets the state before a query when query shape changed', () => { + const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; + const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; + const paginator = new Paginator(); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), + ).toBe(true); + expect( + // @ts-expect-error accessing protected property + 
paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), + ).toBe(false); + }); + + it('determines whether pagination state should be reset before a query using custom logic', () => { + const options = { + hasPaginationQueryShapeChanged: vi.fn().mockReturnValue(true), + }; + const paginator = new Paginator(options); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), + ).toBe(true); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), + ).toBe(true); + expect(options.hasPaginationQueryShapeChanged).toHaveBeenCalledTimes(2); + }); + }); + + it('paginates to next pages (cursor)', async () => { const paginator = new Paginator(); let nextPromise = paginator.next(); + // wait for the DB data first page load + await sleep(0); expect(paginator.isLoading).toBe(true); expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); @@ -104,7 +226,12 @@ describe('BasePaginator', () => { expect(paginator.hasPrev).toBe(true); expect(paginator.items).toEqual([{ id: 'id1' }]); expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ direction: 'next' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'next', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); nextPromise = paginator.next(); expect(paginator.isLoading).toBe(true); @@ -127,6 +254,55 @@ describe('BasePaginator', () => { expect(paginator.isLoading).toBe(false); expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); }); + + it('paginates to next pages (offset)', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.next(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasNext).toBe(true); + 
expect(paginator.hasPrev).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'next', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + nextPromise = paginator.next(); + expect(paginator.isLoading).toBe(true); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(2); + + nextPromise = paginator.next(); + paginator.queryResolve({ items: [] }); + await nextPromise; + expect(paginator.hasNext).toBe(false); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(2); + + paginator.next(); + expect(paginator.isLoading).toBe(false); + expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); + }); + it('paginates to next pages debounced', async () => { vi.useFakeTimers(); const paginator = new Paginator({ debounceMs: 2000 }); @@ -136,6 +312,8 @@ describe('BasePaginator', () => { expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); vi.advanceTimersByTime(2000); + // await first page load from the DB + await toNextTick(); expect(paginator.isLoading).toBe(true); expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); @@ -148,7 +326,12 @@ describe('BasePaginator', () => { expect(paginator.hasPrev).toBe(true); expect(paginator.items).toEqual([{ id: 'id1' }]); 
expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ direction: 'next' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'next', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); vi.useRealTimers(); }); @@ -156,6 +339,7 @@ describe('BasePaginator', () => { it('paginates to a previous page', async () => { const paginator = new Paginator(); let nextPromise = paginator.prev(); + await sleep(0); expect(paginator.isLoading).toBe(true); expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); @@ -167,7 +351,12 @@ describe('BasePaginator', () => { expect(paginator.hasPrev).toBe(true); expect(paginator.items).toEqual([{ id: 'id1' }]); expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ direction: 'prev' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'prev', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); nextPromise = paginator.prev(); expect(paginator.isLoading).toBe(true); @@ -189,6 +378,7 @@ describe('BasePaginator', () => { paginator.prev(); expect(paginator.isLoading).toBe(false); }); + it('debounces the pagination to a previous page', async () => { vi.useFakeTimers(); const paginator = new Paginator({ debounceMs: 2000 }); @@ -198,6 +388,7 @@ describe('BasePaginator', () => { expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); vi.advanceTimersByTime(2000); + await toNextTick(); expect(paginator.isLoading).toBe(true); expect(paginator.hasNext).toBe(true); expect(paginator.hasPrev).toBe(true); @@ -210,13 +401,20 @@ describe('BasePaginator', () => { expect(paginator.hasPrev).toBe(true); expect(paginator.items).toEqual([{ id: 'id1' }]); expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ 
direction: 'prev' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'prev', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); vi.useRealTimers(); }); it('prevents pagination if another query is in progress', async () => { const paginator = new Paginator(); const nextPromise1 = paginator.next(); + // wait for the first page load from the DB + await sleep(0); expect(paginator.isLoading).toBe(true); expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); const nextPromise2 = paginator.next(); @@ -225,20 +423,150 @@ describe('BasePaginator', () => { expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); }); + it('resets the state if the query shape changed', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.next(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + paginator.getNextQueryShape.mockReturnValueOnce({ + filters: { id: 'test' }, + sort: { id: -1 }, + }); + nextPromise = paginator.next(); + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.items).toBeUndefined(); + expect(paginator.offset).toBe(0); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.offset).toBe(1); + }); + + it('resets the state if forced', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.next(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + 
expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + nextPromise = paginator.next({ reset: 'yes' }); + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.items).toBeUndefined(); + expect(paginator.offset).toBe(0); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.offset).toBe(1); + }); + + it('does not reset the state if forced', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.next(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + paginator.getNextQueryShape.mockReturnValueOnce({ + filters: { id: 'test' }, + sort: { id: -1 }, + }); + nextPromise = paginator.next({ reset: 'no' }); + await sleep(0); + expect(paginator.items).toStrictEqual([{ id: 'id1' }]); + expect(paginator.offset).toBe(1); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.offset).toBe(2); + }); + it('stores lastQueryError and clears it with the next successful query', async () => { const paginator = new Paginator(); let nextPromise = paginator.next(); + // wait for the first page load from DB + await sleep(0); const error = new Error('Failed'); paginator.queryReject(error); + // hand over to finish the cleanup and state update after the query execution + await sleep(0); + expect(paginator.lastQueryError).toEqual(error); + 
expect(paginator.isLoading).toEqual(false); + + nextPromise = paginator.next(); + paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); await nextPromise; + expect(paginator.lastQueryError).toBeUndefined(); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); + }); + + it('throws error if enabled', async () => { + const paginator = new Paginator({ throwErrors: true }); + let nextPromise = paginator.next(); + // wait for the first page load from DB + await sleep(0); + const error = new Error('Failed'); + paginator.queryReject(error); + await expect(nextPromise).rejects.toThrowError(error); + // hand over to finish the cleanup and state update after the query execution + await sleep(0); expect(paginator.lastQueryError).toEqual(error); + expect(paginator.isLoading).toEqual(false); nextPromise = paginator.next(); + // wait for the first page load from DB + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); + await nextPromise; + expect(paginator.lastQueryError).toBeUndefined(); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); + }); + + it('retries the query', async () => { + vi.useFakeTimers(); + const paginator = new Paginator(); + let nextPromise = paginator.next({ retryCount: 2 }); + // wait for the first page load from DB + await toNextTick(); + const error = new Error('Failed'); + paginator.queryReject(error); + // hand over to finish the cleanup and state update after the query execution + await toNextTick(); + expect(paginator.lastQueryError).toEqual(error); + vi.advanceTimersByTime(DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES); + await toNextTick(); + paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); await nextPromise; expect(paginator.lastQueryError).toBeUndefined(); expect(paginator.items).toEqual([{ id: 'id1' }]); 
expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); + vi.useRealTimers(); }); }); @@ -538,15 +866,118 @@ describe('BasePaginator', () => { }); }); - describe('reload', () => { - it('starts the pagination from the beginning', async () => { - const a: TestItem = { id: 'a', age: 30 }; - const b: TestItem = { id: 'b', age: 25 }; - const c: TestItem = { id: 'c', age: 25 }; - const d: TestItem = { id: 'd', age: 20 }; + describe('setItems', () => { + it('overrides all the items in the state with provided value', () => { + const paginator = new Paginator(); + const items1 = [{ id: 'test-item1' }]; + const items2 = [{ id: 'test-item2' }]; + paginator.setItems(items1); + expect(paginator.items).toStrictEqual(items1); + paginator.setItems(items2); + expect(paginator.items).toStrictEqual(items2); + }); + + const items = [{ id: 'test-item1' }]; + const expectedStateEmissions = [ + { + cursor: undefined, + hasNext: true, + hasPrev: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + offset: 0, + }, + { + cursor: undefined, + hasNext: true, + hasPrev: true, + isLoading: false, + items, + lastQueryError: undefined, + offset: 1, + }, + ]; + + it('emits state change as long as the items are not the same', () => { + const paginator = new Paginator(); + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + expect(subscriptionHandler).toHaveBeenCalledTimes(1); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[0], + undefined, + ); + + paginator.setItems(items); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + // setting an object with the same reference + paginator.setItems(items); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + 
expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + unsubscribe(); + }); + it('emits state change as long as the state factory returns objects with different reference', () => { const paginator = new Paginator(); - const nextSpy = vi.spyOn(paginator, 'next').mockResolvedValue(); + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + + paginator.setItems(() => items); + expect(paginator.items).toStrictEqual(items); + // first call is on subscribe + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + // setting an object with the same reference + paginator.setItems(() => items); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + unsubscribe(); + }); + + it('updates the cursor if provided', () => { + const paginator = new Paginator(); + const cursors: PaginatorCursor[] = [ + { next: 'next1', prev: 'prev1' }, + { next: 'next2', prev: 'prev1' }, + ]; + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + + paginator.setItems(items, cursors[0]); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + { ...expectedStateEmissions[1], cursor: cursors[0], offset: 0 }, + { ...expectedStateEmissions[0], cursor: undefined, offset: 0 }, + ); + + unsubscribe(); + }); + }); + + describe('reload', () => { + it('starts the ended pagination from the beginning', async () => { + const paginator = new Paginator({ pageSize: 2 }); paginator.state.next({ hasNext: false, hasPrev: false, @@ -554,10 +985,67 @@ describe('BasePaginator', () => { items: [{ id: 'a' }, { id: 'b' }, { 
id: 'c' }, { id: 'd' }], offset: 4, }); - await paginator.reload(); - expect(nextSpy).toHaveBeenCalledTimes(1); - expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); - nextSpy.mockRestore(); + let reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(false); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'next', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + + reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2' }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.cursor).toStrictEqual({ next: 'next2', prev: null }); + expect(paginator.offset).toBe(0); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'next', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + + // reset in another direction + reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + expect(paginator.items).toBe(undefined); + + 
paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2' }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.cursor).toStrictEqual({ next: 'next2', prev: null }); + expect(paginator.offset).toBe(0); }); }); diff --git a/test/unit/pagination/ChannelPaginator.test.ts b/test/unit/pagination/ChannelPaginator.test.ts index b33b18a8bc..08877482c7 100644 --- a/test/unit/pagination/ChannelPaginator.test.ts +++ b/test/unit/pagination/ChannelPaginator.test.ts @@ -1,4 +1,4 @@ -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { beforeEach, describe, expect, it, MockInstance, vi } from 'vitest'; import { Channel, type ChannelFilters, @@ -7,10 +7,12 @@ import { ChannelSort, DEFAULT_PAGINATION_OPTIONS, type FilterBuilderGenerators, + PaginatorCursor, type StreamChat, } from '../../../src'; import { getClientWithUser } from '../test-utils/getClient'; import type { FieldToDataResolver } from '../../../src/pagination/types.normalization'; +import { MockOfflineDB } from '../offline-support/MockOfflineDB'; const user = { id: 'custom-id' }; @@ -31,85 +33,116 @@ describe('ChannelPaginator', () => { channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; }); - it('initiates with defaults', () => { - const paginator = new ChannelPaginator({ client }); - expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); - expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, - isLoading: false, - items: undefined, - lastQueryError: undefined, - cursor: undefined, - offset: 0, - }); - expect(paginator.id.startsWith('channel-paginator')).toBeTruthy(); - expect(paginator.sortComparator).toBeDefined(); - - channel1.state.last_message_at = new Date('1970-01-01T08:39:35.235Z'); - channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; + describe('constructor()', () => 
{ + it('initiates with defaults', () => { + const paginator = new ChannelPaginator({ client }); + expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); + expect(paginator.state.getLatestValue()).toEqual({ + hasNext: true, + hasPrev: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: undefined, + offset: 0, + }); + expect(paginator.id.startsWith('channel-paginator')).toBeTruthy(); + expect(paginator.sortComparator).toBeDefined(); - channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); - channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; + channel1.state.last_message_at = new Date('1970-01-01T08:39:35.235Z'); + channel1.data!.updated_at = '1970-01-01T08:39:35.235Z'; - expect(paginator.sortComparator(channel1, channel2)).toBe(1); // channel2 comes before channel1 - expect(paginator.filterBuilder.buildFilters()).toStrictEqual({}); - expect( - paginator.filterBuilder.buildFilters({ baseFilters: paginator.filters }), - ).toStrictEqual({}); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(4); - }); + channel2.state.last_message_at = new Date('1971-01-01T08:39:35.235Z'); + channel2.data!.updated_at = '1971-01-01T08:39:35.235Z'; - it('initiates with options', () => { - const customId = 'custom-id'; - const filterGenerators: FilterBuilderGenerators = { - custom: { - enabled: true, - generate: (context) => context, - }, - }; - const initialFilterBuilderContext = { x: 'y' }; - - channel1.data!.created_at = '1970-01-01T08:39:35.235Z'; - channel2.data!.created_at = '1971-01-01T08:39:35.235Z'; - - const paginator = new ChannelPaginator({ - client, - id: customId, - filterBuilderOptions: { - initialContext: initialFilterBuilderContext, - initialFilterConfig: filterGenerators, - }, - filters: { type: 'type' }, - paginatorOptions: { pageSize: 2 }, - requestOptions: { member_limit: 5 }, - sort: { created_at: 1 }, + 
expect(paginator.sortComparator(channel1, channel2)).toBe(1); // channel2 comes before channel1 + expect(paginator.filterBuilder.buildFilters()).toStrictEqual({}); + expect( + paginator.filterBuilder.buildFilters({ baseFilters: paginator.staticFilters }), + ).toStrictEqual({}); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + expect(paginator.config.doRequest).toBeUndefined(); }); - expect(paginator.pageSize).toBe(2); - expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, - isLoading: false, - items: undefined, - lastQueryError: undefined, - cursor: undefined, - offset: 0, - }); - expect(paginator.id.startsWith(customId)).toBeTruthy(); - expect(paginator.sortComparator(channel1, channel2)).toBe(-1); // channel1 comes before channel2 - expect(paginator.filterBuilder.buildFilters()).toStrictEqual({ - ...initialFilterBuilderContext, - }); - expect( - paginator.filterBuilder.buildFilters({ baseFilters: paginator.filters }), - ).toStrictEqual({ - type: 'type', - ...initialFilterBuilderContext, + it('initiates with options', () => { + const customId = 'custom-id'; + const filterGenerators: FilterBuilderGenerators = { + custom: { + enabled: true, + generate: (context) => context, + }, + }; + const initialFilterBuilderContext = { x: 'y' }; + + channel1.data!.created_at = '1970-01-01T08:39:35.235Z'; + channel2.data!.created_at = '1971-01-01T08:39:35.235Z'; + const doRequest = () => Promise.resolve({ items: [channel1] }); + const hasPaginationQueryShapeChanged = () => true; + const paginatorOptions = { + debounceMs: 45000, + doRequest, + hasPaginationQueryShapeChanged, + initialCursor: { prev: 'prev', next: '' }, + initialOffset: 10, + lockItemOrder: true, + pageSize: 2, + throwErrors: true, + }; + + const paginator = new ChannelPaginator({ + client, + id: customId, + filterBuilderOptions: { + initialContext: initialFilterBuilderContext, + initialFilterConfig: 
filterGenerators, + }, + filters: { type: 'type' }, + paginatorOptions, + requestOptions: { member_limit: 5 }, + sort: { created_at: 1 }, + }); + expect(paginator.pageSize).toBe(2); + expect(paginator.state.getLatestValue()).toEqual({ + hasNext: true, + hasPrev: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: paginatorOptions.initialCursor, + offset: paginatorOptions.initialOffset, + }); + expect(paginator.id.startsWith(customId)).toBeTruthy(); + + expect(paginator.sortComparator(channel1, channel2)).toBe(-1); // channel1 comes before channel2 + expect(paginator.filterBuilder.buildFilters()).toStrictEqual({ + ...initialFilterBuilderContext, + }); + expect( + paginator.filterBuilder.buildFilters({ baseFilters: paginator.staticFilters }), + ).toStrictEqual({ + type: 'type', + ...initialFilterBuilderContext, + }); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + expect(paginator.config.debounceMs).toStrictEqual(paginatorOptions.debounceMs); + expect(paginator.config.doRequest).toStrictEqual(doRequest); + expect(paginator.config.hasPaginationQueryShapeChanged).toStrictEqual( + hasPaginationQueryShapeChanged, + ); + expect(paginator.config.initialCursor).toStrictEqual( + paginatorOptions.initialCursor, + ); + expect(paginator.config.initialOffset).toStrictEqual( + paginatorOptions.initialOffset, + ); + expect(paginator.config.pageSize).toStrictEqual(paginatorOptions.pageSize); + expect(paginator.config.lockItemOrder).toStrictEqual( + paginatorOptions.lockItemOrder, + ); + expect(paginator.config.throwErrors).toStrictEqual(paginatorOptions.throwErrors); }); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(4); }); describe('sortComparator', () => { @@ -371,26 +404,72 @@ describe('ChannelPaginator', () => { lastQueryError: undefined, cursor: undefined, }; - it('filters reset state', () => { + + it('filters 
reset does not reset the paginator state', () => { const paginator = new ChannelPaginator({ client }); paginator.state.partialNext(stateAfterQuery); expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); - paginator.filters = {}; - expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + paginator.staticFilters = {}; + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + expect(paginator.staticFilters).toStrictEqual({}); }); - it('sort reset state', () => { + + it('sort reset does not reset the paginator state updates the comparator', () => { const paginator = new ChannelPaginator({ client }); paginator.state.partialNext(stateAfterQuery); expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + const originalComparator = paginator.sortComparator; paginator.sort = {}; - expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + expect(paginator.sort).toStrictEqual({}); + expect(paginator.sortComparator).not.toEqual(originalComparator); }); - it('options reset state', () => { + + it('options reset does not reset the paginator state', () => { const paginator = new ChannelPaginator({ client }); paginator.state.partialNext(stateAfterQuery); expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); paginator.options = {}; - expect(paginator.state.getLatestValue()).toStrictEqual(paginator.initialState); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + expect(paginator.options).toStrictEqual({}); + }); + + it('channelStateOptions reset does not reset the paginator state', () => { + const paginator = new ChannelPaginator({ client }); + paginator.state.partialNext(stateAfterQuery); + expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + paginator.channelStateOptions = {}; + 
expect(paginator.state.getLatestValue()).toStrictEqual(stateAfterQuery); + expect(paginator.channelStateOptions).toStrictEqual({}); + }); + }); + + describe('setItems', () => { + it('stores the new items in the offlineDB', async () => { + client.setOfflineDBApi(new MockOfflineDB({ client })); + (client.offlineDb!.initializeDB as unknown as MockInstance).mockReturnValue(true); + await client.offlineDb!.init(client.userID as string); + ( + client.offlineDb?.upsertCidsForQuery as unknown as MockInstance + ).mockImplementation(() => Promise.resolve(true)); + + const filters = { id: 'abc' }; + const sort = { id: 1 }; + const items1 = [channel1]; + + const paginator = new ChannelPaginator({ client }); + paginator.staticFilters = filters; + paginator.sort = sort; + + paginator.setItems(items1); + expect(paginator.items).toStrictEqual(items1); + expect( + client.offlineDb?.upsertCidsForQuery as unknown as MockInstance, + ).toHaveBeenCalledWith({ + cids: [channel1.cid], + filters, + sort, + }); }); }); @@ -435,6 +514,7 @@ describe('ChannelPaginator', () => { message_limit: 3, offset: 0, }, + undefined, // channelStateOptions ); }); }); diff --git a/test/unit/utils/mergeWith.test.ts b/test/unit/utils/mergeWith.test.ts index 555b6d791a..6d709f701b 100644 --- a/test/unit/utils/mergeWith.test.ts +++ b/test/unit/utils/mergeWith.test.ts @@ -638,10 +638,13 @@ describe('isEqual', () => { expect(isEqual(true, true)).toBe(true); expect(isEqual(null, null)).toBe(true); expect(isEqual(undefined, undefined)).toBe(true); + expect(isEqual(-0, 0)).toBe(true); }); it('should consider different primitives not equal', () => { expect(isEqual(42, 43)).toBe(false); + expect(isEqual('1', 1)).toBe(false); + expect(isEqual(1, true)).toBe(false); expect(isEqual('hello', 'world')).toBe(false); expect(isEqual(true, false)).toBe(false); expect(isEqual(null, undefined)).toBe(false); @@ -659,6 +662,7 @@ describe('isEqual', () => { expect(isEqual([1, 2, 3], [1, 2, 3])).toBe(true); expect(isEqual([1, 2, 
3], [1, 2, 4])).toBe(false); expect(isEqual([1, 2], [1, 2, 3])).toBe(false); + expect(isEqual([1, 2], [2, 1])).toBe(false); expect(isEqual([], [])).toBe(true); }); @@ -666,6 +670,7 @@ describe('isEqual', () => { expect(isEqual([1, [2, 3]], [1, [2, 3]])).toBe(true); expect(isEqual([1, [2, 3]], [1, [2, 4]])).toBe(false); expect(isEqual([1, [2, [3]]], [1, [2, [3]]])).toBe(true); + expect(isEqual([1], [1, 2])).toBe(false); }); it('should compare objects by value', () => { @@ -684,12 +689,119 @@ describe('isEqual', () => { ); }); + it('ignores property order; compares by keys/values', () => { + expect(isEqual({ a: 1, b: 2 }, { b: 2, a: 1 })).toBe(true); + }); + it('should compare mixed nested structures', () => { expect(isEqual({ a: [1, { b: 2 }] }, { a: [1, { b: 2 }] })).toBe(true); expect(isEqual({ a: [1, { b: 2 }] }, { a: [1, { b: 3 }] })).toBe(false); expect(isEqual([{ a: 1 }, [2, 3]], [{ a: 1 }, [2, 3]])).toBe(true); }); + it('arrays: holes vs explicit undefined are not equal', () => { + const a = [, 1]; // hole at index 0 + const b = [undefined, 1]; + expect(isEqual(a, b)).toBe(false); + }); + + it('symbol keys: equal when both present and equal; unequal when missing or different', () => { + const s1 = Symbol('s'); + const s2 = Symbol('s'); // different identity even if same description + + expect(isEqual({ [s1]: 1 }, { [s1]: 1 })).toBe(true); + expect(isEqual({ [s1]: 1 }, { [s1]: 2 })).toBe(false); + expect(isEqual({ [s1]: 1 }, {})).toBe(false); + expect(isEqual({ [s1]: 1 }, { [s2]: 1 })).toBe(false); + }); + + it('sets: equal contents regardless of order', () => { + const a = new Set([1, 2, 3]); + const b = new Set([3, 2, 1]); + expect(isEqual(a, b)).toBe(true); + }); + + it('sets: unequal when contents differ', () => { + expect(isEqual(new Set([1, 2]), new Set([1, 3]))).toBe(false); + }); + + it('sets: deep equality of object elements', () => { + expect( + isEqual(new Set([{ id: 1 }, { id: 2 }]), new Set([{ id: 2 }, { id: 1 }])), + ).toBe(true); + expect( + 
isEqual(new Set([{ id: 1 }, { id: 1 }]), new Set([{ id: 2 }, { id: 1 }])), + ).toBe(false); + expect( + isEqual(new Set([{ id: 2 }, { id: 1 }]), new Set([{ id: 1 }, { id: 1 }])), + ).toBe(false); + }); + + it('sets: unequal sizes', () => { + expect(isEqual(new Set([1]), new Set([1, 2]))).toBe(false); + }); + + it('sets: identical references are always equal', () => { + const s = new Set([1]); + expect(isEqual(s, s)).toBe(true); + }); + + it('maps: same entries regardless of order', () => { + const a = new Map([ + ['x', 1], + ['y', 2], + ]); + const b = new Map([ + ['y', 2], + ['x', 1], + ]); + expect(isEqual(a, b)).toBe(true); + }); + + it('maps: unequal size', () => { + const a = new Map([['x', 1]]); + const b = new Map([ + ['x', 1], + ['y', 2], + ]); + expect(isEqual(a, b)).toBe(false); + }); + + it('maps: unequal value for same key', () => { + const a = new Map([['x', 1]]); + const b = new Map([['x', 2]]); + expect(isEqual(a, b)).toBe(false); + }); + + it('maps: deep key equality', () => { + const a = new Map([[{ id: 1 }, 'A']]); + const b = new Map([[{ id: 1 }, 'A']]); + expect(isEqual(a, b)).toBe(true); + }); + + it('maps: deep value equality', () => { + const a = new Map([['user', { name: 'Ann' }]]); + const b = new Map([['user', { name: 'Ann' }]]); + expect(isEqual(a, b)).toBe(true); + }); + + it('maps: duplicate keys or values require one-to-one pairing', () => { + const a = new Map([ + [{ id: 1 }, 'x'], + [{ id: 1 }, 'x'], + ]); + const b = new Map([ + [{ id: 1 }, 'x'], + [{ id: 2 }, 'x'], + ]); + expect(isEqual(a, b)).toBe(false); + }); + + it('maps: identical reference maps equal', () => { + const m = new Map([['a', 1]]); + expect(isEqual(m, m)).toBe(true); + }); + it('should handle Date objects', () => { const date1 = new Date('2023-01-01'); const date2 = new Date('2023-01-01'); @@ -698,6 +810,8 @@ describe('isEqual', () => { expect(isEqual(date1, date2)).toBe(true); expect(isEqual(date1, date3)).toBe(false); expect(isEqual({ date: date1 }, { date: 
date2 })).toBe(true); + // invalid dates compare false + expect(isEqual(new Date('x'), new Date('x'))).toBe(false); }); it('should handle RegExp objects', () => { @@ -708,6 +822,18 @@ describe('isEqual', () => { expect(isEqual(regex1, regex2)).toBe(true); expect(isEqual(regex1, regex3)).toBe(false); expect(isEqual({ regex: regex1 }, { regex: regex2 })).toBe(true); + expect(isEqual([regex1, regex2], [regex1, regex2])).toBe(true); + expect(isEqual([regex2, regex1], [regex1, regex2])).toBe(true); + expect(isEqual([regex3, regex1], [regex1, regex3])).toBe(false); + }); + + it('different object prototypes but same enumerable props', () => { + const a = { x: 1 }; + // creates an object without a prototype + const b = Object.create(null); + b.x = 1; + + expect(isEqual(a, b)).toBe(false); }); it('should handle class instances as not equal', () => { @@ -719,6 +845,13 @@ describe('isEqual', () => { expect(isEqual(file1, file1)).toBe(true); // Same reference is equal }); + it('typed arrays / buffers (treated atomically via instance rule)', () => { + const ta1 = new Uint8Array([1, 2, 3]); + const ta2 = new Uint8Array([1, 2, 3]); + expect(isEqual(ta1, ta2)).toBe(false); + expect(isEqual(ta1, ta1)).toBe(true); + }); + it('should handle circular references', () => { const obj1: any = { a: 1 }; obj1.self = obj1; @@ -748,15 +881,17 @@ describe('isEqual', () => { expect(isEqual(obj1, obj3)).toBe(true); expect(isEqual(obj1, obj5)).toBe(false); - }); - it('should compare object property keys correctly', () => { - // Objects with same keys but different order - expect(isEqual({ a: 1, b: 2, c: 3 }, { c: 3, b: 2, a: 1 })).toBe(true); + const a1: any = { n: 1 }, + a2: any = { n: 2 }; + a1.other = a2; + a2.other = a1; + + const b1: any = { n: 1 }, + b2: any = { n: 2 }; + b1.other = b2; + b2.other = b1; - // Ensure keys in second object are correctly checked - const obj1 = { a: 1, b: 2 }; - const obj2 = { a: 1, c: 3 }; - expect(isEqual(obj1, obj2)).toBe(false); + expect(isEqual(a1, 
b1)).toBe(true); }); }); From f4892c60d6ddcea250ed688cc837b489f0f98741 Mon Sep 17 00:00:00 2001 From: martincupela Date: Mon, 10 Nov 2025 15:39:59 +0100 Subject: [PATCH 04/31] feat: add filter resolvers for channel filters archived, app_banned, has_unread, last_updated --- src/pagination/ChannelPaginator.ts | 72 ++++++-- test/unit/pagination/ChannelPaginator.test.ts | 163 +++++++++++++++++- 2 files changed, 220 insertions(+), 15 deletions(-) diff --git a/src/pagination/ChannelPaginator.ts b/src/pagination/ChannelPaginator.ts index 6609ff1d7a..cb11ac7780 100644 --- a/src/pagination/ChannelPaginator.ts +++ b/src/pagination/ChannelPaginator.ts @@ -50,8 +50,15 @@ export type ChannelPaginatorOptions = { }; const getQueryShapeRelevantChannelOptions = (options: ChannelOptions) => { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { limit: _, member_limit: __, message_limit: ___, ...relevantShape } = options; + const { + /* eslint-disable @typescript-eslint/no-unused-vars */ + limit: _, + member_limit: __, + message_limit: ___, + offset: ____, + /* eslint-enable @typescript-eslint/no-unused-vars */ + ...relevantShape + } = options; return relevantShape; }; @@ -69,9 +76,45 @@ const hasPaginationQueryShapeChanged: PaginationQueryShapeChangeIdentifier< }, ); -const pinnedFilterResolver: FieldToDataResolver = { - matchesField: (field) => field === 'pinned', - resolve: (channel) => !!channel.state.membership.pinned_at, +const archivedFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'archived', + resolve: (channel) => !!channel.state.membership.archived_at, +}; + +const appBannedFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'app_banned', + resolve: (channel) => { + const ownUserId = channel.getClient().user?.id; + const otherMembers = Object.values(channel.state.members).filter( + ({ user }) => user?.id !== ownUserId, + ); + // Only applies to channels with exactly 2 members. 
+ if (otherMembers.length !== 1) return false; + const otherMember = otherMembers[0]; + return otherMember.user?.banned ? 'only' : 'excluded'; + }, +}; + +const hasUnreadFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'has_unread', + resolve: (channel) => { + const ownUserId = channel.getClient().user?.id; + return ownUserId && channel.state.read[ownUserId].unread_messages > 0; + }, +}; + +const lastUpdatedFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'last_updated', + resolve: (channel) => { + // combination of last_message_at and updated_at + const lastMessageAt = channel.state.last_message_at?.getTime() ?? null; + const updatedAt = channel.data?.updated_at + ? new Date(channel.data?.updated_at).getTime() + : undefined; + return lastMessageAt !== null && updatedAt !== undefined + ? Math.max(lastMessageAt, updatedAt) + : (lastMessageAt ?? updatedAt); + }, }; const membersFilterResolver: FieldToDataResolver = { @@ -100,6 +143,11 @@ const memberUserNameFilterResolver: FieldToDataResolver = { : [], }; +const pinnedFilterResolver: FieldToDataResolver = { + matchesField: (field) => field === 'pinned', + resolve: (channel) => !!channel.state.membership.pinned_at, +}; + const dataFieldFilterResolver: FieldToDataResolver = { matchesField: () => true, resolve: (channel, path) => resolveDotPathValue(channel.data, path), @@ -111,16 +159,10 @@ const channelSortPathResolver: PathResolver = (channel, path) => { case 'last_message_at': return channel.state.last_message_at; case 'has_unread': { - const userId = channel.getClient().user?.id; - return !!(userId && channel.state.read[userId].unread_messages); + return hasUnreadFilterResolver.resolve(channel, path); } case 'last_updated': { - // combination of last_message_at and updated_at - const lastMessageAt = channel.state.last_message_at?.getTime() ?? 0; - const updatedAt = channel.data?.updated_at - ? 
new Date(channel.data?.updated_at).getTime() - : 0; - return lastMessageAt >= updatedAt ? lastMessageAt : updatedAt; + return lastUpdatedFilterResolver.resolve(channel, path) ?? 0; } case 'pinned_at': return channel.state.membership.pinned_at; @@ -175,6 +217,10 @@ export class ChannelPaginator extends BasePaginator }, }); this.setFilterResolvers([ + archivedFilterResolver, + appBannedFilterResolver, + hasUnreadFilterResolver, + lastUpdatedFilterResolver, pinnedFilterResolver, membersFilterResolver, memberUserNameFilterResolver, diff --git a/test/unit/pagination/ChannelPaginator.test.ts b/test/unit/pagination/ChannelPaginator.test.ts index 08877482c7..9ce1c0564a 100644 --- a/test/unit/pagination/ChannelPaginator.test.ts +++ b/test/unit/pagination/ChannelPaginator.test.ts @@ -61,7 +61,7 @@ describe('ChannelPaginator', () => { paginator.filterBuilder.buildFilters({ baseFilters: paginator.staticFilters }), ).toStrictEqual({}); // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + expect(paginator._filterFieldToDataResolvers).toHaveLength(8); expect(paginator.config.doRequest).toBeUndefined(); }); @@ -125,7 +125,7 @@ describe('ChannelPaginator', () => { ...initialFilterBuilderContext, }); // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(4); + expect(paginator._filterFieldToDataResolvers).toHaveLength(8); expect(paginator.config.debounceMs).toStrictEqual(paginatorOptions.debounceMs); expect(paginator.config.doRequest).toStrictEqual(doRequest); expect(paginator.config.hasPaginationQueryShapeChanged).toStrictEqual( @@ -286,6 +286,165 @@ describe('ChannelPaginator', () => { }); describe('filter resolvers', () => { + const otherUserId = 'other-user'; + it('resolves field "archived"', () => { + const paginator = new ChannelPaginator({ + client, + filters: { members: { $in: [user.id] }, archived: true }, + }); + + channel1.state.members = { + [user.id]: { 
user }, + [otherUserId]: { user: { id: otherUserId } }, + }; + + channel1.state.membership = { + user, + archived_at: '2025-09-03T12:19:39.101089Z', + }; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.state.membership = { + user, + archived_at: undefined, + }; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it('resolves field "app_banned"', () => { + const paginator = new ChannelPaginator({ + client, + filters: { members: { $in: [user.id] }, app_banned: 'only' }, + }); + + channel1.state.members = { + [user.id]: { user }, + [otherUserId]: { user: { id: otherUserId, banned: true } }, + }; + + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.state.members[otherUserId].user!.banned = false; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + + // ===== excluded ==== + paginator.staticFilters = { members: { $in: [user.id] }, app_banned: 'excluded' }; + + channel1.state.members[otherUserId].user!.banned = true; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + + channel1.state.members[otherUserId].user!.banned = false; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + }); + + it('resolves field "has_unread"', () => { + const paginator = new ChannelPaginator({ + client, + filters: { has_unread: true }, + }); + + channel1.state.read = { + [user.id]: { last_read: new Date(2000), unread_messages: 0, user }, + [otherUserId]: { + last_read: new Date(1000), + unread_messages: 1, + user: { id: otherUserId }, + }, + }; + + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + + channel1.state.read[user.id].unread_messages = 1; + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + }); + + describe('resolves field "last_updated"', () => { + it('for primitive filter', () => { + const paginator = new ChannelPaginator({ + client, + filters: { last_updated: new Date(1000).toISOString() }, + }); + channel1.data = { updated_at: undefined }; + channel1.state.last_message_at = new Date(1000); 
+ + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.data = { updated_at: new Date(1000).toISOString() }; + channel1.state.last_message_at = null; + + expect(paginator.matchesFilter(channel1)).toBeTruthy(); + + channel1.data = { updated_at: undefined }; + channel1.state.last_message_at = null; + expect(paginator.matchesFilter(channel1)).toBeFalsy(); + }); + + it.each([ + [ + '$eq', + [ + { val: 1000, expected: true }, + { val: 1001, expected: false }, + { val: 999, expected: false }, + ], + ], + [ + '$gt', + [ + { val: 1000, expected: false }, + { val: 1001, expected: true }, + { val: 999, expected: false }, + ], + ], + [ + '$gte', + [ + { val: 1000, expected: true }, + { val: 1001, expected: true }, + { val: 999, expected: false }, + ], + ], + [ + '$lt', + [ + { val: 1000, expected: false }, + { val: 1001, expected: false }, + { val: 999, expected: true }, + ], + ], + [ + '$lte', + [ + { val: 1000, expected: true }, + { val: 1001, expected: false }, + { val: 999, expected: true }, + ], + ], + ])('for operator %s', (operator, scenarios) => { + const paginator = new ChannelPaginator({ + client, + // @ts-expect-error operator in variable + filters: { last_updated: { [operator]: new Date(1000).toISOString() } }, + }); + + channel1.data = { updated_at: undefined }; + scenarios.forEach(({ val, expected }) => { + channel1.state.last_message_at = new Date(val); + expect(paginator.matchesFilter(channel1)).toBe(expected); + }); + + channel1.state.last_message_at = null; + scenarios.forEach(({ val, expected }) => { + channel1.data = { updated_at: new Date(val).toISOString() }; + expect(paginator.matchesFilter(channel1)).toBe(expected); + }); + + channel1.data = { updated_at: undefined }; + channel1.state.last_message_at = null; + expect(paginator.matchesFilter(channel1)).toBe(false); + }); + }); + it('resolves "pinned" field', () => { const paginator = new ChannelPaginator({ client, From aeaa92d9d55293b9b5d9da01341ff06fb8b50074 Mon Sep 17 00:00:00 2001 
From: martincupela Date: Mon, 10 Nov 2025 17:28:52 +0100 Subject: [PATCH 05/31] feat: allow to keep channels in certain matching paginators and not in other matching paginators --- src/ChannelPaginatorsOrchestrator.ts | 118 ++++++++++++-- .../ChannelPaginatorsOrchestrator.test.ts | 150 +++++++++++++++++- 2 files changed, 251 insertions(+), 17 deletions(-) diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index b3aecabc9e..b9526b1eea 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -22,6 +22,52 @@ type EventHandlerContext = ChannelPaginatorsOrchestratorEventHandlerContext; type SupportedEventType = EventTypes | (string & {}); +/** + * Resolves which paginators should be the "owners" of a channel + * when the channel matches multiple paginator filters. + * + * Return a set of paginator ids that should keep/own the item. + * Returning an empty set means the channel will be removed everywhere. + */ +export type PaginatorOwnershipResolver = (args: { + channel: Channel; + matchingPaginators: ChannelPaginator[]; +}) => string[]; + +/** + * Convenience factory for a priority-based ownership resolver. + * - Provide an ordered list of paginator ids from highest to lowest priority. + * - If two or more paginators match a channel, the one with the highest priority wins. + * - If none of the matching paginator ids are in the priority list, all matches are kept (back-compat). 
+ */ +export const createPriorityOwnershipResolver = ( + priority?: string[], +): PaginatorOwnershipResolver => { + if (!priority) { + return ({ matchingPaginators }) => matchingPaginators.map((p) => p.id); + } + const rank = new Map(priority.map((id, index) => [id, index])); + return ({ matchingPaginators }) => { + if (matchingPaginators.length <= 1) { + return matchingPaginators.map((p) => p.id); + } + // The winner is the first item in the sorted array of matching paginators + const winner = [...matchingPaginators].sort((a, b) => { + const rankA = rank.get(a.id); + const rankB = rank.get(b.id); + const valueA = rankA === undefined ? Number.POSITIVE_INFINITY : rankA; + const valueB = rankB === undefined ? Number.POSITIVE_INFINITY : rankB; + return valueA - valueB; + })[0]; + const winnerValue = rank.get(winner.id); + // If no explicit priority is set for any, keep all (preserve current behavior) + if (winnerValue === undefined) { + return matchingPaginators.map((p) => p.id); + } + return [winner.id]; + }; +}; + const getCachedChannelFromEvent = ( event: Event, cache: Record, @@ -101,25 +147,41 @@ const updateLists: EventHandlerPipelineHandler = async ({ if (!channel) return; + const matchingPaginators = orchestrator.paginators.filter((p) => + p.matchesFilter(channel), + ); + const matchingIds = new Set(matchingPaginators.map((p) => p.id)); + + const ownerIds = orchestrator.resolveOwnership(channel, matchingPaginators); + orchestrator.paginators.forEach((paginator) => { - if (paginator.matchesFilter(channel)) { - const channelBoost = paginator.getBoost(channel.cid); - if ( - [ - 'message.new', - 'notification.message_new', - 'notification.added_to_channel', - 'channel.visible', - ].includes(event.type) && - (!channelBoost || channelBoost.seq < paginator.maxBoostSeq) - ) { - paginator.boost(channel.cid, { seq: paginator.maxBoostSeq + 1 }); - } - paginator.ingestItem(channel); - } else { + if (!matchingIds.has(paginator.id)) { // remove if it does not match the 
filter anymore paginator.removeItem({ item: channel }); + return; + } + + // Only if owners are specified, the item is removed from the non-owner matching paginators + if (ownerIds.size > 0 && !ownerIds.has(paginator.id)) { + // matched, but not selected to own - remove to enforce exclusivity + paginator.removeItem({ item: channel }); + return; + } + + // Selected owner: optionally boost then ingest + const channelBoost = paginator.getBoost(channel.cid); + if ( + [ + 'message.new', + 'notification.message_new', + 'notification.added_to_channel', + 'channel.visible', + ].includes(event.type) && + (!channelBoost || channelBoost.seq < paginator.maxBoostSeq) + ) { + paginator.boost(channel.cid, { seq: paginator.maxBoostSeq + 1 }); } + paginator.ingestItem(channel); }); }; @@ -215,6 +277,13 @@ export type ChannelPaginatorsOrchestratorOptions = { client: StreamChat; paginators?: ChannelPaginator[]; eventHandlers?: ChannelPaginatorsOrchestratorEventHandlers; + /** + * Decide which paginator(s) should own a channel when multiple match. + * Defaults to keeping the channel in all matching paginators. + * Channels are kept only in the paginators that are listed in the ownershipResolver array. + * Empty ownershipResolver array means that the channel is kept in all matching paginators.
+ */ + ownershipResolver?: PaginatorOwnershipResolver | string[]; }; export class ChannelPaginatorsOrchestrator extends WithSubscriptions { @@ -224,6 +293,7 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { SupportedEventType, EventHandlerPipeline >(); + protected ownershipResolver?: PaginatorOwnershipResolver; protected static readonly defaultEventHandlers: ChannelPaginatorsOrchestratorEventHandlers = { @@ -244,10 +314,17 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { client, eventHandlers, paginators, + ownershipResolver, }: ChannelPaginatorsOrchestratorOptions) { super(); this.client = client; this.state = new StateStore({ paginators: paginators ?? [] }); + if (ownershipResolver) { + this.ownershipResolver = Array.isArray(ownershipResolver) + ? createPriorityOwnershipResolver(ownershipResolver) + : ownershipResolver; + } + const finalEventHandlers = eventHandlers ?? ChannelPaginatorsOrchestrator.getDefaultHandlers(); for (const [type, handlers] of Object.entries(finalEventHandlers)) { @@ -281,6 +358,17 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { return out; } + /** + * Which paginators should own the channel among the ones that matched. + * Default behavior keeps the channel in all matching paginators. + */ + resolveOwnership( + channel: Channel, + matchingPaginators: ChannelPaginator[], + ): Set { + return new Set(this.ownershipResolver?.({ channel, matchingPaginators }) ?? 
[]); + } + getPaginatorById(id: string) { return this.paginators.find((p) => p.id === id); } diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts index be4a09966b..a57fff97fb 100644 --- a/test/unit/ChannelPaginatorsOrchestrator.test.ts +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -6,7 +6,10 @@ import { EventTypes, type StreamChat, } from '../../src'; -import { ChannelPaginatorsOrchestrator } from '../../src/ChannelPaginatorsOrchestrator'; +import { + ChannelPaginatorsOrchestrator, + createPriorityOwnershipResolver, +} from '../../src/ChannelPaginatorsOrchestrator'; vi.mock('../../src/pagination/utility.queryChannel', async () => { return { getChannel: vi.fn(async ({ client, id, type }) => { @@ -24,6 +27,146 @@ describe('ChannelPaginatorsOrchestrator', () => { vi.clearAllMocks(); }); + describe('ownershipResolver', () => { + it('keeps channel in all matching paginators by default', async () => { + const ch = makeChannel('messaging:100'); + client.activeChannels[ch.cid] = ch; + + const p1 = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const p2 = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2], + }); + orchestrator.registerSubscriptions(); + + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + await vi.waitFor(() => { + expect(orchestrator.getPaginatorById(p1.id)).toStrictEqual(p1); + expect(orchestrator.getPaginatorById(p2.id)).toStrictEqual(p2); + expect(p1.items).toHaveLength(1); + expect(p1.items![0]).toStrictEqual(ch); + expect(p2.items).toHaveLength(1); + expect(p2.items![0]).toStrictEqual(ch); + }); + }); + + it('keeps channel only in highest-priority matching paginator when resolver provided', async () => { + const pHigh = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const pLow = new ChannelPaginator({ client, filters: { 
type: 'messaging' } }); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [pLow, pHigh], + ownershipResolver: createPriorityOwnershipResolver([pHigh.id, pLow.id]), + }); + + const ch = makeChannel('messaging:101'); + client.activeChannels[ch.cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + + await vi.waitFor(() => { + expect(pHigh.items).toHaveLength(1); + expect(pHigh.items![0]).toStrictEqual(ch); + expect(pLow.items).toBeUndefined(); + }); + }); + + it('keeps item in all priority ownership paginators when resolver returns multiple ids', async () => { + const pHigh = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const pLow = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [pLow, pHigh], + ownershipResolver: () => [pHigh.id, pLow.id], + }); + + const ch = makeChannel('messaging:101'); + client.activeChannels[ch.cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + + await vi.waitFor(() => { + expect(pHigh.items).toHaveLength(1); + expect(pHigh.items![0]).toStrictEqual(ch); + expect(pLow.items).toHaveLength(1); + expect(pLow.items![0]).toStrictEqual(ch); + }); + }); + + it('accepts ownershipResolver as array of ids and applies priority', async () => { + const pLow = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const pHigh = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [pLow, pHigh], + ownershipResolver: [pHigh.id, pLow.id], + }); + + const ch = makeChannel('messaging:102'); + client.activeChannels[ch.cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + + await vi.waitFor(() => { + 
expect(pHigh.items).toHaveLength(1); + expect(pHigh.items![0]).toStrictEqual(ch); + expect(pLow.items).toBeUndefined(); + }); + }); + + it('keeps items only in owner paginators if some matching paginators are not listed in ownershipResolver array', async () => { + const pLow = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const pHigh = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [pLow, pHigh], + ownershipResolver: [pHigh.id], + }); + + const ch = makeChannel('messaging:102'); + client.activeChannels[ch.cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + + await vi.waitFor(() => { + expect(pHigh.items).toHaveLength(1); + expect(pHigh.items![0]).toStrictEqual(ch); + expect(pLow.items).toBeUndefined(); + }); + }); + + it('keeps items only in matching paginators if owner paginators are not matching', async () => { + const p1 = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const p2 = new ChannelPaginator({ client, filters: { type: 'messaging' } }); + const p3 = new ChannelPaginator({ client, filters: { type: 'messagingX' } }); + const orchestrator = new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2, p3], + ownershipResolver: [p3.id], + }); + + const ch = makeChannel('messaging:102'); + client.activeChannels[ch.cid] = ch; + + orchestrator.registerSubscriptions(); + client.dispatchEvent({ type: 'message.new', cid: ch.cid }); + + await vi.waitFor(() => { + expect(p1.items).toHaveLength(1); + expect(p1.items![0]).toStrictEqual(ch); + expect(p2.items).toHaveLength(1); + expect(p2.items![0]).toStrictEqual(ch); + expect(p3.items).toBeUndefined(); + }); + }); + }); + describe('constructor', () => { it('initiates with default options', () => { // @ts-expect-error accessing protected property @@ -381,7 +524,10 @@ 
describe('ChannelPaginatorsOrchestrator', () => { // Helper to create a minimal channel with needed state function makeChannel(cid: string) { const [type, id] = cid.split(':'); - return client.channel(type, id); + const channel = client.channel(type, id); + channel.data!.type = type; + channel.data!.id = id; + return channel; } describe.each(['channel.deleted', 'channel.hidden'] as EventTypes[])( From 8938419ea1cd252d469a1b3dbee462e00340f8e1 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 11 Nov 2025 10:06:16 +0100 Subject: [PATCH 06/31] feat: allow to keep channels in certain matching paginators and not in other matching paginators --- src/ChannelPaginatorsOrchestrator.ts | 46 +++++++++++++++++++ src/pagination/ChannelPaginator.ts | 6 ++- .../ChannelPaginatorsOrchestrator.test.ts | 45 ++++++++++++++++++ 3 files changed, 96 insertions(+), 1 deletion(-) diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index b9526b1eea..077d0c5fb2 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -294,6 +294,8 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { EventHandlerPipeline >(); protected ownershipResolver?: PaginatorOwnershipResolver; + /** Track paginators already wrapped with ownership-aware filtering */ + protected ownershipFilterAppliedPaginators = new WeakSet(); protected static readonly defaultEventHandlers: ChannelPaginatorsOrchestratorEventHandlers = { @@ -330,6 +332,8 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { for (const [type, handlers] of Object.entries(finalEventHandlers)) { if (handlers) this.ensurePipeline(type).replaceAll(handlers); } + // Ensure ownership rules are applied to initial paginators' query results + this.paginators.forEach((p) => this.wrapPaginatorFiltering(p)); } get paginators(): ChannelPaginator[] { @@ -369,6 +373,46 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { return new 
Set(this.ownershipResolver?.({ channel, matchingPaginators }) ?? []); } + /** + * Filter a page of query results for a specific paginator according to ownership rules. + * If no owners are specified by the resolver, all matching paginators keep the item. + */ + protected filterItemsByOwnership({ + paginator, + items, + }: { + paginator: ChannelPaginator; + items: Channel[]; + }): Channel[] { + if (!items.length) return items; + const result: Channel[] = []; + for (const ch of items) { + const matchingPaginators = this.paginators.filter((p) => p.matchesFilter(ch)); + const ownerIds = this.resolveOwnership(ch, matchingPaginators); + const noOwnersOrPaginatorIsOwner = + ownerIds.size === 0 || ownerIds.has(paginator.id); + + if (noOwnersOrPaginatorIsOwner) { + result.push(ch); + } + } + return result; + } + + /** + * Wrap paginator.filterQueryResults so that ownership rules are applied whenever + * the paginator ingests results from a server query (first page and subsequent pages). + */ + protected wrapPaginatorFiltering(paginator: ChannelPaginator) { + if (this.ownershipFilterAppliedPaginators.has(paginator)) return; + const original = paginator.filterQueryResults.bind(paginator); + paginator.filterQueryResults = (items: Channel[]) => { + const filtered = original(items) as Channel[]; + return this.filterItemsByOwnership({ paginator, items: filtered }); + }; + this.ownershipFilterAppliedPaginators.add(paginator); + } + getPaginatorById(id: string) { return this.paginators.find((p) => p.id === id); } @@ -392,6 +436,8 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { ); paginators.splice(validIndex, 0, paginator); this.state.partialNext({ paginators }); + // Wrap newly inserted paginator to enforce ownership on query results + this.wrapPaginatorFiltering(paginator); } addEventHandler({ diff --git a/src/pagination/ChannelPaginator.ts b/src/pagination/ChannelPaginator.ts index cb11ac7780..57ca67be69 100644 --- a/src/pagination/ChannelPaginator.ts 
+++ b/src/pagination/ChannelPaginator.ts @@ -99,7 +99,11 @@ const hasUnreadFilterResolver: FieldToDataResolver = { matchesField: (field) => field === 'has_unread', resolve: (channel) => { const ownUserId = channel.getClient().user?.id; - return ownUserId && channel.state.read[ownUserId].unread_messages > 0; + return ( + ownUserId && + channel.state.read[ownUserId] && + channel.state.read[ownUserId].unread_messages > 0 + ); }, }; diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts index a57fff97fb..28bde42cd1 100644 --- a/test/unit/ChannelPaginatorsOrchestrator.test.ts +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -165,6 +165,51 @@ describe('ChannelPaginatorsOrchestrator', () => { expect(p3.items).toBeUndefined(); }); }); + + it('applies ownership rules to paginators when they paginate', async () => { + const ch1 = makeChannel('messaging:101'); + const ch2 = makeChannel('messaging:102'); + const queryChannelSpy = vi.spyOn(client, 'queryChannels').mockResolvedValue([ch1]); + const p1 = new ChannelPaginator({ + client, + filters: { type: 'messaging' }, + id: 'p1', + paginatorOptions: { pageSize: 1 }, + }); + const p2 = new ChannelPaginator({ + client, + filters: { type: 'messaging' }, + id: 'p2', + paginatorOptions: { pageSize: 1 }, + }); + new ChannelPaginatorsOrchestrator({ + client, + paginators: [p1, p2], + ownershipResolver: [p2.id], + }); + + await Promise.all([p1, p2].map((p) => p.next())); + + await vi.waitFor(() => { + expect(p1.items).toHaveLength(0); + // even though ownership claimed by p2, it is still possible to request next page. 
+ expect(p1.hasNext).toBe(true); + expect(p2.items).toHaveLength(1); + expect(p2.items).toStrictEqual([ch1]); + expect(p2.hasNext).toBe(true); + }); + + queryChannelSpy.mockResolvedValue([ch2]); + await Promise.all([p1, p2].map((p) => p.next())); + + await vi.waitFor(() => { + expect(p1.items).toHaveLength(0); + expect(p1.hasNext).toBe(true); + expect(p2.items).toHaveLength(2); + expect(p2.items).toStrictEqual([ch1, ch2]); + expect(p2.hasNext).toBe(true); + }); + }); }); describe('constructor', () => { From 03614c77598109972eebf3c6dd426bb10ea119a0 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 11 Nov 2025 10:36:24 +0100 Subject: [PATCH 07/31] fix: do not remove channel from paginator on channel.hidden --- src/ChannelPaginatorsOrchestrator.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index 077d0c5fb2..bbfc7f5bfe 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -300,7 +300,6 @@ export class ChannelPaginatorsOrchestrator extends WithSubscriptions { protected static readonly defaultEventHandlers: ChannelPaginatorsOrchestratorEventHandlers = { 'channel.deleted': [channelDeletedHandler], - 'channel.hidden': [channelDeletedHandler], 'channel.updated': [channelUpdatedHandler], 'channel.truncated': [channelTruncatedHandler], 'channel.visible': [channelVisibleHandler], From 01876963bdf49548fcab5117b9ebe32651a15d52 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 18 Nov 2025 15:50:19 +0100 Subject: [PATCH 08/31] chore: move paginators to a dedicated folder --- src/pagination/index.ts | 4 +--- .../{ => paginators}/BasePaginator.ts | 19 +++++++++------- .../{ => paginators}/ChannelPaginator.ts | 22 +++++++++---------- .../{ => paginators}/ReminderPaginator.ts | 4 ++-- src/pagination/paginators/index.ts | 3 +++ 5 files changed, 28 insertions(+), 24 deletions(-) rename src/pagination/{ => paginators}/BasePaginator.ts (97%) rename 
src/pagination/{ => paginators}/ChannelPaginator.ts (95%) rename src/pagination/{ => paginators}/ReminderPaginator.ts (96%) create mode 100644 src/pagination/paginators/index.ts diff --git a/src/pagination/index.ts b/src/pagination/index.ts index 733c5efe8c..2b0bd0d523 100644 --- a/src/pagination/index.ts +++ b/src/pagination/index.ts @@ -1,4 +1,2 @@ -export * from './BasePaginator'; -export * from './ChannelPaginator'; +export * from './paginators'; export * from './FilterBuilder'; -export * from './ReminderPaginator'; diff --git a/src/pagination/BasePaginator.ts b/src/pagination/paginators/BasePaginator.ts similarity index 97% rename from src/pagination/BasePaginator.ts rename to src/pagination/paginators/BasePaginator.ts index 6912554dfe..938dd64792 100644 --- a/src/pagination/BasePaginator.ts +++ b/src/pagination/paginators/BasePaginator.ts @@ -1,11 +1,14 @@ -import { binarySearchInsertIndex } from './sortCompiler'; -import { itemMatchesFilter } from './filterCompiler'; -import { isPatch, StateStore, type ValueOrPatch } from '../store'; -import { debounce, type DebouncedFunc, sleep } from '../utils'; -import type { FieldToDataResolver } from './types.normalization'; -import { locateOnPlateauAlternating, locateOnPlateauScanOneSide } from './utility.search'; -import { isEqual } from '../utils/mergeWith/mergeWithCore'; -import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../constants'; +import { binarySearchInsertIndex } from '../sortCompiler'; +import { itemMatchesFilter } from '../filterCompiler'; +import { isPatch, StateStore, type ValueOrPatch } from '../../store'; +import { debounce, type DebouncedFunc, sleep } from '../../utils'; +import type { FieldToDataResolver } from '../types.normalization'; +import { + locateOnPlateauAlternating, + locateOnPlateauScanOneSide, +} from '../utility.search'; +import { isEqual } from '../../utils/mergeWith/mergeWithCore'; +import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../constants'; const 
noOrderChange = () => 0; diff --git a/src/pagination/ChannelPaginator.ts b/src/pagination/paginators/ChannelPaginator.ts similarity index 95% rename from src/pagination/ChannelPaginator.ts rename to src/pagination/paginators/ChannelPaginator.ts index 57ca67be69..7a3a805a2d 100644 --- a/src/pagination/ChannelPaginator.ts +++ b/src/pagination/paginators/ChannelPaginator.ts @@ -6,22 +6,22 @@ import type { PaginatorState, } from './BasePaginator'; import { BasePaginator } from './BasePaginator'; -import type { FilterBuilderOptions } from './FilterBuilder'; -import { FilterBuilder } from './FilterBuilder'; -import { makeComparator } from './sortCompiler'; -import { generateUUIDv4 } from '../utils'; -import type { StreamChat } from '../client'; -import type { Channel } from '../channel'; +import type { FilterBuilderOptions } from '../FilterBuilder'; +import { FilterBuilder } from '../FilterBuilder'; +import { makeComparator } from '../sortCompiler'; +import { generateUUIDv4 } from '../../utils'; +import type { StreamChat } from '../../client'; +import type { Channel } from '../../channel'; import type { ChannelFilters, ChannelOptions, ChannelSort, ChannelStateOptions, -} from '../types'; -import type { FieldToDataResolver, PathResolver } from './types.normalization'; -import { resolveDotPathValue } from './utility.normalization'; -import type { ValueOrPatch } from '../store'; -import { isEqual } from '../utils/mergeWith/mergeWithCore'; +} from '../../types'; +import type { FieldToDataResolver, PathResolver } from '../types.normalization'; +import { resolveDotPathValue } from '../utility.normalization'; +import type { ValueOrPatch } from '../../store'; +import { isEqual } from '../../utils/mergeWith/mergeWithCore'; const DEFAULT_BACKEND_SORT: ChannelSort = { last_message_at: -1, updated_at: -1 }; // {last_updated: -1} diff --git a/src/pagination/ReminderPaginator.ts b/src/pagination/paginators/ReminderPaginator.ts similarity index 96% rename from 
src/pagination/ReminderPaginator.ts rename to src/pagination/paginators/ReminderPaginator.ts index 9bbf56c5ce..8cf23b914d 100644 --- a/src/pagination/ReminderPaginator.ts +++ b/src/pagination/paginators/ReminderPaginator.ts @@ -9,8 +9,8 @@ import type { ReminderFilters, ReminderResponse, ReminderSort, -} from '../types'; -import type { StreamChat } from '../client'; +} from '../../types'; +import type { StreamChat } from '../../client'; export class ReminderPaginator extends BasePaginator< ReminderResponse, diff --git a/src/pagination/paginators/index.ts b/src/pagination/paginators/index.ts new file mode 100644 index 0000000000..1c5fbb4d44 --- /dev/null +++ b/src/pagination/paginators/index.ts @@ -0,0 +1,3 @@ +export * from './BasePaginator'; +export * from './ChannelPaginator'; +export * from './ReminderPaginator'; From 53941efebdb5fb964840fc604f5f52d3c915d98e Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 21 Nov 2025 16:53:59 +0100 Subject: [PATCH 09/31] feat: introduce intervals to BasePaginator --- src/pagination/ItemIndex.ts | 97 ++ src/pagination/paginators/BasePaginator.ts | 1149 ++++++++++++++--- src/pagination/paginators/ChannelPaginator.ts | 19 +- src/pagination/sortCompiler.ts | 120 +- src/pagination/types.normalization.ts | 9 +- 5 files changed, 1175 insertions(+), 219 deletions(-) create mode 100644 src/pagination/ItemIndex.ts diff --git a/src/pagination/ItemIndex.ts b/src/pagination/ItemIndex.ts new file mode 100644 index 0000000000..18310c9e4a --- /dev/null +++ b/src/pagination/ItemIndex.ts @@ -0,0 +1,97 @@ +/** + * The ItemIndex is a canonical, ID-addressable storage layer for domain items. + * + * It provides a single source of truth for all items managed by one or more + * paginators, views, or interval caches. Instead of duplicating objects inside + * multiple paginated ranges, every item is stored exactly once in the ItemIndex + * and is referenced by ID from interval windows, caches, or UI layers. 
+ * + * ## Purpose + * + * Pagination flows (especially those supporting random-access page jumps + * or “load-around-anchor” requests) require representing discontinuous windows + * of items. Attempting to store full item objects in every interval causes + * duplication, inconsistent updates, increased memory usage, and difficult + * merging logic. + * + * The ItemIndex solves this by: + * + * - Storing each item exactly once. + * - Making all intervals store only `itemIds: string[]` in sorted order. + * - Making paginators read visible items through `itemIndex.get(id)`. + * - Ensuring that any mutation of an item is immediately visible everywhere. + * + * ## Benefits + * + * - **Consistency:** Updates propagate automatically because intervals reference + * items by ID. No need to synchronize multiple arrays of objects. + * - **Efficiency:** Items are only stored once; intervals are lightweight lists + * of IDs. + * - **Scalability:** Supports multiple disjoint intervals (e.g. random jumps), + * merging of ranges, and multiple independent paginators sharing the same + * item set. + * - **Clean separation of concerns:** The paginator manages window boundaries; + * the ItemIndex manages object identity and update semantics. + * + * ## Typical Usage + * + * 1. A paginator fetches a page of items from the server. + * 2. It calls `itemIndex.setMany(fetchedItems)` to update the canonical store. + * 3. It constructs or updates an interval using the IDs only: + * `{ itemIds: fetchedItems.map(item => itemIndex.getId(item)) }` + * 4. The UI renders the active interval’s items using: + * `interval.itemIds.map(id => itemIndex.get(id))` + * + * ## Update Semantics + * + * Updates should always be performed through `setOne()` or `setMany()`. + * This ensures that: + * + * - The item object is replaced (immutable semantics). + * - All consumers reading via ID immediately observe the new value. 
+ * + * The ItemIndex does not automatically re-sort intervals; interval or paginator + * logic may reorder their `itemIds` arrays when necessary. + * + * ## Notes + * + * - The ItemIndex does not apply filtering or sorting. Those are the paginator’s + * responsibilities. + * - The ItemIndex intentionally exposes only minimal CRUD operations to keep it + * predictable and side-effect-free. + * - Consumers should treat items as immutable snapshots. If mutation is needed, + * always create a new item instance and pass it to `setOne()`. + * + * @template T The domain item type managed by the index. + */ +export class ItemIndex { + private byId = new Map(); + + constructor(private getId: (item: T) => string) {} + + setMany(items: T[]) { + for (const item of items) { + this.byId.set(this.getId(item), item); + } + } + + setOne(item: T) { + this.byId.set(this.getId(item), item); + } + + get(id: string): T | undefined { + return this.byId.get(id); + } + + has(id: string): boolean { + return this.byId.has(id); + } + + remove(id: string) { + this.byId.delete(id); + } + + entries() { + return [...this.byId.entries()]; + } +} diff --git a/src/pagination/paginators/BasePaginator.ts b/src/pagination/paginators/BasePaginator.ts index 938dd64792..43fdfd2b10 100644 --- a/src/pagination/paginators/BasePaginator.ts +++ b/src/pagination/paginators/BasePaginator.ts @@ -1,17 +1,270 @@ -import { binarySearchInsertIndex } from '../sortCompiler'; +import type { ItemLocation } from '../sortCompiler'; +import { binarySearch } from '../sortCompiler'; import { itemMatchesFilter } from '../filterCompiler'; import { isPatch, StateStore, type ValueOrPatch } from '../../store'; -import { debounce, type DebouncedFunc, sleep } from '../../utils'; -import type { FieldToDataResolver } from '../types.normalization'; import { - locateOnPlateauAlternating, - locateOnPlateauScanOneSide, -} from '../utility.search'; + debounce, + type DebouncedFunc, + generateUUIDv4, + normalizeQuerySort, + sleep, +} from 
'../../utils'; +import type { FieldToDataResolver } from '../types.normalization'; +import { ComparisonResult } from '../types.normalization'; +import type { ItemIndex } from '../ItemIndex'; import { isEqual } from '../../utils/mergeWith/mergeWithCore'; import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../constants'; +import type { AscDesc } from '../..'; +import { + normalizeStringAccentInsensitive, + toEpochMillis, + toNumberLike, +} from '../utility.normalization'; const noOrderChange = () => 0; +const LIVE_HEAD_INTERVAL_ID = '__live_head__'; +const LIVE_TAIL_INTERVAL_ID = '__live_tail__'; +const MISSING_LOW = Number.NEGATIVE_INFINITY; // "smaller than anything" +const MISSING_HIGH = Number.POSITIVE_INFINITY; // "bigger than anything" + +type SortKeyScalar = number | string | null; + +/** + * Normalize a raw field value into a comparable scalar. + * + * Rules: + * - Date / ISO / epoch-like → epoch millis (number) + * - numeric-like string → number + * - boolean → boolean (or 0/1, see below) + * - string → normalized string (case/accent insensitive) + * - everything else → stringified fallback + */ +function normalizeForSort(x: unknown): SortKeyScalar { + // 1) Date-like + const d = toEpochMillis(x); + if (d !== null) return d; + + // 2) numeric-like + const n = toNumberLike(x); + if (n !== null) return n; + + // 3) boolean + if (typeof x === 'boolean') return x ? 1 : 0; + + // 4) string (accent-insensitive) + if (typeof x === 'string') { + return normalizeStringAccentInsensitive(x); + } + + // 5) fallback + return x == null ? null : String(x); +} + +/** + * Sortable value that represents the item according to the paginator’s comparator. + * A comparable key that lets you determine: + * “Does this item fall inside the sort boundaries of any given interval?” + */ +export type SortKey = number[]; + +// Encodes a string into a numeric sequence suitable for lexicographic comparison. 
+// 0 as a terminal sentinel ensures shorter prefix strings sort before longer ones (e.g. "a" before "aa"). +const STRING_SENTINEL_ASC = 0; + +function encodeStringComponents(s: string, direction: 1 | -1): number[] { + // Ascending: [charCode+1, ..., charCode+1, 0] + const base: number[] = []; + for (let i = 0; i < s.length; i++) { + base.push(s.charCodeAt(i) + 1); // > 0 + } + base.push(STRING_SENTINEL_ASC); // 0 < any charCode+1 + + // Descending = element-wise sign flip of the ascending sequence + if (direction === 1) return base; + return base.map((v) => -v); +} + +/** Compare two SortKeys. */ +export function compareSortKeys(a: SortKey, b: SortKey): number { + if (typeof a !== 'object' && typeof b !== 'object') { + return a < b + ? ComparisonResult.A_PRECEDES_B + : a > b + ? ComparisonResult.A_COMES_AFTER_B + : ComparisonResult.A_IS_EQUAL_TO_B; + } + + const arrA = a as (number | string)[]; + const arrB = b as (number | string)[]; + + const len = Math.min(arrA.length, arrB.length); + for (let i = 0; i < len; i++) { + if (arrA[i] < arrB[i]) return ComparisonResult.A_PRECEDES_B; + if (arrA[i] > arrB[i]) return ComparisonResult.A_COMES_AFTER_B; + } + + return arrA.length - arrB.length; +} + +function minSortKey(a: SortKey, b: SortKey): SortKey { + return compareSortKeys(a, b) <= 0 ? a : b; +} + +function maxSortKey(a: SortKey, b: SortKey): SortKey { + return compareSortKeys(a, b) >= 0 ? a : b; +} + +function mergeUniqueStrings(a: string[], b: string[]): string[] { + const set = new Set(a); + for (const id of b) { + if (!set.has(id)) { + set.add(id); + a.push(id); + } + } + return a; +} + +type Sort = Record; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +type PathResolver = (item: T, path: string) => any; + +export type LogicalInterval = { + itemIds: string[]; + id: typeof LIVE_HEAD_INTERVAL_ID | typeof LIVE_TAIL_INTERVAL_ID; + /** Key of the first item according to sorting. 
*/ + startKey: SortKey; + /** Key of the last item according to sorting. */ + endKey: SortKey; +}; + +export type Interval = { + itemIds: string[]; + id: string; + /** Key of the first item according to sorting. */ + startKey: SortKey; + /** Key of the last item according to sorting. */ + endKey: SortKey; + /** + * True if this interval represents the global head of the dataset + * under the current sortComparator. + * + * Cursor pagination: + * prev === null + * + * Offset pagination: + * offset === 0 + */ + isHead?: boolean; + /** + * True if this interval represents the global tail of the dataset + * under the current sortComparator. + * + * Cursor pagination: + * next === null + * + * Offset pagination: + * returnedItems.length < pageSize + */ + isTail?: boolean; +}; + +export type AnyInterval = Interval | LogicalInterval; + +export type ItemCoordinates = { + /** Location inside state.items (visible list) */ + state?: ItemLocation; + /** Location inside an interval (anchored or logical) */ + interval?: ItemLocation & { + interval: Interval | LogicalInterval; + }; +}; + +const isLiveHeadInterval = (interval: AnyInterval): interval is LogicalInterval => + interval.id === LIVE_HEAD_INTERVAL_ID; + +const isLiveTailInterval = (interval: AnyInterval): interval is LogicalInterval => + interval.id === LIVE_TAIL_INTERVAL_ID; + +/** + * Returns true if intervals A and B overlap. 
+ * + * Overlap condition: + * A.startKey ≤ B.endKey AND B.startKey ≤ A.endKey + */ +function intervalsOverlap(a: Interval, b: Interval): boolean { + return ( + compareSortKeys(a.startKey, b.endKey) <= 0 && + compareSortKeys(b.startKey, a.endKey) <= 0 + ); +} + +function cloneInterval(interval: Interval): Interval { + return { + ...interval, + itemIds: [...interval.itemIds], + }; +} + +function mergeTwoAnchoredIntervals(preceding: Interval, following: Interval): Interval { + return { + ...preceding, + itemIds: mergeUniqueStrings([...preceding.itemIds], following.itemIds), + startKey: minSortKey(preceding.startKey, following.startKey), + endKey: maxSortKey(preceding.endKey, following.endKey), + isHead: preceding.isHead || following.isHead, + isTail: preceding.isTail || following.isTail, + }; +} + +/** + * Merges anchored intervals. Returns null if there are no intervals to merge. + */ +function mergeAnchoredIntervals(intervals: Interval[]): Interval | null { + if (intervals.length === 0) return null; + + const intervalsCopy = [...intervals]; + intervalsCopy.sort((a, b) => compareSortKeys(a.startKey, b.startKey)); + + let acc = cloneInterval(intervalsCopy[0]); + for (let i = 1; i < intervalsCopy.length; i++) { + const next = intervalsCopy[i]; + acc = mergeTwoAnchoredIntervals(acc, next); + } + + return acc; +} + +/** + * Whether a SortKey belongs to an anchored interval. 
+ */ +function belongsToInterval(itemSortKey: SortKey, interval: Interval): boolean { + return ( + compareSortKeys(itemSortKey, interval.startKey) >= 0 && + compareSortKeys(itemSortKey, interval.endKey) <= 0 + ); +} + +export type MakeIntervalParams = { + page: T[]; + isHead?: boolean; + isTail?: boolean; +}; + +export type SetPaginatorItemsParams = { + valueOrFactory: ValueOrPatch; + cursor?: PaginatorCursor; + isFirstPage?: boolean; + isLastPage?: boolean; +}; + +type MergeIntervalsResult = { + logicalHead: LogicalInterval | null; + merged: Interval | null; + logicalTail: LogicalInterval | null; +}; + type PaginationDirection = 'next' | 'prev'; export type PaginatorCursor = { next: string | null; prev: string | null }; type StateResetPolicy = 'auto' | 'yes' | 'no' | (string & {}); @@ -67,6 +320,8 @@ export type PaginatorOptions = { initialCursor?: PaginatorCursor; /** In case of offset pagination, specify the initial offset value. */ initialOffset?: number; + /** If item index is provided, this index ensures updates in place and all consumers have access to a single source of data. */ + itemIndex?: ItemIndex; /** Will prevent changing the index of existing items. */ lockItemOrder?: boolean; /** The item page size to be requested from the server. */ @@ -79,6 +334,7 @@ type OptionalPaginatorConfigFields = | 'doRequest' | 'initialCursor' | 'initialOffset' + | 'itemIndex' | 'throwErrors'; export type BasePaginatorConfig = Pick< @@ -103,46 +359,38 @@ export const DEFAULT_PAGINATION_OPTIONS: BasePaginatorConfig = { export abstract class BasePaginator { state: StateStore>; config: BasePaginatorConfig; + + /** + * Intervals keep items in disconnected ranges. + * That is a scenario of jumping to non-sequential pages. + * Intervals are populated only if itemIndex is provided. + */ + protected _itemIntervals: Map = new Map(); + protected _activeIntervalId: string | undefined; + + /** + * ItemIndex is a canonical, ID-addressable storage layer for domain items. 
+ * It serves as a single source of truth for all those that need to access the items + * outside of the paginator. + */ + protected _itemIndex: ItemIndex | undefined; + protected _executeQueryDebounced!: DebouncedExecQueryFunction; protected _isCursorPagination = false; /** Last effective query shape produced by subclass for the most recent request. */ protected _lastQueryShape?: Q; protected _nextQueryShape?: Q; - /** - * Comparison function used to keep items in a paginator sorted. - * - * The comparator must follow the standard contract of `Array.prototype.sort`: - * - return a negative number if `a` should come before `b` - * - return a positive number if `a` should come after `b` - * - return 0 if they are considered equal for ordering - * - * Typical implementations are generated from a "sort spec" (e.g. `{ field: 1, otherField: -1 }`) - * so that insertion and pagination can maintain the same order as the backend. - * - * Notes: - * - The comparator must be deterministic: the same inputs always return - * the same result. - * - If multiple fields are used, they are evaluated in order of normalized sort ({ direction: AscDesc; field: keyof T }[]) - * until a non-zero comparison is found. - * - Equality (0) does not imply object identity; it only means neither item - * is considered greater than the other by the sort rules. 
- */ + sortComparator: (a: T, b: T) => number; - /** - * Allows defining data extraction logic for filter fields like member.user.name or members - * @protected - */ protected _filterFieldToDataResolvers: FieldToDataResolver[]; - /** - * Ephemeral priority for attention UX without breaking sort invariants - * @protected - */ + protected boosts = new Map(); - protected _maxBoostSeq: number = 0; + protected _maxBoostSeq = 0; protected constructor({ initialCursor, initialOffset, + itemIndex, ...options }: PaginatorOptions = {}) { this.config = { @@ -160,8 +408,13 @@ export abstract class BasePaginator { this.setDebounceOptions({ debounceMs }); this.sortComparator = noOrderChange; this._filterFieldToDataResolvers = []; + this._itemIndex = itemIndex; } + // --------------------------------------------------------------------------- + // Basic getters + // --------------------------------------------------------------------------- + get lastQueryError() { return this.state.getLatestValue().lastQueryError; } @@ -194,9 +447,9 @@ export abstract class BasePaginator { get initialState(): PaginatorState { return { hasNext: true, - hasPrev: true, //todo: check if optimistic value does not cause problems in UI + hasPrev: true, isLoading: false, - items: undefined, // todo: maybe should be null? + items: undefined, lastQueryError: undefined, cursor: this.config.initialCursor, offset: this.config.initialOffset ?? 0, @@ -240,12 +493,42 @@ export abstract class BasePaginator { return this._maxBoostSeq; } + protected get itemIntervals(): AnyInterval[] { + return Array.from(this._itemIntervals.values()); + } + + protected get liveHeadLogical(): LogicalInterval | undefined { + const itv = this._itemIntervals.get(LIVE_HEAD_INTERVAL_ID); + return itv && isLiveHeadInterval(itv) ? itv : undefined; + } + + protected get liveTailLogical(): LogicalInterval | undefined { + const itv = this._itemIntervals.get(LIVE_TAIL_INTERVAL_ID); + return itv && isLiveTailInterval(itv) ? 
itv : undefined; + } + + protected get usesItemIntervalStorage(): boolean { + return !!this._itemIndex; + } + + // --------------------------------------------------------------------------- + // Abstracts + // --------------------------------------------------------------------------- + abstract query( params: PaginationQueryParams, ): Promise>; abstract filterQueryResults(items: T[]): T[] | Promise; + /** + * Should be implemented in child classes based on the specific sort requirements of the child classes. + * Should return a value according to which the given item can be correctly inserted into the target item interval + * based on the current sort rules. + * @param item + */ + abstract computeSortKey(item: T): SortKey; + /** * Subclasses must return the query shape. */ @@ -256,41 +539,83 @@ export abstract class BasePaginator { throw new Error('Paginator.getNextQueryShape() is not implemented'); } - /** - * Decide whether a param change between queries requires a state reset. - * Default: deep inequality => reset. - * Subclasses can override to implement domain rules - * (e.g. ChannelPaginator filters {cid: { $in: string[]}} with different CIDs may be required not to lead to reset). 
- */ - protected shouldResetStateBeforeQuery( - prevQueryShape: unknown | undefined, - nextQueryShape: unknown | undefined, - ): boolean { - return ( - typeof prevQueryShape === 'undefined' || - this.config.hasPaginationQueryShapeChanged(prevQueryShape, nextQueryShape) - ); - } - protected buildFilters(): object | null { return null; // === no filters } - getItemId(item: T): string { - return (item as { id: string }).id; - } - matchesFilter(item: T): boolean { const filters = this.buildFilters(); - - // no filters => accept all if (filters == null) return true; - return itemMatchesFilter(item, filters, { resolvers: this._filterFieldToDataResolvers, }); } + setFilterResolvers(resolvers: FieldToDataResolver[]) { + this._filterFieldToDataResolvers = resolvers; + } + + addFilterResolvers(resolvers: FieldToDataResolver[]) { + this._filterFieldToDataResolvers.push(...resolvers); + } + + // --------------------------------------------------------------------------- + // Item accessors + // --------------------------------------------------------------------------- + getItemId(item: T): string { + return (item as { id: string }).id; + } + + getItem(id: string | undefined): T | undefined { + return typeof id === 'string' ? this._itemIndex?.get(id) : undefined; + } + + // --------------------------------------------------------------------------- + // Sort key generator (optional helper) + // --------------------------------------------------------------------------- + + /** + * Factory function to create a sort key generator. + * Sort key generation must be consistent with the comparator logic. + * + * The resulting SortKey is an array of numbers, e.g. + * [{last_updated_at}, {}] + */ + makeSortKeyGenerator({ + sort, + resolvePathValue, + }: { + sort: Sort | Sort[]; + resolvePathValue: PathResolver; + }): (item: T) => SortKey { + const normalizedSort = normalizeQuerySort(sort); // [{ field, direction }, ...] 
+ + return (item: T): SortKey => { + const key: SortKey = []; + + for (const { field, direction } of normalizedSort) { + const raw = resolvePathValue(item, field); + const normalized = normalizeForSort(raw); + if (normalized === null) { + // No usable value → push a sentinel that depends on direction. + key.push(direction === 1 ? MISSING_LOW : MISSING_HIGH); + } else if (typeof normalized === 'number') { + key.push(direction === 1 ? normalized : -normalized); + } else { + // string + // If most of your sorts are numeric/date and string sorts are asc-only, + // you can just store the string as-is: + key.push(...encodeStringComponents(normalized, direction)); + } + } + return key; + }; + } + + // --------------------------------------------------------------------------- + // Boosts + // --------------------------------------------------------------------------- + protected clearExpiredBoosts(now = Date.now()) { for (const [id, b] of this.boosts) if (now > b.until) this.boosts.delete(id); this._maxBoostSeq = Math.max( @@ -299,7 +624,11 @@ export abstract class BasePaginator { ); } - /** Comparator that consults boosts first, then falls back to sortComparator */ + /** + * Applied by the effectiveComparator to take into consideration item boosts when sorting items. + * @param a + * @param b + */ protected boostComparator = (a: T, b: T): number => { const now = Date.now(); this.clearExpiredBoosts(now); @@ -316,25 +645,27 @@ export abstract class BasePaginator { if (!aIsBoosted && bIsBoosted) return 1; if (aIsBoosted && bIsBoosted) { - // higher seq wins const seqDistance = (boostB.seq ?? 0) - (boostA.seq ?? 0); if (seqDistance !== 0) return seqDistance > 0 ? 1 : -1; - // fall through to normal comparator for stability } return this.sortComparator(a, b); }; - /** Public API to manage boosts */ - boost(id: string, opts?: { ttlMs?: number; until?: number; seq?: number }) { + /** + * Increases the item's importance when sorting. 
+ * @param itemId + * @param opts + */ + boost(itemId: string, opts?: { ttlMs?: number; until?: number; seq?: number }) { const now = Date.now(); - const until = opts?.until ?? (opts?.ttlMs != null ? now + opts.ttlMs : now + 15000); // default 15s + const until = opts?.until ?? (opts?.ttlMs != null ? now + opts.ttlMs : now + 15000); if (typeof opts?.seq === 'number' && opts.seq > this._maxBoostSeq) { this._maxBoostSeq = opts.seq; } const seq = opts?.seq ?? 0; - this.boosts.set(id, { until, seq }); + this.boosts.set(itemId, { until, seq }); } getBoost(id: string) { @@ -354,145 +685,536 @@ export abstract class BasePaginator { return !!(boost && Date.now() <= boost.until); } - ingestItem(ingestedItem: T): boolean { - const items = this.items ?? []; - const id = this.getItemId(ingestedItem); - const next = items.slice(); - // If it doesn't match this paginator's filters, remove if present and exit. - const existingIndex = items.findIndex((ch) => this.getItemId(ch) === id); - if (!this.matchesFilter(ingestedItem)) { - if (existingIndex >= 0) { - next.splice(existingIndex, 1); - this.state.partialNext({ items: next }); - return true; // list changed (item removed) + // --------------------------------------------------------------------------- + // Interval helpers + // --------------------------------------------------------------------------- + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + generateIntervalId(page: T[]): string { + return `interval-${generateUUIDv4()}`; + } + + intervalToItems(interval: Interval | LogicalInterval): T[] { + return interval.itemIds + .map((id) => this._itemIndex?.get(id)) + .filter((item): item is T => !!item); + } + + makeInterval({ page, isHead, isTail }: MakeIntervalParams): Interval { + const sorted = [...page].sort((a, b) => + compareSortKeys(this.computeSortKey(a), this.computeSortKey(b)), + ); + return { + id: this.generateIntervalId(page), + itemIds: sorted.map(this.getItemId.bind(this)), + startKey: 
this.computeSortKey(sorted[0]), + endKey: this.computeSortKey(sorted[sorted.length - 1]), + isHead, + isTail, + }; + } + + protected recomputeIntervalBoundaries(interval: AnyInterval): { + startKey: SortKey; + endKey: SortKey; + } { + // Recompute boundaries from the first and last items in the interval. + // Since ids are kept sorted by effectiveComparator, + // the first and last items define the correct startKey/endKey. + const ids = interval.itemIds; + const first = this.getItem(ids[0]); + const last = this.getItem(ids[ids.length - 1]); + + if (!first || !last) { + throw new Error('Invalid interval to recompute boundaries: empty item array'); + } + + const startKey = this.computeSortKey(first); + const endKey = first === last ? startKey : this.computeSortKey(last); + return { startKey, endKey }; + } + + // --------------------------------------------------------------------------- + // Locate items + // --------------------------------------------------------------------------- + + /** + * Locate item inside a specific interval using the same logic as locateByItem, + * but scoped to interval items. 
+ */ + protected locateByItemInInterval({ + item, + interval, + }: { + item: T; + interval: Interval | LogicalInterval; + }): ItemLocation | null { + const ids = interval.itemIds; + + return binarySearch({ + needle: item, + length: ids.length, + getItemAt: (index: number) => this.getItem(ids[index]), + itemIdentityEquals: (item1, item2) => + this.getItemId(item1) === this.getItemId(item2), + compare: this.effectiveComparator.bind(this), + plateauScan: true, + }); + } + + protected locateIntervalForItem(item: T): AnyInterval | undefined { + if (this._itemIntervals.size === 0) return undefined; + + const itemSortKey = this.computeSortKey(item); + + for (const itv of this.itemIntervals) { + if (belongsToInterval(itemSortKey, itv)) { + return itv; } - return false; // no change } + } + + protected locateByItemInIntervals(item: T): ItemCoordinates['interval'] | undefined { + const interval = this.locateIntervalForItem(item); + if (!interval) return undefined; + const itemLocation = this.locateByItemInInterval({ item, interval }); + if (!itemLocation) return undefined; + return { interval, ...itemLocation }; + } - if (existingIndex >= 0) { - // Update existing: remove then re-insert at the correct position - next.splice(existingIndex, 1); + /** + * Locates the current position of the item and the index at which the item should be inserted + * according to effectiveComparator. + * @param item + */ + protected locateItemInState(item: T): ItemLocation | null { + const items = [...(this.items ?? [])]; + + return binarySearch({ + needle: item, + length: items.length, + getItemAt: (index: number) => items[index], + itemIdentityEquals: (item1, item2) => + this.getItemId(item1) === this.getItemId(item2), + compare: this.effectiveComparator.bind(this), + plateauScan: true, + }); + } + + protected locateByItem = (item: T): ItemCoordinates => { + const result: ItemCoordinates = {}; + + // 1. 
Search in visible state.items + const stateLoc = this.locateItemInState(item); + if (stateLoc) { + result.state = stateLoc; + } + + // 2. Search in intervals if interval-mode is active + if (this.usesItemIntervalStorage) { + const intervalLoc = this.locateByItemInIntervals(item); + if (intervalLoc) { + result.interval = intervalLoc; + } } - const insertAt = - this.config.lockItemOrder && existingIndex >= 0 - ? existingIndex - : // Find insertion index via binary search: first index where existing > ingestionItem - binarySearchInsertIndex({ - needle: ingestedItem, - sortedArray: next, - compare: this.effectiveComparator, - }); + return result; + }; - next.splice(insertAt, 0, ingestedItem); - this.state.partialNext({ items: next }); - return true; // list changed (added or repositioned) + findItem(needle: T): T | undefined { + const { state, interval } = this.locateByItem(needle); + if (state && state.current > -1) { + return (this.items ?? [])[state.current]; + } else if (interval && interval.current > -1) { + const id = interval.interval.itemIds[interval.current]; + return this.getItem(id); + } + return undefined; } + // --------------------------------------------------------------------------- + // Item ingestion + // --------------------------------------------------------------------------- + /** - * Removes item from the paginator's state. - * It is preferable to provide item for better search performance. - * @param id - * @param item + * Inserts an item ID into the interval in the correct sorted position, + * preserving interval ordering and updating start/end keys. + * Returns the unchanged interval if the correct insertion position could not be determined. */ - removeItem({ id, item }: { id?: string; item?: T }): boolean { - if (!id && !item) return false; - let index: number; - if (item) { - const location = this.locateByItem(item); - index = location.index; - } else { - index = this.items?.findIndex((i) => this.getItemId(i) === id) ?? 
-1; + protected insertItemIdIntoInterval( + interval: I, + item: T, + ): I { + const id = this.getItemId(item); + const itemLocation = this.locateByItemInInterval({ item, interval }); + + if (!itemLocation) return interval; + + // If already at the correct position, nothing to change + if (itemLocation.current >= 0 && itemLocation.current === itemLocation.expected) { + return interval; } - if (index === -1) return false; - const newItems = [...(this.items ?? [])]; - newItems.splice(index, 1); - this.state.partialNext({ items: newItems }); - return true; + const ids = [...interval.itemIds]; + + // Adjust insertion index if we are removing the item before reinserting index. + // locateByItemInInterval() computed insertionIndex with the item still in the array. + let insertionIndex = itemLocation.expected; + if (itemLocation.current >= 0 && itemLocation.expected > itemLocation.current) { + insertionIndex--; + } + + // Remove existing occurrence if present + if (itemLocation.current >= 0) { + ids.splice(itemLocation.current, 1); + } + + // Insert at the new position + ids.splice(insertionIndex, 0, id); + + const intervalWithUpdatedIds = { + ...interval, + itemIds: ids, + }; + + const boundaries = this.recomputeIntervalBoundaries(intervalWithUpdatedIds); + + return { + ...intervalWithUpdatedIds, + ...boundaries, + }; } - contains(item: T): boolean { - return !!this.items?.find((i) => this.getItemId(i) === this.getItemId(item)); + /** + * Splits a logical interval by checking each item individually. + * Items overlapping anchoredInterval are merged into it. + * Others stay in a retained logical interval. 
+ */ + protected mergeItemsFromLogicalInterval( + logical: LogicalInterval, + anchored: Interval, + ): { mergedAnchored: Interval; remainingLogical: LogicalInterval | null } { + const mergeIds: string[] = []; + const keepIds: string[] = []; + + for (const id of logical.itemIds) { + const item = this.getItem(id); + if (!item) { + keepIds.push(id); + continue; + } + + const key = this.computeSortKey(item); + + if (belongsToInterval(key, anchored)) mergeIds.push(id); + else keepIds.push(id); + } + + let merged = anchored; + for (const id of mergeIds) { + const item = this.getItem(id); + if (!item) continue; + merged = this.insertItemIdIntoInterval(merged, item); + } + + const remainingLogical = keepIds.length > 0 ? { ...logical, itemIds: keepIds } : null; + + return { + mergedAnchored: merged, + remainingLogical: remainingLogical && { + ...remainingLogical, + ...this.recomputeIntervalBoundaries(remainingLogical), + }, + }; } /** - * Find the exact index of `needle` by ID (via getItemId) under the current sortComparator. - * Returns: - * - `index`: actual index if found, otherwise -1 - * - `insertionIndex`: lower-bound position where `needle` would be inserted - * to preserve order (always defined). - * - * Time: O(log n) + O(k) for a tie plateau of size k (unless comparator has ID tiebreaker). - * - * ### Usage examples + * Merges all intervals (anchored + logical head/tail). 
+ * Returns: + * - merged anchored interval (or null if none) + * - possibly reduced logical head / tail intervals + */ + protected mergeIntervals(intervals: AnyInterval[]): MergeIntervalsResult { + let logicalHead: LogicalInterval | null = null; + let logicalTail: LogicalInterval | null = null; + const anchored: Interval[] = []; + + // Separate logical vs anchored + for (const itv of intervals) { + if (isLiveHeadInterval(itv)) logicalHead = itv; + else if (isLiveTailInterval(itv)) logicalTail = itv; + else anchored.push(itv); + } + + // nothing to merge + if (anchored.length === 0 && logicalHead && logicalTail) { + return { logicalHead, merged: null, logicalTail }; + } + + // Merge anchored intervals into one interval (if possible) + const mergedAnchored = mergeAnchoredIntervals(anchored); + + // No anchored intervals → just return logical ones + if (!mergedAnchored) { + return { logicalHead, merged: null, logicalTail }; + } + + let merged = mergedAnchored; + + // Merge items from logical HEAD interval + if (logicalHead) { + const { mergedAnchored, remainingLogical } = this.mergeItemsFromLogicalInterval( + logicalHead, + merged, + ); + merged = mergedAnchored; + logicalHead = remainingLogical; + } + + // Merge items from logical TAIL interval + if (logicalTail) { + const { mergedAnchored, remainingLogical } = this.mergeItemsFromLogicalInterval( + logicalTail, + merged, + ); + merged = mergedAnchored; + logicalTail = remainingLogical; + } + + return { logicalHead, merged, logicalTail }; + } + + // --------------------------------------------------------------------------- + // Consume and manage items + // --------------------------------------------------------------------------- + + /** + * Ingests the whole page into intervals and returns the resulting anchored interval. 
+ */ + protected ingestPage({ + page, + isHead, + isTail, + targetIntervalId, + }: { + page: T[]; + isHead?: boolean; + isTail?: boolean; + targetIntervalId?: string; + }): Interval | null { + if (!this._itemIndex || !page?.length) return null; + + for (const item of page) { + this._itemIndex.setOne(item); + } + + const pageInterval = this.makeInterval({ + page, + isHead, + isTail, + }); + + const targetInterval = targetIntervalId + ? this._itemIntervals.get(targetIntervalId) + : null; + + // Find intervals that overlap with this page + const overlapping: Interval[] = []; + for (const itv of this.itemIntervals) { + // target will be appended separately + if (targetInterval?.id === itv.id) continue; + if (intervalsOverlap(pageInterval, itv)) { + overlapping.push(itv); + } + } + const toMerge: AnyInterval[] = [...overlapping, pageInterval]; + + if (targetInterval) { + toMerge.push(targetInterval); + } + + const { logicalHead, merged, logicalTail } = this.mergeIntervals(toMerge); + + // Remove all intervals that participated + for (const itv of toMerge) { + this._itemIntervals.delete(itv.id); + } + + // Decide which anchored interval we keep for this page: + const resultingInterval = merged ?? pageInterval; + this._itemIntervals.set(resultingInterval.id, resultingInterval); + + // Store logical head/tail (if any) + if (logicalHead) { + this._itemIntervals.set(LIVE_HEAD_INTERVAL_ID, logicalHead); + } else { + this._itemIntervals.delete(LIVE_HEAD_INTERVAL_ID); + } + + if (logicalTail) { + this._itemIntervals.set(LIVE_TAIL_INTERVAL_ID, logicalTail); + } else { + this._itemIntervals.delete(LIVE_TAIL_INTERVAL_ID); + } + + return resultingInterval; + } + + /** + * Ingests a single item on live update. 
* - * ```ts - * const { index, insertionIndex } = paginator.locateByItem(channel); + * If intervals + itemIndex exist, tries to: + * - update the ItemIndex + * - find an anchored interval whose sort bounds contain the item + * - insert the item into that interval using locate+plateau logic + * - if this is the active interval, re-emit state.items from interval * - * if (index > -1) { - * // Found -> e.g. remove the item - * items.splice(index, 1); - * } else { - * // Insert new at the right position - * items.splice(insertionIndex, 0, channel); - * } - * ``` + * If no intervals or no itemIndex exist, falls back to the legacy list-based ingestion. */ - public locateByItem( - needle: T, - options?: { alternatePlateauScan?: boolean }, - ): { index: number; insertionIndex: number } { - const items = this.items ?? []; - if (items.length === 0) return { index: -1, insertionIndex: 0 }; - - const insertionIndex = binarySearchInsertIndex({ - needle, - sortedArray: items, - compare: this.effectiveComparator, - }); + ingestItem(ingestedItem: T): boolean { + // If we don't have itemIndex, manipulate only items array in paginator state and not intervals + // as intervals do not store the whole items and have to rely on _itemIndex + if (!this.usesItemIntervalStorage) { + const items = this.items ?? 
[]; + const next = items.slice(); + const { current: existingIndex, expected: insertionIndex } = binarySearch({ + needle: ingestedItem, + length: items.length, + getItemAt: (index: number) => items[index], + itemIdentityEquals: (item1, item2) => + this.getItemId(item1) === this.getItemId(item2), + compare: this.effectiveComparator.bind(this), + plateauScan: true, + }); + + if (!this.matchesFilter(ingestedItem)) { + if (existingIndex >= 0) { + next.splice(existingIndex, 1); + this.state.partialNext({ items: next }); + return true; + } + return false; + } + + // override the existing item even though it already exists to make sure it is up-to-date + if (existingIndex >= 0) { + next.splice(existingIndex, 1); + } + + const insertAt = + this.config.lockItemOrder && existingIndex >= 0 ? existingIndex : insertionIndex; - // quick neighbor checks - const id = this.getItemId(needle); - const left = insertionIndex - 1; - if (left >= 0 && this.effectiveComparator(items[left], needle) === 0) { - if (this.getItemId(items[left]) === id) return { index: left, insertionIndex }; - } - if ( - insertionIndex < items.length && - this.effectiveComparator(items[insertionIndex], needle) === 0 - ) { - if (this.getItemId(items[insertionIndex]) === id) - return { index: insertionIndex, insertionIndex }; - } - - // plateau scan - const index = - (options?.alternatePlateauScan ?? true) - ? locateOnPlateauAlternating( - items, - needle, - this.effectiveComparator, - this.getItemId.bind(this), - insertionIndex, - ) - : locateOnPlateauScanOneSide( - items, - needle, - this.effectiveComparator, - this.getItemId.bind(this), - insertionIndex, - ); - - return { index, insertionIndex }; - } - - findItem(needle: T, options?: { alternatePlateauScan?: boolean }): T | undefined { - const { index } = this.locateByItem(needle, options); - return index > -1 ? (this.items ?? 
[])[index] : undefined; - } - - setItems(valueOrFactory: ValueOrPatch, cursor?: PaginatorCursor) { + next.splice(insertAt, 0, ingestedItem); + this.state.partialNext({ items: next }); + return true; + } + + // Always update the itemIndex if present + this._itemIndex?.setOne(ingestedItem); + + // Ingestion into anchored intervals + let targetInterval = this.locateIntervalForItem(ingestedItem); + + // if no page has been loaded yet or the anchored interval could not be found, + // because the relevant page has not been loaded yet, + // keep the incoming items in logical interval if falls outside of the head and tail boundaries + if (!targetInterval) { + let targetLogical: LogicalInterval | undefined; + // add to head or tail if item exceeds the total bounds + if (this._itemIntervals.size > 0) { + const intervalsArray = this.itemIntervals; + const [firstInterval, lastInterval] = [ + intervalsArray[0], + intervalsArray.slice(-1)[0], + ]; + const itemSortKey = this.computeSortKey(ingestedItem); + if ( + isLiveHeadInterval(firstInterval) && + compareSortKeys(itemSortKey, firstInterval.startKey) <= + ComparisonResult.A_PRECEDES_B + ) { + targetLogical = firstInterval; + } else if ( + isLiveTailInterval(lastInterval) && + compareSortKeys(itemSortKey, lastInterval.endKey) >= + ComparisonResult.A_COMES_AFTER_B + ) { + targetLogical = lastInterval; + } + // ingested item would fall somewhere inside the boundaries but relevant page has not been loaded yet + // and thus the interval is not identifiable + if (!targetLogical) return false; + + targetInterval = this.insertItemIdIntoInterval(targetLogical, ingestedItem); + } else { + // no page has been loaded yet + targetInterval = { + id: LIVE_HEAD_INTERVAL_ID, + itemIds: [this.getItemId(ingestedItem)], + startKey: this.computeSortKey(ingestedItem), + endKey: this.computeSortKey(ingestedItem), + }; + + if (!this._activeIntervalId) { + this._activeIntervalId = targetInterval.id; + } + } + } else { + targetInterval = 
this.insertItemIdIntoInterval(targetInterval, ingestedItem); + } + + this._itemIntervals.set(targetInterval.id, targetInterval); + + if (this._activeIntervalId === targetInterval.id) { + this.state.partialNext({ items: this.intervalToItems(targetInterval) }); + } + + return true; + } + + // --------------------------------------------------------------------------- + // Remove / contains + // --------------------------------------------------------------------------- + + removeItem({ id, item: inputItem }: { id?: string; item?: T }): boolean { + if (!id && !inputItem) return false; + const item = inputItem ?? this.getItem(id); + // not in item index, and no item provided (cannot locate by item), so we will not check intervals, + // only state items and sequentially + if (!this._itemIndex || !item) { + const index = this.items?.findIndex((i) => this.getItemId(i) === id) ?? -1; + if (index === -1) return false; + const newItems = [...(this.items ?? [])]; + newItems.splice(index, 1); + this.state.partialNext({ items: newItems }); + return true; + } + + const { state: stateLocation, interval: intervalLocation } = this.locateByItem(item); + + if (intervalLocation && intervalLocation.current > -1) { + const itemIds = [...intervalLocation.interval.itemIds]; + itemIds.splice(intervalLocation.current, 1); + const newInterval: AnyInterval = { ...intervalLocation.interval, itemIds }; + const boundaries = this.recomputeIntervalBoundaries(newInterval); + this._itemIntervals.set(newInterval.id, { ...newInterval, ...boundaries }); + } + + if (stateLocation && stateLocation.current > -1) { + const newItems = [...(this.items ?? [])]; + newItems.splice(stateLocation.current, 1); + this.state.partialNext({ items: newItems }); + } + return true; + } + + /** Sets the items in the state. 
If intervals are kept, the active interval will be updated */ + setItems({ + valueOrFactory, + cursor, + isFirstPage, + isLastPage, + }: SetPaginatorItemsParams) { this.state.next((current) => { const { items: currentItems = [] } = current; const newItems = isPatch(valueOrFactory) @@ -511,22 +1233,36 @@ export abstract class BasePaginator { } else { newState.offset = newItems.length; } + + const interval = this.ingestPage({ + page: newItems, + isHead: isFirstPage, + isTail: isLastPage, + }); + if (interval) this._activeIntervalId = interval.id; + return newState; }); } - setFilterResolvers(resolvers: FieldToDataResolver[]) { - this._filterFieldToDataResolvers = resolvers; - } - - addFilterResolvers(resolvers: FieldToDataResolver[]) { - this._filterFieldToDataResolvers.push(...resolvers); - } + // --------------------------------------------------------------------------- + // Debounce & query execution + // --------------------------------------------------------------------------- setDebounceOptions = ({ debounceMs }: PaginatorDebounceOptions) => { this._executeQueryDebounced = debounce(this.executeQuery.bind(this), debounceMs); }; + protected shouldResetStateBeforeQuery( + prevQueryShape: unknown | undefined, + nextQueryShape: unknown | undefined, + ): boolean { + return ( + typeof prevQueryShape === 'undefined' || + this.config.hasPaginationQueryShapeChanged(prevQueryShape, nextQueryShape) + ); + } + protected canExecuteQuery = ({ direction, reset, @@ -560,7 +1296,7 @@ export abstract class BasePaginator { const current = this.state.getLatestValue(); return { ...current, - lastQueryError: undefined, // reset lastQueryError that can be overridden by the stateUpdate + lastQueryError: undefined, ...stateUpdate, isLoading: false, items: isFirstPage @@ -587,7 +1323,6 @@ export abstract class BasePaginator { try { return await this.query(params); } catch (e) { - // If the offline support is enabled, and there are items in the DB, we should not report the error. 
const isOfflineSupportEnabledWithItems = this.isOfflineSupportEnabled && (this.items ?? []).length > 0; if (!isOfflineSupportEnabledWithItems) { @@ -596,7 +1331,6 @@ export abstract class BasePaginator { const nextRetryCount = (retryCount ?? 0) - 1; if (nextRetryCount > 0) { - // not swapping isLoading flag to false as the load has not finished yet await sleep(DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES); return await this.runQueryRetryable({ ...params, @@ -613,7 +1347,7 @@ export abstract class BasePaginator { async executeQuery({ direction = 'next', - queryShape: forcedQueryShape, // todo: remove it? + queryShape: forcedQueryShape, reset, retryCount = 0, }: PaginationQueryParams = {}) { @@ -623,7 +1357,6 @@ export abstract class BasePaginator { const isFirstPage = this.isFirstPageQuery({ queryShape, reset }); if (isFirstPage) { const state = this.getStateBeforeFirstQuery(); - // preload from the offline DB only if no successful HTTP request has been run previously let items: T[] | undefined = undefined; if (!this.isInitialized) { items = @@ -649,13 +1382,14 @@ export abstract class BasePaginator { this._lastQueryShape = this._nextQueryShape; this._nextQueryShape = undefined; - // if the request failed the value is null, loading finished if (!results) { this.state.partialNext({ isLoading: false }); return; } - const stateUpdate: Partial> = { lastQueryError: undefined }; + const stateUpdate: Partial> = { + lastQueryError: undefined, + }; const { items, next, prev } = results; if (isFirstPage && (next || prev)) { @@ -672,11 +1406,29 @@ export abstract class BasePaginator { } stateUpdate.items = await this.filterQueryResults(items); + + // ingest page into intervals if itemIndex is present + const interval = this.ingestPage({ + page: stateUpdate.items, + isHead: !stateUpdate.hasNext, + isTail: !stateUpdate.hasPrev, + targetIntervalId: this._activeIntervalId, + }); + // item index is available if an Interval is returned + if (interval) { + this._activeIntervalId = 
interval.id; + stateUpdate.items = this.intervalToItems(interval); + } + const state = this.getStateAfterQuery(stateUpdate, isFirstPage); this.state.next(state); this.populateOfflineDbAfterQuery({ items: state.items, queryShape }); } + // --------------------------------------------------------------------------- + // Public API: navigation + // --------------------------------------------------------------------------- + cancelScheduledQuery() { this._executeQueryDebounced.cancel(); } @@ -696,6 +1448,7 @@ export abstract class BasePaginator { ) => { this._executeQueryDebounced({ direction: 'next', ...params }); }; + prevDebounced = ( params: Omit, 'direction' | 'queryShape'> = {}, ) => { diff --git a/src/pagination/paginators/ChannelPaginator.ts b/src/pagination/paginators/ChannelPaginator.ts index 7a3a805a2d..9d9ca3ea71 100644 --- a/src/pagination/paginators/ChannelPaginator.ts +++ b/src/pagination/paginators/ChannelPaginator.ts @@ -4,6 +4,8 @@ import type { PaginationQueryShapeChangeIdentifier, PaginatorOptions, PaginatorState, + SetPaginatorItemsParams, + SortKey, } from './BasePaginator'; import { BasePaginator } from './BasePaginator'; import type { FilterBuilderOptions } from '../FilterBuilder'; @@ -20,7 +22,6 @@ import type { } from '../../types'; import type { FieldToDataResolver, PathResolver } from '../types.normalization'; import { resolveDotPathValue } from '../utility.normalization'; -import type { ValueOrPatch } from '../../store'; import { isEqual } from '../../utils/mergeWith/mergeWithCore'; const DEFAULT_BACKEND_SORT: ChannelSort = { last_message_at: -1, updated_at: -1 }; // {last_updated: -1} @@ -244,8 +245,8 @@ export class ChannelPaginator extends BasePaginator return this._staticFilters; } - get sort(): ChannelSort | undefined { - return this._sort; + get sort(): ChannelSort { + return this._sort ?? 
DEFAULT_BACKEND_SORT; } get options(): ChannelOptions | undefined { @@ -284,6 +285,14 @@ export class ChannelPaginator extends BasePaginator baseFilters: { ...this.staticFilters }, }); + computeSortKey(item: Channel): SortKey { + const generateSortKey = super.makeSortKeyGenerator({ + sort: this.sort, + resolvePathValue: channelSortPathResolver, + }); + return generateSortKey(item); + } + // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; protected getNextQueryShape(): ChannelQueryShape { const shape: ChannelQueryShape = { @@ -386,8 +395,8 @@ export class ChannelPaginator extends BasePaginator filterQueryResults = (items: Channel[]) => items; - setItems(valueOrFactory: ValueOrPatch) { - super.setItems(valueOrFactory); + setItems(params: SetPaginatorItemsParams) { + super.setItems(params); if (!this.client.offlineDb) return; diff --git a/src/pagination/sortCompiler.ts b/src/pagination/sortCompiler.ts index b56e9cd13f..15a6c8ccaa 100644 --- a/src/pagination/sortCompiler.ts +++ b/src/pagination/sortCompiler.ts @@ -9,31 +9,121 @@ import { normalizeQuerySort } from '../utils'; import type { AscDesc } from '../types'; import type { Comparator, PathResolver } from './types.normalization'; -export function binarySearchInsertIndex({ - compare, +export type ItemLocation = { + expected: number; + current: number; +}; + +/** + * Generic binary-search + plateau lookup over an abstract sorted array. + * + * The array is represented by: + * - its length + * - a getter `getItemAt(index)` that returns the item (or undefined) + * + * It returns: + * - current: actual index of the item in the array + * - expected: lower-bound position where the item belongs according to compare function + */ +export function binarySearch({ needle, - sortedArray, + length, + getItemAt, + itemIdentityEquals, + compare, + plateauScan, }: { - sortedArray: T[]; + /** Target item in the searched array */ needle: T; + length: number; + /** Retrieves the item from an array. 
The array could be just an array of reference by id to an index. + * Therefore, we do not access the array directly but allow to determine, how the item is constructed. + */ + getItemAt: (index: number) => T | undefined; + /** Used to determine identity, not equality based on sort / comparator rules */ + itemIdentityEquals: (item1: T, item2: T) => boolean; + /** Used to determine equality from the sort order point of view. */ compare: Comparator; -}): number { - let low = 0; - let high = sortedArray.length; + plateauScan?: boolean; +}): ItemLocation { + // empty array + if (length === 0) return { current: -1, expected: 0 }; + + // --- 1) Binary search to find lower bound (insertionIndex) --- + let lo = 0; + let hi = length; + + while (lo < hi) { + const mid = (lo + hi) >> 1; // fast floor((low+high)/2) + const midItem = getItemAt(mid); + if (!midItem) { + // Corruption: we have an ID but no backing item. + // Bail out with "not found". + return { current: -1, expected: -1 }; + } + + const cmp = compare(midItem, needle); + if (cmp < 0) { + // midItem < needle ⇒ go right + lo = mid + 1; + } else { + // midItem ≥ needle ⇒ go left + hi = mid; + } + } + + const expected = lo; - while (low < high) { - const middle = (low + high) >>> 1; // fast floor((low+high)/2) - const comparisonResult = compare(sortedArray[middle], needle); + // item is located where it is expected to be according to the sort + const itemAtExpectedIndex = getItemAt(expected); + if (itemAtExpectedIndex && itemIdentityEquals(itemAtExpectedIndex, needle)) { + return { current: expected, expected }; + } else if (!plateauScan) { + return { current: -1, expected }; + } + + // --- 2) Plateau scan around insertionIndex --- - // We want the first position where existing > needle to insert before it - if (comparisonResult > 0) { - high = middle; + const checkSide = (atIndex: number) => { + const result = { exhausted: false, found: false }; + const item = getItemAt(atIndex); + if (!item) { + result.exhausted = 
true; } else { - low = middle + 1; + const cmp = compare(item, needle); + if (cmp !== 0) { + result.exhausted = true; + } else { + if (itemIdentityEquals(item, needle)) { + result.found = true; + } + } + } + return result; + }; + + // Alternating left/right scan + let iLeft = expected - 1; + let iRight = expected + 1; // we've already checked insertionIndex + let leftDone = iLeft < 0; + let rightDone = iRight >= length; + + while (!leftDone || !rightDone) { + if (!leftDone) { + const result = checkSide(iLeft); + if (result.found) return { current: iLeft, expected }; + leftDone = result.exhausted || --iLeft < 0; + } + + if (!rightDone) { + const result = checkSide(iRight); + if (result.found) return { current: iRight, expected }; + rightDone = result.exhausted || ++iRight >= length; } } - return low; + // Not found in plateau; insertion index is still the correct lower bound. + return { current: -1, expected }; } /** diff --git a/src/pagination/types.normalization.ts b/src/pagination/types.normalization.ts index 1932a5bc73..f9ecc09542 100644 --- a/src/pagination/types.normalization.ts +++ b/src/pagination/types.normalization.ts @@ -1,5 +1,12 @@ export type PathResolver = (item: DataSource, field: string) => unknown; -export type Comparator = (left: T, right: T) => number; + +export enum ComparisonResult { + A_PRECEDES_B = -1, + A_IS_EQUAL_TO_B = 0, + A_COMES_AFTER_B = 1, +} + +export type Comparator = (left: T, right: T) => ComparisonResult; export type FieldToDataResolver = { matchesField: (field: string) => boolean; From 7aa4f9107a0a507500409408c9b5c42cf6d2dd1f Mon Sep 17 00:00:00 2001 From: martincupela Date: Mon, 12 Jan 2026 08:14:40 +0100 Subject: [PATCH 10/31] feat: add MessagePaginator --- src/ChannelPaginatorsOrchestrator.ts | 3 +- src/channel.ts | 198 +- src/client.ts | 59 +- .../InstanceConfigurationService.ts | 73 + src/configuration/index.ts | 1 + src/configuration/types.ts | 81 + src/messageComposer/messageComposer.ts | 21 +- 
.../MessageOperationStatePolicy.ts | 85 + src/messageOperations/MessageOperations.ts | 107 + src/messageOperations/index.ts | 10 + src/messageOperations/types.ts | 57 + src/pagination/ItemIndex.ts | 17 +- .../createdAtAroundPaginationFlags.ts | 73 + .../idAroundPaginationFlags.ts | 53 + src/pagination/cursorDerivation/index.ts | 1 + .../cursorDerivation/linearPaginationFlags.ts | 83 + src/pagination/paginators/BasePaginator.ts | 1696 +++++--- src/pagination/paginators/ChannelPaginator.ts | 9 - src/pagination/paginators/MessagePaginator.ts | 520 +++ .../paginators/MessageReplyPaginator.ts | 301 ++ .../paginators/ReminderPaginator.ts | 6 +- src/pagination/paginators/index.ts | 2 + src/pagination/sortCompiler.ts | 47 +- src/reminders/ReminderManager.ts | 4 +- src/thread.ts | 113 +- .../ChannelPaginatorsOrchestrator.test.ts | 36 +- test/unit/EventHandlerPipeline.test.ts | 12 +- .../MessageComposer/messageComposer.test.ts | 59 +- .../MessageOperations.test.ts | 203 + test/unit/pagination/BasePaginator.test.ts | 1544 ------- test/unit/pagination/ItemIndex.test.ts | 175 + .../paginators/BasePaginator.test.ts | 3639 +++++++++++++++++ .../{ => paginators}/ChannelPaginator.test.ts | 24 +- .../paginators/MessagePaginator.test.ts | 493 +++ .../paginators/MessageReplyPaginator.test.ts | 114 + test/unit/pagination/sortCompiler.test.ts | 427 +- 36 files changed, 8081 insertions(+), 2265 deletions(-) create mode 100644 src/configuration/InstanceConfigurationService.ts create mode 100644 src/configuration/index.ts create mode 100644 src/configuration/types.ts create mode 100644 src/messageOperations/MessageOperationStatePolicy.ts create mode 100644 src/messageOperations/MessageOperations.ts create mode 100644 src/messageOperations/index.ts create mode 100644 src/messageOperations/types.ts create mode 100644 src/pagination/cursorDerivation/createdAtAroundPaginationFlags.ts create mode 100644 src/pagination/cursorDerivation/idAroundPaginationFlags.ts create mode 100644 
src/pagination/cursorDerivation/index.ts create mode 100644 src/pagination/cursorDerivation/linearPaginationFlags.ts create mode 100644 src/pagination/paginators/MessagePaginator.ts create mode 100644 src/pagination/paginators/MessageReplyPaginator.ts create mode 100644 test/unit/messageOperations/MessageOperations.test.ts delete mode 100644 test/unit/pagination/BasePaginator.test.ts create mode 100644 test/unit/pagination/ItemIndex.test.ts create mode 100644 test/unit/pagination/paginators/BasePaginator.test.ts rename test/unit/pagination/{ => paginators}/ChannelPaginator.test.ts (97%) create mode 100644 test/unit/pagination/paginators/MessagePaginator.test.ts create mode 100644 test/unit/pagination/paginators/MessageReplyPaginator.test.ts diff --git a/src/ChannelPaginatorsOrchestrator.ts b/src/ChannelPaginatorsOrchestrator.ts index bbfc7f5bfe..33458fcb20 100644 --- a/src/ChannelPaginatorsOrchestrator.ts +++ b/src/ChannelPaginatorsOrchestrator.ts @@ -95,7 +95,8 @@ const reEmit: EventHandlerPipelineHandler = ({ if (!channel) return; orchestrator.paginators.forEach((paginator) => { const items = paginator.items; - if (paginator.findItem(channel) && items) { + const { state } = paginator.locateByItem(channel); + if ((state?.currentIndex ?? 
-1) > -1 && items) { paginator.state.partialNext({ items: [...items] }); } }); diff --git a/src/channel.ts b/src/channel.ts index 76cb7265ea..47856dc1c8 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -1,6 +1,8 @@ import { ChannelState } from './channel_state'; import { MessageComposer } from './messageComposer'; import { MessageReceiptsTracker } from './messageDelivery'; +import { MessagePaginator } from './pagination/paginators'; +import { MessageOperations } from './messageOperations'; import { generateChannelTempCid, logChatPromiseExecution, @@ -72,10 +74,56 @@ import type { UpdateChannelAPIResponse, UpdateChannelOptions, UpdateLocationPayload, + UpdateMessageOptions, UserResponse, } from './types'; import type { Role } from './permissions'; import type { CustomChannelData } from './custom_types'; +import { StateStore } from './store'; + +// todo: move to dedicated file +export type SendMessageWithStateUpdateParams = { + localMessage: LocalMessage; + message?: Message; + options?: SendMessageOptions; + /** + * Per-call override for the send/retry request (advanced). + * If set, it takes precedence over channel instance configuration handlers. + */ + sendMessageRequestFn?: CustomSendMessageRequestFn; +}; + +export type RetrySendMessageWithLocalUpdateParams = Omit< + SendMessageWithStateUpdateParams, + 'message' +>; + +export type UpdateMessageWithStateUpdateParams = { + localMessage: LocalMessage; + options?: UpdateMessageOptions; + /** + * Per-call override for the update request (advanced). + * If set, it takes precedence over channel instance configuration handlers. 
+ */ + updateMessageRequestFn?: CustomUpdateMessageRequestFn; +}; + +// Custom request function types for configuration +export type CustomSendMessageRequestFn = ( + params: Omit, +) => Promise<{ message: MessageResponse }>; + +export type CustomUpdateMessageRequestFn = ( + params: Omit, +) => Promise<{ message: MessageResponse }>; + +export type ChannelInstanceConfig = { + requestHandlers?: { + sendMessageRequest?: CustomSendMessageRequestFn; + retrySendMessageRequest?: CustomSendMessageRequestFn; + updateMessageRequest?: CustomUpdateMessageRequestFn; + }; +}; /** * Channel - The Channel class manages it's own state. @@ -110,8 +158,11 @@ export class Channel { isTyping: boolean; disconnected: boolean; push_preferences?: PushPreference; + public readonly configState = new StateStore({}); public readonly messageComposer: MessageComposer; public readonly messageReceiptsTracker: MessageReceiptsTracker; + public readonly messagePaginator: MessagePaginator; + public readonly messageOperations: MessageOperations; /** * constructor - Create a channel @@ -167,6 +218,54 @@ export class Channel { return msg && { timestampMs, msgId: msg.id }; }, }); + + this.messagePaginator = new MessagePaginator({ channel: this }); + + this.messageOperations = new MessageOperations({ + ingest: (m) => this.messagePaginator.ingestItem(m), + get: (id) => this.messagePaginator.getItem(id), + handlers: () => { + const { requestHandlers } = this.configState.getLatestValue(); + const sendMessageRequest = requestHandlers?.sendMessageRequest; + const retrySendMessageRequest = requestHandlers?.retrySendMessageRequest; + const updateMessageRequest = requestHandlers?.updateMessageRequest; + return { + send: sendMessageRequest + ? (p) => + sendMessageRequest({ + localMessage: p.localMessage, + message: p.message, + options: p.options, + }) + : undefined, + retry: retrySendMessageRequest + ? 
(p) => + retrySendMessageRequest({ + localMessage: p.localMessage, + message: p.message, + options: p.options, + }) + : undefined, + update: updateMessageRequest + ? (p) => + updateMessageRequest({ + localMessage: p.localMessage, + options: p.options, + }) + : undefined, + }; + }, + defaults: { + send: async (m, o) => { + const result = await this.sendMessage(m, o); + return { message: result.message }; + }, + update: async (m, o) => { + const result = await this.getClient().updateMessage(m, undefined, o); + return { message: result.message }; + }, + }, + }); } /** @@ -240,6 +339,51 @@ export class Channel { return await this._sendMessage(message, options); } + /** + * Sends a message with optimistic local state update. + */ + async sendMessageWithLocalUpdate( + params: SendMessageWithStateUpdateParams, + ): Promise { + await this.messageOperations.send( + { + localMessage: params.localMessage, + message: params.message, + options: params.options, + }, + params.sendMessageRequestFn, + ); + if (this.messageComposer.config.text.publishTypingEvents) await this.stopTyping(); + } + + /** + * Retry sending a failed message. + */ + async retrySendMessageWithLocalUpdate( + params: Omit, + ) { + await this.messageOperations.retry( + { + localMessage: { ...params.localMessage, type: 'regular' }, + options: params.options, + }, + params.sendMessageRequestFn, + ); + } + + /** + * Updates a message with optimistic local state update. + */ + async updateMessageWithLocalUpdate(params: UpdateMessageWithStateUpdateParams) { + await this.messageOperations.update( + { + localMessage: params.localMessage, + options: params.options, + }, + params.updateMessageRequestFn, + ); + } + sendFile( uri: string | NodeJS.ReadableStream | Buffer | File, name?: string, @@ -1399,7 +1543,7 @@ export class Channel { if (message.user?.id && this.getClient().userMuteStatus(message.user.id)) return false; - // Return false if channel doesn't allow read events. 
+ // Return false if channel doesn't allow read events. + if ( Array.isArray(this.data?.own_capabilities) && !this.data?.own_capabilities.includes('read-events') ) { @@ -1472,18 +1616,7 @@ export class Channel { return await this.query(defaultOptions, 'latest'); }; - /** - * query - Query the API, get messages, members or other channel fields - * - * @param {ChannelQueryOptions} options The query options - * @param {MessageSetType} messageSetToAddToIfDoesNotExist It's possible to load disjunct sets of a channel's messages into state, use `current` to load the initial channel state or if you want to extend the currently displayed messages, use `latest` if you want to load/extend the latest messages, `new` is used for loading a specific message and it's surroundings - * - * @return {Promise} Returns a query response - */ - async query( - options: ChannelQueryOptions = {}, - messageSetToAddToIfDoesNotExist: MessageSetType = 'current', - ) { + async _query(options: ChannelQueryOptions = {}) { // Make sure we wait for the connect promise if there is a pending one await this.getClient().wsPromise; @@ -1507,15 +1640,26 @@ queryURL += `/${encodeURIComponent(this.id)}`; } - const state = await this.getClient().post( - queryURL + '/query', - { - data: this._data, - state: true, - ...options, - }, - ); + return await this.getClient().post(queryURL + '/query', { + data: this._data, + state: true, + ...options, + }); + } + /** + * query - Query the API, get messages, members or other channel fields + * + * @param {ChannelQueryOptions} options The query options + * @param {MessageSetType} messageSetToAddToIfDoesNotExist It's possible to load disjunct sets of a channel's messages into state, use `current` to load the initial channel state or if you want to extend the currently displayed messages, use `latest` if you want to load/extend the latest messages, `new` is used for loading a specific message and it's surroundings + * + * @return {Promise} Returns a query
response + */ + async query( + options: ChannelQueryOptions = {}, + messageSetToAddToIfDoesNotExist: MessageSetType = 'current', + ) { + const state = await this._query(options); // update the channel id if it was missing if (!this.id) { this.id = state.channel.id; @@ -2052,6 +2196,9 @@ export class Channel { if (this._countMessageAsUnread(event.message)) { channelState.unreadCount = channelState.unreadCount + 1; + this.messagePaginator.setUnreadSnapshot({ + unreadCount: channelState.unreadCount, + }); } client.syncDeliveredCandidates([this]); @@ -2101,6 +2248,8 @@ export class Channel { } } + this.messagePaginator.clearUnreadSnapshot(); + break; case 'member.added': case 'member.updated': { @@ -2168,6 +2317,13 @@ export class Channel { user: event.user, unread_messages: unreadCount, }; + this.messagePaginator.setUnreadSnapshot({ + firstUnreadMessageId: + channelState.read[event.user.id].first_unread_message_id ?? null, + lastReadAt: channelState.read[event.user.id].last_read, + lastReadMessageId: channelState.read[event.user.id].last_read_message_id, + unreadCount, + }); channelState.unreadCount = unreadCount; this.messageReceiptsTracker.onNotificationMarkUnread({ diff --git a/src/client.ts b/src/client.ts index c2cbe87036..150a73dad3 100644 --- a/src/client.ts +++ b/src/client.ts @@ -19,6 +19,7 @@ import { addFileToFormData, axiosParamsSerializer, chatCodes, + formatMessage, generateChannelTempCid, isFunction, isOnline, @@ -244,34 +245,17 @@ import { ChannelManager } from './channel_manager'; import { MessageDeliveryReporter } from './messageDelivery'; import { NotificationManager } from './notifications'; import { ReminderManager } from './reminders'; -import { StateStore } from './store'; -import type { MessageComposer } from './messageComposer'; import type { AbstractOfflineDB } from './offline-support'; +import type { + MessageComposerSetupState, + SetInstanceConfigurationFunctions, +} from './configuration'; +import { InstanceConfigurationService } from 
'./configuration/InstanceConfigurationService'; function isString(x: unknown): x is string { return typeof x === 'string' || x instanceof String; } -type MessageComposerTearDownFunction = () => void; - -type MessageComposerSetupFunction = ({ - composer, -}: { - composer: MessageComposer; -}) => void | MessageComposerTearDownFunction; - -export type MessageComposerSetupState = { - /** - * Each `MessageComposer` runs this function each time its signature changes or - * whenever you run `MessageComposer.registerSubscriptions`. Function returned - * from `applyModifications` will be used as a cleanup function - it will be stored - * and ran before new modification is applied. Cleaning up only the - * modified parts is the general way to go but if your setup gets a bit - * complicated, feel free to restore the whole composer with `MessageComposer.restore`. - */ - setupFunction: MessageComposerSetupFunction | null; -}; - export class StreamChat { private static _instance?: unknown | StreamChat; // type is undefined|StreamChat, unknown is due to TS limitations with statics messageDeliveryReporter: MessageDeliveryReporter; @@ -329,12 +313,7 @@ export class StreamChat { sdkIdentifier?: SdkIdentifier; deviceIdentifier?: DeviceIdentifier; private nextRequestAbortController: AbortController | null = null; - /** - * @private - */ - _messageComposerSetupState = new StateStore({ - setupFunction: null, - }); + instanceConfigurationService = new InstanceConfigurationService(); /** * Initialize a client @@ -581,7 +560,15 @@ export class StreamChat { public setMessageComposerSetupFunction = ( setupFunction: MessageComposerSetupState['setupFunction'], ) => { - this._messageComposerSetupState.partialNext({ setupFunction }); + this.instanceConfigurationService.setSetupFunctions({ + MessageComposer: setupFunction, + }); + }; + + public setInstanceConfigurationFunction = ( + setupFunctions: SetInstanceConfigurationFunctions, + ) => { + 
this.instanceConfigurationService.setSetupFunctions(setupFunctions); }; /** @@ -2008,7 +1995,19 @@ this.polls.hydratePollCache(channelState.messages, true); this.reminders.hydrateState(channelState.messages); } - + const requestedPageSize = + queryChannelsOptions?.message_limit ?? + DEFAULT_QUERY_CHANNELS_MESSAGE_LIST_PAGE_SIZE; + c.messagePaginator.postQueryReconcile({ + direction: 'tailward', + isFirstPage: true, + queryShape: { limit: requestedPageSize }, + requestedPageSize, + results: { + items: channelState.messages.map(formatMessage), + tailward: channelState.messages[0]?.id, + }, + }); c.messageComposer.initStateFromChannelResponse(channelState); channels.push(c); diff --git a/src/configuration/InstanceConfigurationService.ts b/src/configuration/InstanceConfigurationService.ts new file mode 100644 index 0000000000..c602ea4bb3 --- /dev/null +++ b/src/configuration/InstanceConfigurationService.ts @@ -0,0 +1,73 @@ +/** + * InstanceConfigurationService is a singleton class that is used to store the configuration for the instances of classes exposed by the SDK such as: + * - StreamChat + * - Channel + * - Thread + * - MessageComposer + * + * Every existing and future instance configuration of the above classes will be set up using the following pattern: + * - StreamChat: StreamChat.setClientSetupFunction(setupFunction) + * - Channel: StreamChat.setChannelSetupFunction(setupFunction) + * - Thread: StreamChat.setThreadSetupFunction(setupFunction) + * - MessageComposer: StreamChat.setMessageComposerSetupFunction(setupFunction) + * + * The setupFunction is a function that is used to set up the instance configuration. 
+ */ + +import { StateStore } from '../store'; +import type { + ChannelSetupState, + MessageComposerSetupState, + SetInstanceConfigurationFunctions, + SetInstanceConfigurationServiceStates, + StreamChatSetupState, + ThreadSetupState, +} from './types'; + +type InstanceKey = keyof SetInstanceConfigurationServiceStates; + +export class InstanceConfigurationService { + private static instance: InstanceConfigurationService; + private setupStates: SetInstanceConfigurationServiceStates = { + Channel: new StateStore({ + setupFunction: null, + }), + MessageComposer: new StateStore({ + setupFunction: null, + }), + StreamChat: new StateStore({ + setupFunction: null, + }), + Thread: new StateStore({ + setupFunction: null, + }), + }; + + setSetupFunctions(setupFunctions: SetInstanceConfigurationFunctions) { + for (const [instance, setupFunction] of Object.entries(setupFunctions)) { + const setupState = + this.setupStates[instance as keyof SetInstanceConfigurationServiceStates]; + if (typeof setupState === 'undefined') return; // null is allowed + // todo: fix typing + (setupState as StateStore<{ setupFunction: unknown }>).partialNext({ + setupFunction: setupFunction as SetInstanceConfigurationFunctions[InstanceKey], + }); + } + } + + get Channel() { + return this.setupStates.Channel; + } + + get MessageComposer() { + return this.setupStates.MessageComposer; + } + + get StreamChat() { + return this.setupStates.StreamChat; + } + + get Thread() { + return this.setupStates.Thread; + } +} diff --git a/src/configuration/index.ts b/src/configuration/index.ts new file mode 100644 index 0000000000..fcb073fefc --- /dev/null +++ b/src/configuration/index.ts @@ -0,0 +1 @@ +export * from './types'; diff --git a/src/configuration/types.ts b/src/configuration/types.ts new file mode 100644 index 0000000000..1157e40709 --- /dev/null +++ b/src/configuration/types.ts @@ -0,0 +1,81 @@ +import type { StreamChat } from '../client'; +import type { MessageComposer } from '../messageComposer'; +import 
type { Channel } from '../channel'; +import type { Thread } from '../thread'; +import type { StateStore } from '../store'; + +export type MessageComposerTearDownFunction = () => void; + +export type MessageComposerSetupFunction = ({ + composer, +}: { + composer: MessageComposer; +}) => void | MessageComposerTearDownFunction; + +export type MessageComposerSetupState = { + /** + * Each `MessageComposer` runs this function each time its signature changes or + * whenever you run `MessageComposer.registerSubscriptions`. Function returned + * from `applyModifications` will be used as a cleanup function - it will be stored + * and ran before new modification is applied. Cleaning up only the + * modified parts is the general way to go but if your setup gets a bit + * complicated, feel free to restore the whole composer with `MessageComposer.restore`. + */ + setupFunction: MessageComposerSetupFunction | null; +}; + +export type StreamChatTearDownFunction = () => void; + +export type StreamChatSetupFunction = ({ + client, +}: { + client: StreamChat; +}) => void | StreamChatTearDownFunction; + +export type StreamChatSetupState = { + setupFunction: StreamChatSetupFunction | null; +}; + +export type ChannelTearDownFunction = () => void; + +export type ChannelSetupFunction = ({ + channel, +}: { + channel: Channel; +}) => void | ChannelTearDownFunction; + +export type ChannelSetupState = { + setupFunction: ChannelSetupFunction | null; +}; + +export type ThreadTearDownFunction = () => void; + +export type ThreadSetupFunction = ({ + thread, +}: { + thread: Thread; +}) => void | ThreadTearDownFunction; + +export type ThreadSetupState = { + setupFunction: ThreadSetupFunction | null; +}; + +export type SetInstanceConfigurationServiceStates = { + Channel: StateStore; + MessageComposer: StateStore; + StreamChat: StateStore; + Thread: StateStore; +}; + +export type SetupFnOf = + T extends StateStore + ? S extends { setupFunction?: infer F } + ? 
F + : never + : never; + +export type SetInstanceConfigurationFunctions = { + [K in keyof SetInstanceConfigurationServiceStates]?: SetupFnOf< + SetInstanceConfigurationServiceStates[K] + >; +}; diff --git a/src/messageComposer/messageComposer.ts b/src/messageComposer/messageComposer.ts index 08fd1b6890..5aaf56c648 100644 --- a/src/messageComposer/messageComposer.ts +++ b/src/messageComposer/messageComposer.ts @@ -41,6 +41,7 @@ export type EditingAuditState = { }; export type LocalMessageWithLegacyThreadId = LocalMessage & { legacyThreadId?: string }; +// todo: remove LocalMessageWithLegacyThreadId export type CompositionContext = Channel | Thread | LocalMessageWithLegacyThreadId; export type MessageComposerState = { @@ -475,15 +476,16 @@ export class MessageComposer extends WithSubscriptions { private subscribeMessageComposerSetupStateChange = () => { let tearDown: (() => void) | null = null; - const unsubscribe = this.client._messageComposerSetupState.subscribeWithSelector( - ({ setupFunction: setup }) => ({ - setup, - }), - ({ setup }) => { - tearDown?.(); - tearDown = setup?.({ composer: this }) ?? null; - }, - ); + const unsubscribe = + this.client.instanceConfigurationService.MessageComposer.subscribeWithSelector( + ({ setupFunction: setup }) => ({ + setup, + }), + ({ setup }) => { + tearDown?.(); + tearDown = setup?.({ composer: this }) ?? 
null; + }, + ); return () => { tearDown?.(); @@ -694,6 +696,7 @@ export class MessageComposer extends WithSubscriptions { }, localMessage: { attachments: [], + cid: this.channel.cid, // it is needed to match local paginator filters to be ingested into its state created_at, // only assigned to localMessage as this is used for optimistic update deleted_at: null, error: undefined, diff --git a/src/messageOperations/MessageOperationStatePolicy.ts b/src/messageOperations/MessageOperationStatePolicy.ts new file mode 100644 index 0000000000..82cc253356 --- /dev/null +++ b/src/messageOperations/MessageOperationStatePolicy.ts @@ -0,0 +1,85 @@ +import type { + APIErrorResponse, + ErrorFromResponse, + LocalMessage, + MessageResponse, +} from '../types'; +import { formatMessage } from '../utils'; + +export type MessageOperationStatePolicyContext = { + ingest: (m: LocalMessage) => void; + get: (id: string) => LocalMessage | undefined; +}; + +const parseError = (error: unknown): ErrorFromResponse => { + const stringError = JSON.stringify(error); + return ( + stringError ? JSON.parse(stringError) : {} + ) as ErrorFromResponse; +}; + +const isAlreadyExistsError = ( + error: unknown, + parsed: ErrorFromResponse, +) => + parsed.code === 4 && error instanceof Error && error.message.includes('already exists'); + +export class MessageOperationStatePolicy { + private ctx: MessageOperationStatePolicyContext; + + constructor(ctx: MessageOperationStatePolicyContext) { + this.ctx = ctx; + } + + optimistic(localMessage: LocalMessage) { + this.ctx.ingest({ + ...localMessage, + error: undefined, + status: + !localMessage.status || localMessage.status === 'failed' + ? 
'sending' + : localMessage.status, + }); + } + + success({ + messageFromResponse, + messageId, + }: { + messageFromResponse: MessageResponse; + messageId: string; + }) { + const formatted = formatMessage({ ...messageFromResponse, status: 'received' }); + const existing = this.ctx.get(messageId); + + if ( + !existing || + existing.updated_at.getTime() < formatted.updated_at.getTime() || + existing.status === 'sending' + ) { + this.ctx.ingest(formatted); + } + } + + failure({ + error, + localMessage, + messageId, + }: { + error: unknown; + localMessage: LocalMessage; + messageId: string; + }) { + const parsed = parseError(error); + + if (isAlreadyExistsError(error, parsed)) { + const existing = this.ctx.get(messageId); + if (existing?.status === 'sending') { + this.ctx.ingest({ ...localMessage, status: 'received' }); + } + return; + } + + this.ctx.ingest({ ...localMessage, status: 'failed', error: parsed }); + } +} diff --git a/src/messageOperations/MessageOperations.ts b/src/messageOperations/MessageOperations.ts new file mode 100644 index 0000000000..b2833cb2b9 --- /dev/null +++ b/src/messageOperations/MessageOperations.ts @@ -0,0 +1,107 @@ +// todo: add tests +import type { Message, UpdateMessageOptions } from '../types'; +import { localMessageToNewMessagePayload } from '../utils'; +import { MessageOperationStatePolicy } from './MessageOperationStatePolicy'; +import type { + MessageOperationsContext, + OperationKind, + OperationParams, + OperationRequestFn, +} from './types'; + +export class MessageOperations { + private ctx: MessageOperationsContext; + private policy: MessageOperationStatePolicy; + + constructor(ctx: MessageOperationsContext) { + this.ctx = ctx; + this.policy = new MessageOperationStatePolicy({ ingest: ctx.ingest, get: ctx.get }); + } + + private normalizeMessage(message: Message): Message { + return this.ctx.normalizeOutgoingMessage + ? 
this.ctx.normalizeOutgoingMessage(message) + : message; + } + + private async run( + params: OperationParams, + doRequest: OperationRequestFn, + ): Promise { + const messageId = params.localMessage.id; + + this.policy.optimistic(params.localMessage); + + try { + const { message: messageFromResponse } = await doRequest(params); + this.policy.success({ messageFromResponse, messageId }); + } catch (e) { + this.policy.failure({ error: e, localMessage: params.localMessage, messageId }); + throw e; + } + } + + async send( + params: OperationParams<'send'>, + requestFn?: OperationRequestFn<'send'>, + ): Promise { + const handlers = this.ctx.handlers(); + const messageToSend = this.normalizeMessage( + params.message ?? localMessageToNewMessagePayload(params.localMessage), + ); + + return await this.run<'send'>( + { ...params, message: messageToSend }, + requestFn ?? + handlers.send ?? + (async (p) => + await this.ctx.defaults.send(p.message ?? messageToSend, p.options)), + ); + } + + async retry( + params: OperationParams<'retry'>, + requestFn?: OperationRequestFn<'retry'>, + ): Promise { + const handlers = this.ctx.handlers(); + const messageToSend = this.normalizeMessage( + params.message ?? localMessageToNewMessagePayload(params.localMessage), + ); + + const send = handlers.send; + const sendAsRetry: OperationRequestFn<'retry'> | undefined = send + ? (p) => send({ ...p } as OperationParams<'send'>) + : undefined; + + return await this.run<'retry'>( + { ...params, message: messageToSend }, + requestFn ?? + handlers.retry ?? + sendAsRetry ?? + (async (p) => + await this.ctx.defaults.send(p.message ?? 
messageToSend, p.options)), + ); + } + + async update( + params: OperationParams<'update'>, + requestFn?: OperationRequestFn<'update'>, + ): Promise { + const handlers = this.ctx.handlers(); + let updateOptions: UpdateMessageOptions | undefined; + if (params.options) { + updateOptions = {}; + if (typeof params.options.skip_enrich_url === 'boolean') + updateOptions.skip_enrich_url = params.options.skip_enrich_url; + if (typeof params.options.skip_push === 'boolean') + updateOptions.skip_push = params.options.skip_push; + } + + return await this.run<'update'>( + params, + requestFn ?? + handlers.update ?? + (async (p) => await this.ctx.defaults.update(p.localMessage, updateOptions)), + ); + } +} diff --git a/src/messageOperations/index.ts b/src/messageOperations/index.ts new file mode 100644 index 0000000000..c97374605c --- /dev/null +++ b/src/messageOperations/index.ts @@ -0,0 +1,10 @@ +export { MessageOperations } from './MessageOperations'; +export { MessageOperationStatePolicy } from './MessageOperationStatePolicy'; +export type { + MessageOperationsContext, + MessageOperationsHandlers, + OperationKind, + OperationParams, + OperationRequestFn, + OperationResponse, +} from './types'; diff --git a/src/messageOperations/types.ts b/src/messageOperations/types.ts new file mode 100644 index 0000000000..1403646f40 --- /dev/null +++ b/src/messageOperations/types.ts @@ -0,0 +1,57 @@ +import type { + LocalMessage, + Message, + MessageResponse, + SendMessageAPIResponse, + SendMessageOptions, + UpdateMessageAPIResponse, + UpdateMessageOptions, +} from '../types'; + +export type OperationKind = 'send' | 'retry' | 'update'; + +export type MessageOperationSpec = { + send: { + options: SendMessageOptions; + requestResult: SendMessageAPIResponse; + }; + retry: { + options: SendMessageOptions; + requestResult: SendMessageAPIResponse; + }; + update: { + options: UpdateMessageOptions; + requestResult: UpdateMessageAPIResponse; + }; +}; + +export type OperationParams = { + 
localMessage: LocalMessage; + options?: MessageOperationSpec[K]['options']; +} & (K extends 'update' ? {} : { message?: Message }); + +export type OperationResponse = { message: MessageResponse }; + +export type OperationRequestFn = ( + params: OperationParams, +) => Promise; + +export type MessageOperationsHandlers = { + send?: OperationRequestFn<'send'>; + retry?: OperationRequestFn<'retry'>; + update?: OperationRequestFn<'update'>; +}; + +export type MessageOperationsContext = { + ingest: (m: LocalMessage) => void; + get: (id: string) => LocalMessage | undefined; + + normalizeOutgoingMessage?: (m: Message) => Message; + + defaults: { + send: (m: Message, o?: SendMessageOptions) => Promise; + update: (m: LocalMessage, o?: UpdateMessageOptions) => Promise; + }; + + handlers: () => MessageOperationsHandlers; +}; diff --git a/src/pagination/ItemIndex.ts b/src/pagination/ItemIndex.ts index 18310c9e4a..fe9d2df266 100644 --- a/src/pagination/ItemIndex.ts +++ b/src/pagination/ItemIndex.ts @@ -1,3 +1,7 @@ +export type ItemIndexOptions = { + getId: (item: T) => string; +}; + /** * The ItemIndex is a canonical, ID-addressable storage layer for domain items. 
* @@ -66,8 +70,11 @@ */ export class ItemIndex { private byId = new Map(); + private readonly getId: (item: T) => string; - constructor(private getId: (item: T) => string) {} + constructor(options: ItemIndexOptions) { + this.getId = options.getId; + } setMany(items: T[]) { for (const item of items) { @@ -91,7 +98,15 @@ export class ItemIndex { this.byId.delete(id); } + clear() { + this.byId.clear(); + } + entries() { return [...this.byId.entries()]; } + + values() { + return [...this.byId.values()]; + } } diff --git a/src/pagination/cursorDerivation/createdAtAroundPaginationFlags.ts b/src/pagination/cursorDerivation/createdAtAroundPaginationFlags.ts new file mode 100644 index 0000000000..f581e807f2 --- /dev/null +++ b/src/pagination/cursorDerivation/createdAtAroundPaginationFlags.ts @@ -0,0 +1,73 @@ +import { binarySearch } from '../sortCompiler'; +import type { BasePaginator, CursorDeriveContext, PaginationFlags } from '../paginators'; +import { ComparisonResult } from '../types.normalization'; + +export const deriveCreatedAtAroundPaginationFlags = < + T extends { id: string; created_at: Date }, + Q extends { created_at_around?: Date | string }, + P extends BasePaginator, +>({ + hasMoreHead, + hasMoreTail, + interval, + page, + paginator, + queryShape, + requestedPageSize, +}: CursorDeriveContext & { paginator: P }): PaginationFlags => { + let flags: PaginationFlags = { hasMoreHead, hasMoreTail }; + if (!queryShape?.created_at_around) return flags; + const createdAtAroundDate = new Date(queryShape.created_at_around); + const [firstPageItem, lastPageItem] = [page[0], page.slice(-1)[0]]; + + // expect ASC order (from oldest to newest) + const isAboveHeadBound = + paginator.sortComparator({ created_at: createdAtAroundDate } as T, lastPageItem) === + ComparisonResult.A_PRECEDES_B; + const isBelowTailBound = + paginator.sortComparator(firstPageItem, { created_at: createdAtAroundDate } as T) === + ComparisonResult.A_PRECEDES_B; + + const requestedPageSizeNotMet = + 
requestedPageSize > interval.itemIds.length && requestedPageSize > page.length; + const noMoreMessages = + (requestedPageSize > interval.itemIds.length || + interval.itemIds.length >= page.length) && + requestedPageSize > page.length; + + if (isAboveHeadBound) { + flags.hasMoreHead = false; + if (requestedPageSizeNotMet) { + flags.hasMoreTail = false; + } + } else if (isBelowTailBound) { + flags.hasMoreTail = false; + if (requestedPageSizeNotMet) { + flags.hasMoreHead = false; + } + } else if (noMoreMessages) { + flags = { hasMoreHead: false, hasMoreTail: false }; + } else { + const [firstPageMsgIsFirstInSet, lastPageMsgIsLastInSet] = [ + firstPageItem?.id && firstPageItem.id === interval.itemIds[0], + lastPageItem?.id && lastPageItem.id === interval.itemIds.slice(-1)[0], + ]; + + const midPointByCount = Math.floor(page.length / 2); + const { insertionIndex } = binarySearch({ + needle: { created_at: createdAtAroundDate } as T, + length: page.length, + getItemAt: (index) => page[index], + compare: (a, b) => a.created_at?.getTime() - b.created_at.getTime(), + itemIdentityEquals: (a, b) => a.created_at?.getTime() === b.created_at?.getTime(), + plateauScan: false, + }); + + if (insertionIndex !== -1) { + if (firstPageMsgIsFirstInSet) flags.hasMoreTail = midPointByCount <= insertionIndex; + if (lastPageMsgIsLastInSet) flags.hasMoreHead = midPointByCount >= insertionIndex; + } + } + + return flags; +}; diff --git a/src/pagination/cursorDerivation/idAroundPaginationFlags.ts b/src/pagination/cursorDerivation/idAroundPaginationFlags.ts new file mode 100644 index 0000000000..55c8a08747 --- /dev/null +++ b/src/pagination/cursorDerivation/idAroundPaginationFlags.ts @@ -0,0 +1,53 @@ +import type { CursorDeriveContext, PaginationFlags } from '../paginators'; + +export const deriveIdAroundPaginationFlags = < + T extends { id: string }, + Q extends { id_around?: string }, +>({ + hasMoreHead, + hasMoreTail, + interval, + page, + queryShape, + requestedPageSize, +}: 
CursorDeriveContext): PaginationFlags => { + let flags: PaginationFlags = { hasMoreHead, hasMoreTail }; + if (!queryShape?.id_around) return flags; + const { id_around } = queryShape; + + const [firstPageMsg, lastPageMsg] = [page[0], page.slice(-1)[0]]; + const [firstPageMsgIsFirstInSet, lastPageMsgIsLastInSet] = [ + firstPageMsg?.id === interval.itemIds[0], + lastPageMsg?.id === interval.itemIds.slice(-1)[0], + ]; + + const midPoint = Math.floor(page.length / 2); + const noMoreMessages = + (requestedPageSize > interval.itemIds.length || + interval.itemIds.length >= page.length) && + requestedPageSize > page.length; + + if (noMoreMessages) { + flags = { hasMoreHead: false, hasMoreTail: false }; + } else if (!page[midPoint]) { + return flags; + } else if (page[midPoint].id === id_around) { + flags = { hasMoreHead: true, hasMoreTail: true }; + } else { + const halves = [page.slice(0, midPoint), page.slice(midPoint)]; + if (firstPageMsgIsFirstInSet) { + const targetMsg = halves[0].find((message) => message.id === id_around); + if (targetMsg) { + flags.hasMoreTail = false; + } + } + if (lastPageMsgIsLastInSet) { + const targetMsg = halves[1].find((message) => message.id === id_around); + if (targetMsg) { + flags.hasMoreHead = false; + } + } + } + + return flags; +}; diff --git a/src/pagination/cursorDerivation/index.ts b/src/pagination/cursorDerivation/index.ts new file mode 100644 index 0000000000..26c4176739 --- /dev/null +++ b/src/pagination/cursorDerivation/index.ts @@ -0,0 +1 @@ +export * from './createdAtAroundPaginationFlags'; diff --git a/src/pagination/cursorDerivation/linearPaginationFlags.ts b/src/pagination/cursorDerivation/linearPaginationFlags.ts new file mode 100644 index 0000000000..2f2c337b15 --- /dev/null +++ b/src/pagination/cursorDerivation/linearPaginationFlags.ts @@ -0,0 +1,83 @@ +import type { CursorDeriveContext, PaginationFlags } from '../paginators'; +import type { MessagePaginationOptions, PaginationOptions } from '../../types'; + +const 
TAILWARD_QUERY_PROPERTIES: Array = [ + 'created_at_before_or_equal', + 'created_at_before', + 'id_lt', + 'id_lte', + 'offset', +]; + +const HEADWARD_QUERY_PROPERTIES: Array = [ + 'created_at_after_or_equal', + 'created_at_after', + 'id_gt', + 'id_gte', +]; +export const deriveLinearPaginationFlags = < + T extends { id: string; created_at: Date }, + Q extends PaginationOptions, +>({ + direction, + hasMoreHead, + hasMoreTail, + interval, + page, + queryShape, + requestedPageSize, +}: CursorDeriveContext): PaginationFlags => { + const flags: PaginationFlags = { hasMoreHead, hasMoreTail }; + const [firstPageMsg, lastPageMsg] = [page[0], page.slice(-1)[0]]; + const [firstPageMsgIsFirstInSet, lastPageMsgIsLastInSet] = [ + firstPageMsg?.id && firstPageMsg.id === interval.itemIds[0], + lastPageMsg?.id && lastPageMsg.id === interval.itemIds.slice(-1)[0], + ]; + + const containsCursorPaginationProperties = + !!queryShape && + HEADWARD_QUERY_PROPERTIES.concat(TAILWARD_QUERY_PROPERTIES).some( + (p) => typeof queryShape[p] !== 'undefined', + ); + + const queriedMessagesTowardsHead = + direction === 'headward' || + (!!queryShape && + HEADWARD_QUERY_PROPERTIES.some((p) => typeof queryShape[p] !== 'undefined')); + + const queriedMessagesTowardsTail = + direction === 'tailward' || + typeof queryShape === 'undefined' || + TAILWARD_QUERY_PROPERTIES.some((p) => typeof queryShape[p] !== 'undefined'); + + const containsNonLinearPaginationProperties = + !!(queryShape as MessagePaginationOptions)?.id_around || + !!(queryShape as MessagePaginationOptions)?.created_at_around; + + const containsUnrecognizedOptionsOnly = + !queriedMessagesTowardsHead && + !queriedMessagesTowardsTail && + !containsNonLinearPaginationProperties; + + const isFirstPage = !containsCursorPaginationProperties; + + const hasMore = page.length >= requestedPageSize; + + if ( + typeof queriedMessagesTowardsTail !== 'undefined' || + containsUnrecognizedOptionsOnly + ) { + hasMoreTail = !hasMoreTail ? 
false : hasMore; + } + if (typeof queriedMessagesTowardsHead !== 'undefined') { + hasMoreHead = !hasMoreHead || isFirstPage ? false : hasMore; + } + const pageIsEmpty = page.length === 0; + + if ((firstPageMsgIsFirstInSet || pageIsEmpty) && typeof hasMoreTail !== 'undefined') + flags.hasMoreTail = hasMoreTail; + if ((lastPageMsgIsLastInSet || pageIsEmpty) && typeof hasMoreHead !== 'undefined') + flags.hasMoreHead = hasMoreHead; + + return flags; +}; diff --git a/src/pagination/paginators/BasePaginator.ts b/src/pagination/paginators/BasePaginator.ts index 43fdfd2b10..8758226bef 100644 --- a/src/pagination/paginators/BasePaginator.ts +++ b/src/pagination/paginators/BasePaginator.ts @@ -2,203 +2,78 @@ import type { ItemLocation } from '../sortCompiler'; import { binarySearch } from '../sortCompiler'; import { itemMatchesFilter } from '../filterCompiler'; import { isPatch, StateStore, type ValueOrPatch } from '../../store'; -import { - debounce, - type DebouncedFunc, - generateUUIDv4, - normalizeQuerySort, - sleep, -} from '../../utils'; +import { debounce, type DebouncedFunc, generateUUIDv4, sleep } from '../../utils'; import type { FieldToDataResolver } from '../types.normalization'; import { ComparisonResult } from '../types.normalization'; -import type { ItemIndex } from '../ItemIndex'; +import { ItemIndex } from '../ItemIndex'; import { isEqual } from '../../utils/mergeWith/mergeWithCore'; import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../constants'; -import type { AscDesc } from '../..'; -import { - normalizeStringAccentInsensitive, - toEpochMillis, - toNumberLike, -} from '../utility.normalization'; const noOrderChange = () => 0; -const LIVE_HEAD_INTERVAL_ID = '__live_head__'; -const LIVE_TAIL_INTERVAL_ID = '__live_tail__'; -const MISSING_LOW = Number.NEGATIVE_INFINITY; // "smaller than anything" -const MISSING_HIGH = Number.POSITIVE_INFINITY; // "bigger than anything" +export const LOGICAL_HEAD_INTERVAL_ID = '__logical_head__'; +export const 
LOGICAL_TAIL_INTERVAL_ID = '__logical_tail__'; -type SortKeyScalar = number | string | null; - -/** - * Normalize a raw field value into a comparable scalar. - * - * Rules: - * - Date / ISO / epoch-like → epoch millis (number) - * - numeric-like string → number - * - boolean → boolean (or 0/1, see below) - * - string → normalized string (case/accent insensitive) - * - everything else → stringified fallback - */ -function normalizeForSort(x: unknown): SortKeyScalar { - // 1) Date-like - const d = toEpochMillis(x); - if (d !== null) return d; - - // 2) numeric-like - const n = toNumberLike(x); - if (n !== null) return n; - - // 3) boolean - if (typeof x === 'boolean') return x ? 1 : 0; - - // 4) string (accent-insensitive) - if (typeof x === 'string') { - return normalizeStringAccentInsensitive(x); - } - - // 5) fallback - return x == null ? null : String(x); -} - -/** - * Sortable value that represents the item according to the paginator’s comparator. - * A comparable key that lets you determine: - * “Does this item fall inside the sort boundaries of any given interval?” - */ -export type SortKey = number[]; - -// Encodes a string into a numeric sequence suitable for lexicographic comparison. -// 0 as a terminal sentinel ensures shorter prefix strings sort before longer ones (e.g. "a" before "aa"). -const STRING_SENTINEL_ASC = 0; - -function encodeStringComponents(s: string, direction: 1 | -1): number[] { - // Ascending: [charCode+1, ..., charCode+1, 0] - const base: number[] = []; - for (let i = 0; i < s.length; i++) { - base.push(s.charCodeAt(i) + 1); // > 0 - } - base.push(STRING_SENTINEL_ASC); // 0 < any charCode+1 - - // Descending = element-wise sign flip of the ascending sequence - if (direction === 1) return base; - return base.map((v) => -v); -} - -/** Compare two SortKeys. */ -export function compareSortKeys(a: SortKey, b: SortKey): number { - if (typeof a !== 'object' && typeof b !== 'object') { - return a < b - ? 
ComparisonResult.A_PRECEDES_B - : a > b - ? ComparisonResult.A_COMES_AFTER_B - : ComparisonResult.A_IS_EQUAL_TO_B; - } - - const arrA = a as (number | string)[]; - const arrB = b as (number | string)[]; - - const len = Math.min(arrA.length, arrB.length); - for (let i = 0; i < len; i++) { - if (arrA[i] < arrB[i]) return ComparisonResult.A_PRECEDES_B; - if (arrA[i] > arrB[i]) return ComparisonResult.A_COMES_AFTER_B; - } - - return arrA.length - arrB.length; -} - -function minSortKey(a: SortKey, b: SortKey): SortKey { - return compareSortKeys(a, b) <= 0 ? a : b; -} - -function maxSortKey(a: SortKey, b: SortKey): SortKey { - return compareSortKeys(a, b) >= 0 ? a : b; -} - -function mergeUniqueStrings(a: string[], b: string[]): string[] { - const set = new Set(a); - for (const id of b) { - if (!set.has(id)) { - set.add(id); - a.push(id); - } - } - return a; -} - -type Sort = Record; -// eslint-disable-next-line @typescript-eslint/no-explicit-any -type PathResolver = (item: T, path: string) => any; +type IntervalSortBounds = { start: T; end: T }; +type IntervalPaginationEdges = { head: T; tail: T }; export type LogicalInterval = { itemIds: string[]; - id: typeof LIVE_HEAD_INTERVAL_ID | typeof LIVE_TAIL_INTERVAL_ID; - /** Key of the first item according to sorting. */ - startKey: SortKey; - /** Key of the last item according to sorting. */ - endKey: SortKey; + id: typeof LOGICAL_HEAD_INTERVAL_ID | typeof LOGICAL_TAIL_INTERVAL_ID; }; export type Interval = { + hasMoreHead: boolean; + hasMoreTail: boolean; itemIds: string[]; id: string; - /** Key of the first item according to sorting. */ - startKey: SortKey; - /** Key of the last item according to sorting. */ - endKey: SortKey; /** * True if this interval represents the global head of the dataset * under the current sortComparator. 
* * Cursor pagination: - * prev === null + * headward === null * * Offset pagination: * offset === 0 */ - isHead?: boolean; + isHead: boolean; /** * True if this interval represents the global tail of the dataset * under the current sortComparator. * * Cursor pagination: - * next === null + * tailward === null * * Offset pagination: * returnedItems.length < pageSize */ - isTail?: boolean; + isTail: boolean; }; export type AnyInterval = Interval | LogicalInterval; +export type IntervalMergePolicy = 'auto' | 'strict-overlap-only'; + +type ItemIntervalCoordinates = ItemLocation & { + interval: Interval | LogicalInterval; +}; + export type ItemCoordinates = { /** Location inside state.items (visible list) */ state?: ItemLocation; /** Location inside an interval (anchored or logical) */ - interval?: ItemLocation & { - interval: Interval | LogicalInterval; - }; + interval?: ItemIntervalCoordinates; }; -const isLiveHeadInterval = (interval: AnyInterval): interval is LogicalInterval => - interval.id === LIVE_HEAD_INTERVAL_ID; +export const isLiveHeadInterval = (interval: AnyInterval): interval is LogicalInterval => + interval.id === LOGICAL_HEAD_INTERVAL_ID; -const isLiveTailInterval = (interval: AnyInterval): interval is LogicalInterval => - interval.id === LIVE_TAIL_INTERVAL_ID; +export const isLiveTailInterval = (interval: AnyInterval): interval is LogicalInterval => + interval.id === LOGICAL_TAIL_INTERVAL_ID; -/** - * Returns true if intervals A and B overlap. 
- * - * Overlap condition: - * A.startKey ≤ B.endKey AND B.startKey ≤ A.endKey - */ -function intervalsOverlap(a: Interval, b: Interval): boolean { - return ( - compareSortKeys(a.startKey, b.endKey) <= 0 && - compareSortKeys(b.startKey, a.endKey) <= 0 - ); -} +export const isLogicalInterval = (interval: AnyInterval): interval is LogicalInterval => + isLiveHeadInterval(interval) || isLiveTailInterval(interval); function cloneInterval(interval: Interval): Interval { return { @@ -207,45 +82,6 @@ function cloneInterval(interval: Interval): Interval { }; } -function mergeTwoAnchoredIntervals(preceding: Interval, following: Interval): Interval { - return { - ...preceding, - itemIds: mergeUniqueStrings([...preceding.itemIds], following.itemIds), - startKey: minSortKey(preceding.startKey, following.startKey), - endKey: maxSortKey(preceding.endKey, following.endKey), - isHead: preceding.isHead || following.isHead, - isTail: preceding.isTail || following.isTail, - }; -} - -/** - * Merges anchored intervals. Returns null if there are no intervals to merge. - */ -function mergeAnchoredIntervals(intervals: Interval[]): Interval | null { - if (intervals.length === 0) return null; - - const intervalsCopy = [...intervals]; - intervalsCopy.sort((a, b) => compareSortKeys(a.startKey, b.startKey)); - - let acc = cloneInterval(intervalsCopy[0]); - for (let i = 1; i < intervalsCopy.length; i++) { - const next = intervalsCopy[i]; - acc = mergeTwoAnchoredIntervals(acc, next); - } - - return acc; -} - -/** - * Whether a SortKey belongs to an anchored interval. 
- */ -function belongsToInterval(itemSortKey: SortKey, interval: Interval): boolean { - return ( - compareSortKeys(itemSortKey, interval.startKey) >= 0 && - compareSortKeys(itemSortKey, interval.endKey) <= 0 - ); -} - export type MakeIntervalParams = { page: T[]; isHead?: boolean; @@ -255,7 +91,17 @@ export type SetPaginatorItemsParams = { valueOrFactory: ValueOrPatch; cursor?: PaginatorCursor; + /** + * Relevant only if using item interval storage in the paginator. + * Indicates that the page would be the head of pagination intervals array. + * Items falling outside this interval's head bound will be merged into this interval. + */ isFirstPage?: boolean; + /** + * Relevant only if using item interval storage in the paginator. + * Indicates that the page would be the tail of pagination intervals array. + * Items falling outside this interval's tail bound will be merged into this interval. + */ isLastPage?: boolean; }; @@ -265,13 +111,70 @@ type MergeIntervalsResult = { logicalTail: LogicalInterval | null; }; -type PaginationDirection = 'next' | 'prev'; -export type PaginatorCursor = { next: string | null; prev: string | null }; +/** + * headward - going from page X -> X-Y -> 0 + * tailward - going from page 0 -> X -> X + Y ... + * + * Head is the place where new items are added - same as git. + * Tail is the place where retrieved pages are appended. + */ +export type PaginationDirection = 'headward' | 'tailward'; + +export type CursorDeriveContext = { + /** + * Current cursor to be merged with the newly derived cursor. + * Allows preserving the direction we have not paginated with the given request. + */ + cursor: PaginatorCursor | undefined; + /** + * Direction we just paginated in. + * + * May be undefined for non-directional queries (e.g. jump-to / *_around).
+ */ + direction: PaginationDirection | undefined; + hasMoreTail: boolean; + hasMoreHead: boolean; + /** The parent interval the page was ingested into (if any) */ + interval: Interval; + /** The page we just received after filtering */ + page: T[]; + /** Last query shape (sometimes useful for bespoke logic) */ + queryShape: Q | undefined; + /** Number we asked for */ + requestedPageSize: number; +}; + +export type PaginationFlags = { + hasMoreHead: boolean; + hasMoreTail: boolean; +}; + +export type CursorDeriveResult = PaginationFlags & { + cursor: PaginatorCursor | undefined; +}; + +export type CursorDerivator = ( + ctx: CursorDeriveContext, +) => CursorDeriveResult; +/** + * string - there is a next page in the given direction + * null - pagination in the given direction has been exhausted + * undefined - no page has been requested in the given pagination direction + */ +export type PaginatorCursor = { + tailward: string | null | undefined; + headward: string | null | undefined; +}; +export const ZERO_PAGE_CURSOR: PaginatorCursor = { + tailward: undefined, + headward: undefined, +}; + type StateResetPolicy = 'auto' | 'yes' | 'no' | (string & {}); export type PaginationQueryShapeChangeIdentifier = ( - prevQueryShape?: S, - nextQueryShape?: S, + toHeadQueryShape?: S, + toTailQueryShape?: S, ) => boolean; export type PaginationQueryParams = { @@ -282,11 +185,32 @@ export type PaginationQueryParams = { reset?: StateResetPolicy; /** Should retry the failed request given number of times. Default is 0. */ retryCount?: number; + /** Determines, whether the page loaded with the query will be committed to the paginator state. Default: true. 
*/ + updateState?: boolean; +}; + +export type PostQueryReconcileParams = Pick< + PaginationQueryParams, + 'direction' | 'queryShape' | 'updateState' +> & { + isFirstPage: boolean; + requestedPageSize: number; + results: PaginationQueryReturnValue | null; +}; + +export type ExecuteQueryReturnValue = { + /** + * State object resulting from the post query processing. + * The object is committed to the state if PaginationQueryParams['updateState'] === true. + */ + stateCandidate: Partial>; + /** In case the items are kept in intervals, the interval into which the page has been merged, will be returned. */ + targetInterval: AnyInterval | null; }; export type PaginationQueryReturnValue = { items: T[] } & { - next?: string; - prev?: string; + headward?: string; + tailward?: string; }; export type PaginatorDebounceOptions = { debounceMs: number; @@ -296,8 +220,8 @@ type DebouncedExecQueryFunction = DebouncedFunc< >; export type PaginatorState = { - hasNext: boolean; - hasPrev: boolean; + hasMoreHead: boolean; + hasMoreTail: boolean; isLoading: boolean; items: T[] | undefined; lastQueryError?: Error; @@ -305,6 +229,49 @@ export type PaginatorState = { offset?: number; }; +// todo: think whether plugins are necessary. Maybe we could just document how to add + +export type PaginatorItemsChangeProcessor = (params: { + nextItems: T[] | undefined; + previousItems: T[] | undefined; +}) => T[] | undefined; + +export interface PaginatorPlugin { + /** + * Optional plugin hook invoked immediately before the paginator emits a new + * `items` value to subscribers, but only when the `items` array has actually + * changed by reference. + * + * This hook allows plugins to post-process the visible items—such as + * deduplicating, normalizing, sorting, enriching, or otherwise transforming + * the array—at the final stage of state emission. The processed value becomes + * the `items` value delivered to subscribers. 
+ * + * Return a new array to replace `nextState.items`, or return `undefined` + * to leave the items unchanged. + * + * Executed in the order plugins are registered. + */ + onBeforeItemsEmitted?: PaginatorItemsChangeProcessor; + + // future hooks (examples) + // onQueryStart?(ctx: { params: PaginationQueryParams; paginator: BasePaginator }): void | Promise; + // onQuerySuccess?(ctx: { state: PaginatorState; results: PaginationQueryReturnValue; paginator: BasePaginator }): void | Promise; + // onQueryError?(ctx: { error: unknown; paginator: BasePaginator }): void | Promise; +} + +/** + * Optional list of plugins that can hook into paginator lifecycle events. + * + * Plugins allow you to encapsulate cross-cutting behavior (such as items + * post-processing, analytics, offline caching, etc.) without modifying + * the core paginator logic. Each plugin can register handlers like + * `onItemsChange` that are invoked when relevant events occur. + * + * All registered plugins are executed in the order they appear in this array. + */ +// plugins?: PaginatorPlugin[]; + export type PaginatorOptions = { /** The number of milliseconds to debounce the search query. The default interval is 300ms. */ debounceMs?: number; @@ -314,15 +281,24 @@ export type PaginatorOptions = { */ // eslint-disable-next-line @typescript-eslint/no-explicit-any hasPaginationQueryShapeChanged?: PaginationQueryShapeChangeIdentifier; + /** + * Optional hook to fully control cursor + hasMore logic in 'derived' mode. + * If not provided, BasePaginator uses its own default implementation. + */ + deriveCursor?: CursorDerivator; /** Custom function to retrieve items pages and optionally return a cursor in case of cursor pagination. */ doRequest?: (queryParams: Q) => Promise<{ items: T[]; cursor?: PaginatorCursor }>; /** In case of cursor pagination, specify the initial cursor value. */ initialCursor?: PaginatorCursor; /** In case of offset pagination, specify the initial offset value. 
*/ initialOffset?: number; - /** If item index is provided, this index ensures updates in place and all consumers have access to a single source of data. */ + /** If item index is provided, this index ensures updates in a single place and all consumers have access to a single source of data. */ itemIndex?: ItemIndex; - /** Will prevent changing the index of existing items. */ + /** + * Will prevent changing the index of existing items in state. + * If true, an item that is already visible keeps its relative position in the current items array when updated. + * It does not guarantee global stability across interval changes or page jumps. + */ lockItemOrder?: boolean; /** The item page size to be requested from the server. */ pageSize?: number; @@ -331,6 +307,7 @@ export type PaginatorOptions = { }; type OptionalPaginatorConfigFields = + | 'deriveCursor' | 'doRequest' | 'initialCursor' | 'initialOffset' @@ -371,22 +348,66 @@ export abstract class BasePaginator { /** * ItemIndex is a canonical, ID-addressable storage layer for domain items. * It serves as a single source of truth for all those that need to access the items - * outside of the paginator. + * outside the paginator. + */ + protected _itemIndex: ItemIndex; + /** + * Whether the paginator should maintain interval storage. + * + * Intervals are populated only when a caller provides an `itemIndex` instance. + * Otherwise the paginator behaves as a classic list paginator and mutates + * only `state.items`. */ - protected _itemIndex: ItemIndex | undefined; + protected _usesItemIntervalStorage: boolean; protected _executeQueryDebounced!: DebouncedExecQueryFunction; - protected _isCursorPagination = false; /** Last effective query shape produced by subclass for the most recent request. */ protected _lastQueryShape?: Q; protected _nextQueryShape?: Q; + /** + * Stable, performs purely item data-driven (age, last_message_at, etc.) comparison. + * Used under the hood + * 1. 
as a fallback by effectiveComparator / boostComparator if boost comparison is not conclusive + * 2. interval comparator + * + * Intervals cannot be sorted using boostComparator, because boosting the interval boundary (top item) + * would lead to the boosting of the entire interval when sorting the intervals. + * + * Sorting within a single interval should be done using effectiveComparator, which by default uses boostComparator. + */ sortComparator: (a: T, b: T) => number; protected _filterFieldToDataResolvers: FieldToDataResolver[]; protected boosts = new Map(); protected _maxBoostSeq = 0; + /** + * Describes how `interval.itemIds` are oriented relative to pagination semantics. + * + * - `true` => `itemIds[0]` is the pagination head edge (default) + * - `false` => `itemIds[itemIds.length - 1]` is the pagination head edge + * + * NOTE: This does not affect the *sorting* of `itemIds` (they are always kept + * in `sortComparator` order). It only affects which side is considered + * "head" for interval ordering and live ingestion decisions. + */ + protected get intervalItemIdsAreHeadFirst(): boolean { + return true; + } + + /** + * Determines the ordering of intervals in the internal interval list. + * + * This controls only the ordering of intervals relative to each other (by comparing + * their head edges using `sortComparator`). It is intentionally decoupled from: + * - the ordering of itemIds inside an interval + * - the meaning of the head edge (controlled by `intervalItemIdsAreHeadFirst`) + */ + protected get intervalSortDirection(): 'asc' | 'desc' { + return 'asc'; + } + protected constructor({ initialCursor, initialOffset, @@ -408,7 +429,8 @@ export abstract class BasePaginator { this.setDebounceOptions({ debounceMs }); this.sortComparator = noOrderChange; this._filterFieldToDataResolvers = []; - this._itemIndex = itemIndex; + this._usesItemIntervalStorage = !!itemIndex; + this._itemIndex = itemIndex ?? 
new ItemIndex({ getId: this.getItemId.bind(this) }); } // --------------------------------------------------------------------------- @@ -419,12 +441,12 @@ export abstract class BasePaginator { return this.state.getLatestValue().lastQueryError; } - get hasNext() { - return this.state.getLatestValue().hasNext; + get hasMoreTail() { + return this.state.getLatestValue().hasMoreTail; } - get hasPrev() { - return this.state.getLatestValue().hasPrev; + get hasMoreHead() { + return this.state.getLatestValue().hasMoreHead; } get hasResults() { @@ -444,10 +466,14 @@ export abstract class BasePaginator { return false; } + get isCursorPagination() { + return !!this.cursor; + } + get initialState(): PaginatorState { return { - hasNext: true, - hasPrev: true, + hasMoreHead: true, + hasMoreTail: true, isLoading: false, items: undefined, lastQueryError: undefined, @@ -489,6 +515,17 @@ export abstract class BasePaginator { return this.boostComparator; } + get intervalComparator() { + return (a: AnyInterval, b: AnyInterval) => { + const aEdges = this.getIntervalPaginationEdges(a); + const bEdges = this.getIntervalPaginationEdges(b); + if (!aEdges || !bEdges) return 0; + if (!aEdges) return 1; // move interval without bounds to the end + if (!bEdges) return -1; // keep interval a preceding b + return this.compareIntervalHeadEdges(aEdges.head, bEdges.head); + }; + } + get maxBoostSeq() { return this._maxBoostSeq; } @@ -497,20 +534,20 @@ export abstract class BasePaginator { return Array.from(this._itemIntervals.values()); } + protected get usesItemIntervalStorage(): boolean { + return this._usesItemIntervalStorage; + } + protected get liveHeadLogical(): LogicalInterval | undefined { - const itv = this._itemIntervals.get(LIVE_HEAD_INTERVAL_ID); + const itv = this._itemIntervals.get(LOGICAL_HEAD_INTERVAL_ID); return itv && isLiveHeadInterval(itv) ? 
itv : undefined; } protected get liveTailLogical(): LogicalInterval | undefined { - const itv = this._itemIntervals.get(LIVE_TAIL_INTERVAL_ID); + const itv = this._itemIntervals.get(LOGICAL_TAIL_INTERVAL_ID); return itv && isLiveTailInterval(itv) ? itv : undefined; } - protected get usesItemIntervalStorage(): boolean { - return !!this._itemIndex; - } - // --------------------------------------------------------------------------- // Abstracts // --------------------------------------------------------------------------- @@ -521,14 +558,6 @@ export abstract class BasePaginator { abstract filterQueryResults(items: T[]): T[] | Promise; - /** - * Should be implemented in child classes from the specific sort requirements followed by the child classes. - * Should return a value according to which the given item can be correctly inserted into the target item interval - * based on the current sort rules. - * @param item - */ - abstract computeSortKey(item: T): SortKey; - /** * Subclasses must return the query shape. */ @@ -570,48 +599,6 @@ export abstract class BasePaginator { return typeof id === 'string' ? this._itemIndex?.get(id) : undefined; } - // --------------------------------------------------------------------------- - // Sort key generator (optional helper) - // --------------------------------------------------------------------------- - - /** - * Factory function to create a sort key generator. - * Sort key generation must be consistent with the comparator logic. - * - * The resulting SortKey is an array of numbers, e.g. - * [{last_updated_at}, {}] - */ - makeSortKeyGenerator({ - sort, - resolvePathValue, - }: { - sort: Sort | Sort[]; - resolvePathValue: PathResolver; - }): (item: T) => SortKey { - const normalizedSort = normalizeQuerySort(sort); // [{ field, direction }, ...] 
- - return (item: T): SortKey => { - const key: SortKey = []; - - for (const { field, direction } of normalizedSort) { - const raw = resolvePathValue(item, field); - const normalized = normalizeForSort(raw); - if (normalized === null) { - // No usable value → push a sentinel that depends on direction. - key.push(direction === 1 ? MISSING_LOW : MISSING_HIGH); - } else if (typeof normalized === 'number') { - key.push(direction === 1 ? normalized : -normalized); - } else { - // string - // If most of your sorts are numeric/date and string sorts are asc-only, - // you can just store the string as-is: - key.push(...encodeStringComponents(normalized, direction)); - } - } - return key; - }; - } - // --------------------------------------------------------------------------- // Boosts // --------------------------------------------------------------------------- @@ -653,6 +640,7 @@ export abstract class BasePaginator { /** * Increases the item's importance when sorting. + * Boost affects position inside an item interval (if used), but should not redefine interval boundaries. * @param itemId * @param opts */ @@ -686,58 +674,376 @@ export abstract class BasePaginator { } // --------------------------------------------------------------------------- - // Interval helpers + // Interval manipulation // --------------------------------------------------------------------------- // eslint-disable-next-line @typescript-eslint/no-unused-vars - generateIntervalId(page: T[]): string { + generateIntervalId(page: (T | string)[]): string { return `interval-${generateUUIDv4()}`; } intervalToItems(interval: Interval | LogicalInterval): T[] { - return interval.itemIds + const items = interval.itemIds .map((id) => this._itemIndex?.get(id)) .filter((item): item is T => !!item); + + // When lockItemOrder is true, we must *not* reflect boosts in state.items. 
+ if (this.config.lockItemOrder) { + return items; + } + + // Visible ordering uses boost-aware comparator + return items.sort(this.effectiveComparator.bind(this)); } makeInterval({ page, isHead, isTail }: MakeIntervalParams): Interval { - const sorted = [...page].sort((a, b) => - compareSortKeys(this.computeSortKey(a), this.computeSortKey(b)), - ); + const sorted = [...page].sort((a, b) => this.sortComparator(a, b)); return { id: this.generateIntervalId(page), + // Default semantics: + // - if interval is known global head/tail, there is no more data in that direction + // - otherwise treat it as unknown => "has more" (until proven otherwise by a query) + hasMoreHead: isHead ? false : true, + hasMoreTail: isTail ? false : true, itemIds: sorted.map(this.getItemId.bind(this)), - startKey: this.computeSortKey(sorted[0]), - endKey: this.computeSortKey(sorted[sorted.length - 1]), - isHead, - isTail, + isHead: !!isHead, + isTail: !!isTail, + }; + } + + protected getCursorFromInterval(interval: Interval): PaginatorCursor { + // Prefer resolving edge items via sort bounds, because: + // - interval ordering can differ from interval sorting (intervalSortDirection) + // - "head" is a semantic concept (where new items appear), not necessarily `itemIds[0]` + // - itemIds are stored in sortComparator order, but we want the *pagination* edges + const edges = this.getIntervalPaginationEdges(interval); + + const fallbackFirstId = interval.itemIds[0] ?? null; + const fallbackLastId = interval.itemIds.slice(-1)[0] ?? null; + + const fallbackHeadId = this.intervalItemIdsAreHeadFirst + ? fallbackFirstId + : fallbackLastId; + const fallbackTailId = this.intervalItemIdsAreHeadFirst + ? fallbackLastId + : fallbackFirstId; + + const headId = edges?.head ? this.getItemId(edges.head) : fallbackHeadId; + const tailId = edges?.tail ? this.getItemId(edges.tail) : fallbackTailId; + + return { + headward: interval.hasMoreHead ? headId : null, + tailward: interval.hasMoreTail ? 
tailId : null, }; } - protected recomputeIntervalBoundaries(interval: AnyInterval): { - startKey: SortKey; - endKey: SortKey; - } { - // Recompute boundaries from the first and last items in the interval. - // Since ids are kept sorted by effectiveComparator, - // the first and last items define the correct startKey/endKey. + isActiveInterval(interval: AnyInterval): boolean { + return this._activeIntervalId === interval.id; + } + + setActiveInterval(interval: AnyInterval | undefined, opts?: { updateState?: boolean }) { + this._activeIntervalId = interval?.id; + + // Public API expectation: activating an anchored interval should immediately + // reflect its pagination ability in paginator state. + // + // Internal callers that are in the middle of a transactional `state.next()` + // update must pass `{ updateState: false }` and project these flags into the + // state object directly. + if (opts?.updateState === false) return; + if (!interval || isLogicalInterval(interval)) return; + + this.state.partialNext({ + items: this.intervalToItems(interval), + hasMoreHead: interval.hasMoreHead, + hasMoreTail: interval.hasMoreTail, + }); + } + + protected getIntervalSortBounds( + interval: Interval | LogicalInterval, + ): IntervalSortBounds | null { + if (!this.usesItemIntervalStorage) return null; const ids = interval.itemIds; - const first = this.getItem(ids[0]); - const last = this.getItem(ids[ids.length - 1]); + if (!this._itemIndex || ids.length === 0) return null; + const start = this._itemIndex?.get?.(ids[0]); + const end = this._itemIndex?.get?.(ids[ids.length - 1]); + return { start, end } as IntervalSortBounds; + } + + /** + * Returns pagination head/tail edges of an interval. + * + * IMPORTANT: + * - Edges are derived from the *sort bounds* of the interval (min/max under `sortComparator`). + * - Which bound is treated as the pagination "head" is controlled by `intervalItemIdsAreHeadFirst`. 
+ * - This is a semantic notion of head/tail (where new items are expected to appear), + * not necessarily "min/max under sortComparator". + * New items are always expected to appear at the head of the interval. + */ + protected getIntervalPaginationEdges( + interval: Interval | LogicalInterval, + ): IntervalPaginationEdges | null { + if (!this.usesItemIntervalStorage) return null; + const bounds = this.getIntervalSortBounds(interval); + if (!bounds) return null; + return this.intervalItemIdsAreHeadFirst + ? { head: bounds.start, tail: bounds.end } + : { head: bounds.end, tail: bounds.start }; + } + + protected compareIntervalHeadEdges(a: T, b: T): number { + const cmp = this.sortComparator(a, b); + return this.intervalSortDirection === 'asc' ? cmp : -cmp; + } - if (!first || !last) { - throw new Error('Invalid interval to recompute boundaries: empty item array'); + protected aIsMoreHeadwardThanB(a: T, b: T): boolean { + return this.intervalItemIdsAreHeadFirst + ? this.sortComparator(a, b) === ComparisonResult.A_PRECEDES_B + : this.sortComparator(b, a) === ComparisonResult.A_PRECEDES_B; + } + + protected aIsMoreTailwardThanB(a: T, b: T): boolean { + return this.intervalItemIdsAreHeadFirst + ? this.sortComparator(b, a) === ComparisonResult.A_PRECEDES_B + : this.sortComparator(a, b) === ComparisonResult.A_PRECEDES_B; + } + + protected getHeadIntervalFromSortedIntervals( + intervals: AnyInterval[], + ): AnyInterval | undefined { + if (intervals.length === 0) return undefined; + if (intervals.length === 1) return intervals[0]; + + const headIsLowerSortValue = this.intervalItemIdsAreHeadFirst; + const intervalsSortedAsc = this.intervalSortDirection === 'asc'; + + const headIndex = + headIsLowerSortValue === intervalsSortedAsc ? 
0 : intervals.length - 1; + return intervals[headIndex]; + } + + protected getTailIntervalFromSortedIntervals( + intervals: AnyInterval[], + ): AnyInterval | undefined { + if (intervals.length === 0) return undefined; + if (intervals.length === 1) return intervals[0]; + + const headIsLowerSortValue = this.intervalItemIdsAreHeadFirst; + const intervalsSortedAsc = this.intervalSortDirection === 'asc'; + + const tailIndex = + headIsLowerSortValue === intervalsSortedAsc ? intervals.length - 1 : 0; + return intervals[tailIndex]; + } + + protected sortIntervals(intervals: I[]): I[] { + const intervalsCopy = [...intervals]; + intervalsCopy.sort(this.intervalComparator.bind(this)); + return intervalsCopy; + } + + protected setIntervals(intervals: AnyInterval[]) { + this._itemIntervals = new Map(intervals.map((i) => [i.id, i])); + } + + protected intervalsStrictlyOverlap(a: AnyInterval, b: AnyInterval): boolean { + const aBounds = this.getIntervalSortBounds(a); + const bBounds = this.getIntervalSortBounds(b); + if (!aBounds || !bBounds) return false; + return ( + this.sortComparator(aBounds.start, bBounds.end) <= 0 && + this.sortComparator(bBounds.start, aBounds.end) <= 0 + ); + } + + /** + * Returns true if intervals A and B should be merged. + * + * 1) Strict overlap (range overlap in `sortComparator` order): + * A.min ≤ B.max AND B.min ≤ A.max + * + * 2) Forced merge (policy: 'auto' only): + * If one interval is marked as `isHead`/`isTail`, treat the other as mergeable + * when it extends beyond that interval's pagination head/tail edge + * (computed via `getIntervalPaginationEdges` + headward/tailward helpers). + * + * In 'strict-overlap-only' policy, only (1) applies. 
+ */ + protected intervalsOverlap( + a: AnyInterval, + b: AnyInterval, + policy: IntervalMergePolicy = 'auto', + ): boolean { + const aBounds = this.getIntervalSortBounds(a); + const bBounds = this.getIntervalSortBounds(b); + if (!aBounds || !bBounds) return false; + + // Strict overlap if: + // a.first <= b.last && b.first <= a.last + if ( + this.sortComparator(aBounds.start, bBounds.end) <= 0 && + this.sortComparator(bBounds.start, aBounds.end) <= 0 + ) + return true; + + // If policy is strict-overlap-only, return false if the intervals do not strictly overlap. + if (policy === 'strict-overlap-only') return false; + + const aIsHead = (a as Interval).isHead; + const bIsHead = (b as Interval).isHead; + const aIsTail = (a as Interval).isTail; + const bIsTail = (b as Interval).isTail; + + const aEdges = this.getIntervalPaginationEdges(a); + const bEdges = this.getIntervalPaginationEdges(b); + if (!aEdges || !bEdges) return false; + + if (bIsHead && this.aIsMoreHeadwardThanB(aEdges.head, bEdges.head)) return true; + if (aIsHead && this.aIsMoreHeadwardThanB(bEdges.head, aEdges.head)) return true; + if (bIsTail && this.aIsMoreTailwardThanB(aEdges.tail, bEdges.tail)) return true; + if (aIsTail && this.aIsMoreTailwardThanB(bEdges.tail, aEdges.tail)) return true; + + return false; + } + + /** + * Whether an item belongs to an anchored interval. + */ + protected belongsToInterval(item: T, interval: AnyInterval): boolean { + const sortBounds = this.getIntervalSortBounds(interval); + if (!sortBounds) return false; + const { start, end } = sortBounds; + if (this.sortComparator(start, item) <= 0 && this.sortComparator(item, end) <= 0) + return true; + + const edges = this.getIntervalPaginationEdges(interval); + if (!edges) return false; + + // Items beyond head/tail edges are considered belonging to the head/tail pages. 
+ if ((interval as Interval).isHead && this.aIsMoreHeadwardThanB(item, edges.head)) + return true; + + return (interval as Interval).isTail && this.aIsMoreTailwardThanB(item, edges.tail); + } + + protected mergeTwoAnchoredIntervals( + preceding: Interval, + following: Interval, + ): Interval { + const mergeIds = (a: string[], b: string[]): string[] => { + const itemIndex = this._itemIndex; + if (!itemIndex) return a; + + const seen = new Set(); + const merged: T[] = []; + const mergedIds: string[] = []; + + const pushId = (id: string) => { + if (seen.has(id)) return; + const item = itemIndex.get(id); + if (!item) return; + seen.add(id); + const { insertionIndex } = binarySearch({ + needle: item, + length: merged.length, + getItemAt: (index: number) => merged[index], + itemIdentityEquals: (item1, item2) => + this.getItemId(item1) === this.getItemId(item2), + // inter-interval operation sorts using the base comparator + compare: this.sortComparator.bind(this), + }); + if (insertionIndex > -1) { + merged.splice(insertionIndex, 0, item); + mergedIds.splice(insertionIndex, 0, this.getItemId(item)); + } + }; + + a.forEach(pushId); + b.forEach(pushId); + + return mergedIds; + }; + + const mergedItemIds = mergeIds(preceding.itemIds, following.itemIds); + + const precedingEdges = this.getIntervalPaginationEdges(preceding); + const followingEdges = this.getIntervalPaginationEdges(following); + + const isHead = preceding.isHead || following.isHead; + const isTail = preceding.isTail || following.isTail; + + // Default conservative merge: + // - if any contributor already concluded "no more" in a direction, keep that + let hasMoreHead = preceding.hasMoreHead && following.hasMoreHead; + let hasMoreTail = preceding.hasMoreTail && following.hasMoreTail; + + if (precedingEdges && followingEdges) { + const headMost = this.aIsMoreHeadwardThanB(precedingEdges.head, followingEdges.head) + ? 
preceding + : following; + const tailMost = this.aIsMoreTailwardThanB(precedingEdges.tail, followingEdges.tail) + ? preceding + : following; + + hasMoreHead = headMost.hasMoreHead; + hasMoreTail = tailMost.hasMoreTail; } - const startKey = this.computeSortKey(first); - const endKey = first === last ? startKey : this.computeSortKey(last); - return { startKey, endKey }; + return { + ...preceding, + itemIds: mergedItemIds, + // Boundary intervals stay boundaries even if their edge shifts due to forced merges. + hasMoreHead: isHead ? false : hasMoreHead, + hasMoreTail: isTail ? false : hasMoreTail, + isHead, + isTail, + }; + } + + /** + * Merges anchored intervals. Returns null if there are no intervals to merge. + */ + protected mergeAnchoredIntervals( + intervals: Interval[], + baseInterval?: Interval, + ): Interval | null { + if (intervals.length === 0) return null; + + const intervalsCopy = this.sortIntervals(intervals); + + let acc = cloneInterval(baseInterval ?? intervalsCopy[0]); + for (let i = baseInterval ? 0 : 1; i < intervalsCopy.length; i++) { + const next = intervalsCopy[i]; + acc = this.mergeTwoAnchoredIntervals(acc, next); + } + + return acc; } // --------------------------------------------------------------------------- - // Locate items + // Locate items and intervals // --------------------------------------------------------------------------- + protected locateIntervalIndex(interval: Interval): number { + const intervals = this.itemIntervals.filter( + (i) => !isLogicalInterval(i), + ) as Interval[]; + if (intervals.length === 0) return -1; + if (intervals.length === 1) return interval.id === intervals[0].id ? 
0 : -1; + + return binarySearch({ + needle: interval, + length: intervals.length, + // eslint-disable-next-line + getItemAt: (index: number) => { + return intervals[index]; + }, + itemIdentityEquals: (item1, item2) => item1.id === item2.id, + compare: this.intervalComparator.bind(this), + plateauScan: true, + }).currentIndex; + } /** * Locate item inside a specific interval using the same logic as locateByItem, * but scoped to interval items. @@ -757,7 +1063,8 @@ export abstract class BasePaginator { getItemAt: (index: number) => this.getItem(ids[index]), itemIdentityEquals: (item1, item2) => this.getItemId(item1) === this.getItemId(item2), - compare: this.effectiveComparator.bind(this), + // items in intervals are not sorted by effectiveComparator + compare: this.sortComparator.bind(this), plateauScan: true, }); } @@ -765,10 +1072,8 @@ export abstract class BasePaginator { protected locateIntervalForItem(item: T): AnyInterval | undefined { if (this._itemIntervals.size === 0) return undefined; - const itemSortKey = this.computeSortKey(item); - for (const itv of this.itemIntervals) { - if (belongsToInterval(itemSortKey, itv)) { + if (this.belongsToInterval(item, itv)) { return itv; } } @@ -801,7 +1106,7 @@ export abstract class BasePaginator { }); } - protected locateByItem = (item: T): ItemCoordinates => { + locateByItem = (item: T): ItemCoordinates => { const result: ItemCoordinates = {}; // 1. Search in visible state.items @@ -811,77 +1116,80 @@ export abstract class BasePaginator { } // 2. 
Search in intervals if interval-mode is active - if (this.usesItemIntervalStorage) { - const intervalLoc = this.locateByItemInIntervals(item); - if (intervalLoc) { - result.interval = intervalLoc; - } + const intervalLoc = this.locateByItemInIntervals(item); + if (intervalLoc) { + result.interval = intervalLoc; } return result; }; - findItem(needle: T): T | undefined { - const { state, interval } = this.locateByItem(needle); - if (state && state.current > -1) { - return (this.items ?? [])[state.current]; - } else if (interval && interval.current > -1) { - const id = interval.interval.itemIds[interval.current]; - return this.getItem(id); - } - return undefined; - } - // --------------------------------------------------------------------------- // Item ingestion // --------------------------------------------------------------------------- - /** - * Inserts an item ID into the interval in the correct sorted position, - * preserving interval ordering and updating start/end keys. - * Returns unchaged interval if the correct insertion position could not be determined. - */ - protected insertItemIdIntoInterval( - interval: I, - item: T, - ): I { - const id = this.getItemId(item); - const itemLocation = this.locateByItemInInterval({ item, interval }); - - if (!itemLocation) return interval; - - // If already at the correct position, nothing to change - if (itemLocation.current >= 0 && itemLocation.current === itemLocation.expected) { - return interval; - } + protected removeItemIdFromInterval({ + interval, + ...itemLocation + }: ItemIntervalCoordinates): ItemIntervalCoordinates { + if ( + // If already at the correct position, nothing to change + itemLocation.currentIndex >= 0 && + itemLocation.currentIndex === itemLocation.insertionIndex + ) + return { interval, ...itemLocation }; - const ids = [...interval.itemIds]; + const itemIds = [...interval.itemIds]; // Adjust insertion index if we are removing the item before reinserting index. 
// locateByItemInInterval() computed insertionIndex with the item still in the array. - let insertionIndex = itemLocation.expected; - if (itemLocation.current >= 0 && itemLocation.expected > itemLocation.current) { + let insertionIndex = itemLocation.insertionIndex; + if ( + itemLocation.currentIndex >= 0 && + itemLocation.insertionIndex > itemLocation.currentIndex + ) { insertionIndex--; } // Remove existing occurrence if present - if (itemLocation.current >= 0) { - ids.splice(itemLocation.current, 1); + if (itemLocation.currentIndex >= 0) { + itemIds.splice(itemLocation.currentIndex, 1); } + return { + interval: { ...interval, itemIds }, + currentIndex: itemLocation.currentIndex, + insertionIndex, + }; + } - // Insert at the new position - ids.splice(insertionIndex, 0, id); + /** + * Inserts an item ID into the interval in the correct sorted position. + * Returns unchanged interval if the correct insertion position could not be determined. + */ + protected insertItemIdIntoInterval( + interval: I, + item: T, + ): I { + const itemLocation = this.locateByItemInInterval({ item, interval }); + let insertionIndex = itemLocation?.insertionIndex; + let itemIds = [...interval.itemIds]; - const intervalWithUpdatedIds = { - ...interval, - itemIds: ids, - }; + if (itemLocation && itemLocation.insertionIndex > -1) { + const removal = this.removeItemIdFromInterval({ interval, ...itemLocation }); + insertionIndex = removal.insertionIndex; + itemIds = removal.interval.itemIds; + } - const boundaries = this.recomputeIntervalBoundaries(intervalWithUpdatedIds); + const id = this.getItemId(item); + + // Insert at the new position + if (typeof insertionIndex !== 'undefined' && insertionIndex > -1) { + itemIds.splice(insertionIndex, 0, id); + } return { - ...intervalWithUpdatedIds, - ...boundaries, + ...interval, + itemIds, }; } @@ -904,9 +1212,7 @@ export abstract class BasePaginator { continue; } - const key = this.computeSortKey(item); - - if (belongsToInterval(key, anchored)) 
mergeIds.push(id); + if (this.belongsToInterval(item, anchored)) mergeIds.push(id); else keepIds.push(id); } @@ -917,26 +1223,28 @@ export abstract class BasePaginator { merged = this.insertItemIdIntoInterval(merged, item); } - const remainingLogical = keepIds.length > 0 ? { ...logical, itemIds: keepIds } : null; - return { mergedAnchored: merged, - remainingLogical: remainingLogical && { - ...remainingLogical, - ...this.recomputeIntervalBoundaries(remainingLogical), - }, + remainingLogical: keepIds.length > 0 ? { ...logical, itemIds: keepIds } : null, }; } /** * Merges all intervals (anchored + logical head/tail). * Returns: - * - merged anchored interval (or null if none) + * - merged anchored interval (or null if none merged) * - possibly reduced logical head / tail intervals */ - protected mergeIntervals(intervals: AnyInterval[]): MergeIntervalsResult { + protected mergeIntervals( + intervals: AnyInterval[], + baseInterval?: Interval, + ): MergeIntervalsResult { let logicalHead: LogicalInterval | null = null; let logicalTail: LogicalInterval | null = null; + + if (intervals.length <= 1 && !baseInterval) + return { logicalHead, merged: null, logicalTail }; + const anchored: Interval[] = []; // Separate logical vs anchored @@ -952,7 +1260,7 @@ export abstract class BasePaginator { } // Merge anchored intervals into one interval (if possible) - const mergedAnchored = mergeAnchoredIntervals(anchored); + const mergedAnchored = this.mergeAnchoredIntervals(anchored, baseInterval); // No anchored intervals → just return logical ones if (!mergedAnchored) { @@ -991,22 +1299,29 @@ export abstract class BasePaginator { /** * Ingests the whole page into intervals and returns the resulting anchored interval. */ - protected ingestPage({ + ingestPage({ page, + policy = 'auto', isHead, isTail, targetIntervalId, + setActive, }: { page: T[]; + /** + * Describes the policy for merging intervals. + * - 'auto' (default): Merge intervals if they overlap. 
+ * - 'strict-overlap-only': Merge intervals only if they strictly overlap. Useful for jumping to a specific message. + * - This is useful for jumping to a specific message. + */ + policy?: IntervalMergePolicy; isHead?: boolean; isTail?: boolean; targetIntervalId?: string; + setActive?: boolean; }): Interval | null { - if (!this._itemIndex || !page?.length) return null; - - for (const item of page) { - this._itemIndex.setOne(item); - } + if (!this.usesItemIntervalStorage) return null; + if (!page?.length) return null; const pageInterval = this.makeInterval({ page, @@ -1014,47 +1329,128 @@ export abstract class BasePaginator { isTail, }); + for (const item of page) { + this._itemIndex.setOne(item); + } + const targetInterval = targetIntervalId ? this._itemIntervals.get(targetIntervalId) - : null; + : undefined; + + // Set the base interval in the following order of importance + // 1. if target interval + // a) is not logical interval and + // b) merge would not lead to corrupted interval sorting + // (pages: [a], [b,c], merging page [x] to [a] -> [a,x], [b,c] or pages: [b,c], [x] and merging [a] to [x] => [b,c], [a,x] ) + // 2. if one of the overlappingLogical is an active interval, use it as a base + // 3. 
if existing single anchored interval use it as a base + let baseInterval: Interval | undefined; // Find intervals that overlap with this page - const overlapping: Interval[] = []; + const overlappingAnchored: Interval[] = []; + const overlappingLogical: LogicalInterval[] = []; for (const itv of this.itemIntervals) { - // target will be appended separately + // target interval will be used as base if (targetInterval?.id === itv.id) continue; - if (intervalsOverlap(pageInterval, itv)) { - overlapping.push(itv); + if (this.intervalsOverlap(pageInterval, itv, policy)) { + if (this.isActiveInterval(itv) && !isLogicalInterval(itv)) { + baseInterval = itv; + } else { + if (!isLogicalInterval(itv)) overlappingAnchored.push(itv); + else overlappingLogical.push(itv); + } + } else if ( + (isHead && isLiveHeadInterval(itv)) || + (isTail && isLiveTailInterval(itv)) + ) { + overlappingLogical.push(itv); } } - const toMerge: AnyInterval[] = [...overlapping, pageInterval]; - if (targetInterval) { - toMerge.push(targetInterval); + // If caller specifies an anchored target interval, treat it as the merge anchor. + // The role of ingestPage method is to merge intervals that overlap + the target + // interval. Decision, whether target interval is a correct base interval is + // upon the ingestPage method caller, not ingestPage method, because the method + // does not know, in which context it has been invoked and cannot reliably tell, + // whether it is a valid move to merge into the target interval as when + // paginating linearly, the ingested page will never overlap with the previous page. 
+ if (targetInterval && !isLogicalInterval(targetInterval)) { + baseInterval = targetInterval; + } else if (!baseInterval && overlappingAnchored.length === 1) { + baseInterval = overlappingAnchored[0]; + overlappingAnchored.length = 0; } - const { logicalHead, merged, logicalTail } = this.mergeIntervals(toMerge); + const toMerge: AnyInterval[] = [ + ...overlappingLogical, + ...overlappingAnchored, + pageInterval, + ]; + const { logicalHead, merged, logicalTail } = this.mergeIntervals( + toMerge, + baseInterval, + ); + + let resultingInterval = pageInterval; // Remove all intervals that participated - for (const itv of toMerge) { - this._itemIntervals.delete(itv.id); + if (merged) { + resultingInterval = merged; + for (const itv of toMerge) { + if (merged.id === itv.id) continue; + this._itemIntervals.delete(itv.id); + } } - // Decide which anchored interval we keep for this page: - const resultingInterval = merged ?? pageInterval; - this._itemIntervals.set(resultingInterval.id, resultingInterval); - // Store logical head/tail (if any) if (logicalHead) { - this._itemIntervals.set(LIVE_HEAD_INTERVAL_ID, logicalHead); - } else { - this._itemIntervals.delete(LIVE_HEAD_INTERVAL_ID); + // the leftovers that do not pertain to the first page should be migrated to a separate anchored interval + if (merged?.isHead) { + const convertedInterval = { + id: this.generateIntervalId(logicalHead.itemIds), + hasMoreHead: true, + hasMoreTail: true, + itemIds: logicalHead.itemIds, + isHead: false, + isTail: false, + }; + this._itemIntervals.set(convertedInterval.id, convertedInterval); + } else { + this._itemIntervals.set(LOGICAL_HEAD_INTERVAL_ID, logicalHead); + } } if (logicalTail) { - this._itemIntervals.set(LIVE_TAIL_INTERVAL_ID, logicalTail); - } else { - this._itemIntervals.delete(LIVE_TAIL_INTERVAL_ID); + // the leftovers that do not pertain to the last page should be migrated to a separate anchored interval + if (merged?.isTail) { + const convertedInterval = { + id: 
this.generateIntervalId(logicalTail.itemIds), + hasMoreHead: true, + hasMoreTail: true, + itemIds: logicalTail.itemIds, + isHead: false, + isTail: false, + }; + this._itemIntervals.set(convertedInterval.id, convertedInterval); + } else { + this._itemIntervals.set(LOGICAL_TAIL_INTERVAL_ID, logicalTail); + } + } + + this._itemIntervals.set(resultingInterval.id, resultingInterval); + // keep the intervals sorted + this.setIntervals(this.sortIntervals(this.itemIntervals)); + + if ( + resultingInterval && + setActive // || this.isActiveInterval(resultingInterval) + ) { + this.setActiveInterval(resultingInterval, { updateState: false }); + this.state.partialNext({ + items: this.intervalToItems(resultingInterval), + hasMoreHead: resultingInterval.hasMoreHead, + hasMoreTail: resultingInterval.hasMoreTail, + }); } return resultingInterval; @@ -1072,101 +1468,197 @@ export abstract class BasePaginator { * If no intervals or no itemIndex exist, falls back to the legacy list-based ingestion. */ ingestItem(ingestedItem: T): boolean { - // If we don't have itemIndex, manipulate only items array in paginator state and not intervals - // as intervals do not store the whole items and have to rely on _itemIndex if (!this.usesItemIntervalStorage) { const items = this.items ?? []; - const next = items.slice(); - const { current: existingIndex, expected: insertionIndex } = binarySearch({ - needle: ingestedItem, - length: items.length, - getItemAt: (index: number) => items[index], - itemIdentityEquals: (item1, item2) => - this.getItemId(item1) === this.getItemId(item2), - compare: this.effectiveComparator.bind(this), - plateauScan: true, - }); + const id = this.getItemId(ingestedItem); + const existingIndex = items.findIndex((i) => this.getItemId(i) === id); + const hadItem = existingIndex > -1; + + const nextItems = items.slice(); + if (hadItem) nextItems.splice(existingIndex, 1); + // If it no longer matches the filter, we only commit the removal (if any). 
if (!this.matchesFilter(ingestedItem)) { - if (existingIndex >= 0) { - next.splice(existingIndex, 1); - this.state.partialNext({ items: next }); - return true; - } - return false; + if (hadItem) this.state.partialNext({ items: nextItems }); + return hadItem; } - // override the existing item even though it already exists to make sure it is up-to-date - if (existingIndex >= 0) { - next.splice(existingIndex, 1); - } + // Determine insertion index against the list without the old snapshot. + const insertionIndex = + binarySearch({ + needle: ingestedItem, + length: nextItems.length, + getItemAt: (index: number) => nextItems[index], + itemIdentityEquals: (item1, item2) => + this.getItemId(item1) === this.getItemId(item2), + compare: this.effectiveComparator.bind(this), + plateauScan: true, + }).insertionIndex ?? -1; + + const keepOrderInState = this.config.lockItemOrder && hadItem; + const insertAt = keepOrderInState ? existingIndex : insertionIndex; + if (insertAt < 0) return false; + + nextItems.splice(insertAt, 0, ingestedItem); + this.state.partialNext({ items: nextItems }); + return true; + } - const insertAt = - this.config.lockItemOrder && existingIndex >= 0 ? existingIndex : insertionIndex; + const id = this.getItemId(ingestedItem); + const previousItem = this._itemIndex.get(id); - next.splice(insertAt, 0, ingestedItem); - this.state.partialNext({ items: next }); - return true; + // 0. PRE-ANALYSIS: capture previous coordinates BEFORE any mutations + const previousCoords = this.locateByItem(previousItem || ingestedItem); + + const originalIndexInState = previousCoords?.state?.currentIndex ?? -1; + const keepOrderInState = this.config.lockItemOrder && originalIndexInState >= 0; + + // 1. Remove the old snapshot from state & intervals. 
+ let removedItemCoordinates: ItemCoordinates | undefined; + if (previousCoords) { + removedItemCoordinates = this.removeItemAtCoordinates(previousCoords); } + const itemHasBeenRemoved = + !!removedItemCoordinates?.state && removedItemCoordinates.state.currentIndex > -1; - // Always update the itemIndex if present - this._itemIndex?.setOne(ingestedItem); + // 2. Update canonical storage (ItemIndex) to the *new* snapshot, + // regardless of filters – this keeps the index authoritative. + this._itemIndex.setOne(ingestedItem); - // Ingestion into anchored intervals - let targetInterval = this.locateIntervalForItem(ingestedItem); + // 3. If it no longer matches the filter, we’re done (it has been removed above). + if (!this.matchesFilter(ingestedItem)) { + return itemHasBeenRemoved; + } - // if no page has been loaded yet or the anchored interval could not be found, - // because the relevant page has not been loaded yet, - // keep the incoming items in logical interval if falls outside of the head and tail boundaries - if (!targetInterval) { - let targetLogical: LogicalInterval | undefined; - // add to head or tail if item exceeds the total bounds - if (this._itemIntervals.size > 0) { - const intervalsArray = this.itemIntervals; - const [firstInterval, lastInterval] = [ - intervalsArray[0], - intervalsArray.slice(-1)[0], - ]; - const itemSortKey = this.computeSortKey(ingestedItem); - if ( - isLiveHeadInterval(firstInterval) && - compareSortKeys(itemSortKey, firstInterval.startKey) <= - ComparisonResult.A_PRECEDES_B - ) { - targetLogical = firstInterval; - } else if ( - isLiveTailInterval(lastInterval) && - compareSortKeys(itemSortKey, lastInterval.endKey) >= - ComparisonResult.A_COMES_AFTER_B - ) { - targetLogical = lastInterval; - } - // ingested item would fall somewhere inside the boundaries but relevant page has not been loaded yet - // and thus the interval is not identifiable - if (!targetLogical) return false; + // If we don't have itemIndex, manipulate only 
items array in paginator state and not intervals + // as intervals do not store the whole items and have to rely on _itemIndex + // if (!this.usesItemIntervalStorage) { + // const items = this.items ?? []; + // const newItems = items.slice(); + // + // // Recompute insertionIndex for the *new* snapshot against the updated list (original removed). + // const insertionIndex = this.locateItemInState(ingestedItem)?.insertionIndex ?? -1; + // + // const insertAt = keepOrderInState ? originalIndexInState : insertionIndex; + // + // if (insertAt < 0) return false; // corruption guard + // + // newItems.splice(insertAt, 0, ingestedItem); + // this.state.partialNext({ items: newItems }); + // return true; + // } + + const previousInterval = previousCoords?.interval?.interval; + + const onlyLogicalIntervals = + this.itemIntervals.length <= 2 && + this.itemIntervals.every((itv) => isLogicalInterval(itv)); + // IMPORTANT: decide if the new snapshot still belongs to the same anchored interval, + // using the OLD bounds. + const stillBelongsToPreviousAnchoredInterval = + previousInterval && + // 1) If we *only* have logical intervals and the item used to live in one of them, + // keep it there. This prevents items from disappearing on update. + ((onlyLogicalIntervals && isLogicalInterval(previousInterval)) || + // 2) Normal: for anchored intervals, only reuse if the new snapshot is still + // within that interval's sort bounds. + (!isLogicalInterval(previousInterval) && + this.belongsToInterval(ingestedItem, previousInterval))); + + let targetInterval = stillBelongsToPreviousAnchoredInterval + ? previousInterval + : this.locateIntervalForItem(ingestedItem); + const { liveHeadLogical, liveTailLogical } = this; - targetInterval = this.insertItemIdIntoInterval(targetLogical, ingestedItem); - } else { - // no page has been loaded yet + if (!targetInterval) { + // No anchored interval currently contains the new snapshot. 
+ // Decide whether it belongs to logical head, logical tail, + // or to a brand-new anchored interval. + if (this._itemIntervals.size === 0) { + // No pages at all yet → keep in logical head. targetInterval = { - id: LIVE_HEAD_INTERVAL_ID, + id: LOGICAL_HEAD_INTERVAL_ID, itemIds: [this.getItemId(ingestedItem)], - startKey: this.computeSortKey(ingestedItem), - endKey: this.computeSortKey(ingestedItem), }; - if (!this._activeIntervalId) { - this._activeIntervalId = targetInterval.id; + this.setActiveInterval(targetInterval); + } + } else { + const intervals = this.itemIntervals; + const headInterval = this.getHeadIntervalFromSortedIntervals(intervals); + const tailInterval = this.getTailIntervalFromSortedIntervals(intervals); + const headEdges = headInterval && this.getIntervalPaginationEdges(headInterval); + const tailEdges = tailInterval && this.getIntervalPaginationEdges(tailInterval); + + if (headEdges && this.aIsMoreHeadwardThanB(ingestedItem, headEdges.head)) { + // Falls before the loaded head → logical head. + targetInterval = liveHeadLogical + ? this.insertItemIdIntoInterval(liveHeadLogical, ingestedItem) + : { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: [this.getItemId(ingestedItem)], + }; + } else if (tailEdges && this.aIsMoreTailwardThanB(ingestedItem, tailEdges.tail)) { + // Falls after the loaded tail → logical tail. + targetInterval = liveTailLogical + ? this.insertItemIdIntoInterval(liveTailLogical, ingestedItem) + : { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: [this.getItemId(ingestedItem)], + }; + } else { + // Falls somewhere *inside* the global bounds, but we don't have that page loaded. + // We’ve already removed any old occurrence, so from the paginator's perspective + // this item won't be visible again until the relevant page is fetched. + return itemHasBeenRemoved; } } } else { + // Found an anchored interval whose bounds contain the new snapshot. 
targetInterval = this.insertItemIdIntoInterval(targetInterval, ingestedItem); } + const addedNewInterval = !this._itemIntervals.has(targetInterval.id); this._itemIntervals.set(targetInterval.id, targetInterval); - if (this._activeIntervalId === targetInterval.id) { - this.state.partialNext({ items: this.intervalToItems(targetInterval) }); + if (addedNewInterval) { + this.setIntervals(this.sortIntervals(this.itemIntervals)); + } + + // emit new state if active interval impacted by ingestion + if ( + this._activeIntervalId && + [targetInterval.id, removedItemCoordinates?.interval?.interval.id].includes( + this._activeIntervalId, + ) + ) { + const items = this.items ?? []; + /** + * Having config.lockItemOrder enabled when working with intervals will lead to + * discrepancies once active intervals are switched: + * 1. state.items [a,b,c] intervals [a,b,c], [d] + * 2. a changed and is moved to another interval state.items is now [a,b,c], intervals [b,c,], [d, a] + * 3. jumping / changing active interval to [d,a] - state.items is now [d,a], intervals [b,c], [d,a] + */ + if (keepOrderInState) { + // Item was visible before → reinsert at its old index + const nextView = items.slice(); + const insertAt = Math.min(originalIndexInState, nextView.length); + nextView.splice(insertAt, 0, ingestedItem); + this.state.partialNext({ items: nextView }); + } else { + /** + * Select a correct interval from which the state.items array is derived + */ + this.state.partialNext({ + items: this.intervalToItems( + this._activeIntervalId === removedItemCoordinates?.interval?.interval.id && + this._activeIntervalId !== targetInterval.id + ? 
removedItemCoordinates.interval.interval + : targetInterval, + ), + }); + } } return true; @@ -1176,36 +1668,80 @@ export abstract class BasePaginator { // Remove / contains // --------------------------------------------------------------------------- - removeItem({ id, item: inputItem }: { id?: string; item?: T }): boolean { - if (!id && !inputItem) return false; - const item = inputItem ?? this.getItem(id); - // not in item index, and no item provided (cannot locate by item), so we will not check intervals, - // only state items and sequentially - if (!this._itemIndex || !item) { - const index = this.items?.findIndex((i) => this.getItemId(i) === id) ?? -1; - if (index === -1) return false; + protected removeItemAtCoordinates(coords: ItemCoordinates): ItemCoordinates { + const { state: stateLocation, interval: intervalLocation } = coords; + + const result: ItemCoordinates = { + state: { currentIndex: -1, insertionIndex: -1 }, + }; + + // 1) Remove from interval, if present + if (intervalLocation && intervalLocation.currentIndex > -1) { + const updatedInterval = this.removeItemIdFromInterval(intervalLocation); + const { interval } = updatedInterval; + if (interval.itemIds.length === 0) { + // Drop empty interval + this._itemIntervals.delete(interval.id); + + // If it was active -> clear active + if (this.isActiveInterval(interval)) { + this.setActiveInterval(undefined); + } + } else { + this._itemIntervals.set(updatedInterval.interval.id, updatedInterval.interval); + } + result.interval = updatedInterval; + } + + // 2) Remove from visible state.items, if present + if (stateLocation && stateLocation.currentIndex > -1) { const newItems = [...(this.items ?? 
[])]; - newItems.splice(index, 1); + newItems.splice(stateLocation.currentIndex, 1); this.state.partialNext({ items: newItems }); - return true; + + // keep insertionIndex consistent if someone uses it later + if (stateLocation.insertionIndex > stateLocation.currentIndex) { + stateLocation.insertionIndex--; + } + + result.state = stateLocation; } - const { state: stateLocation, interval: intervalLocation } = this.locateByItem(item); + return result; + } + + /** + * Meaning of location values + * - currentIndex === -1 could not be found + * - insertionIndex === -1 insertion index was no intended to be determined + * + * If we are removing the last item from the currently active interval, we do not search for a new active interval. + * If the number of items approach 0 in an active interval, we expect from the UI to load new pages to populate + * the active interval. + */ + removeItem({ id, item: inputItem }: { id?: string; item?: T }): ItemCoordinates { + const noAction = { state: { currentIndex: -1, insertionIndex: -1 } }; + if (!id && !inputItem) return noAction; - if (intervalLocation && intervalLocation.current > -1) { - const itemIds = [...intervalLocation.interval.itemIds]; - itemIds.splice(intervalLocation.current, 1); - const newInterval: AnyInterval = { ...intervalLocation.interval, itemIds }; - const boundaries = this.recomputeIntervalBoundaries(newInterval); - this._itemIntervals.set(newInterval.id, { ...newInterval, ...boundaries }); + const item = inputItem ?? this.getItem(id); + + if (item) { + const coords = this.locateByItem(item); + if (!coords.state && !coords.interval) return noAction; + return this.removeItemAtCoordinates(coords); } - if (stateLocation && stateLocation.current > -1) { + // Fallback for state-only mode (sequential scan in state.items) + if (!this.usesItemIntervalStorage) { + const index = this.items?.findIndex((i) => this.getItemId(i) === id) ?? -1; + if (index === -1) return noAction; const newItems = [...(this.items ?? 
[])]; - newItems.splice(stateLocation.current, 1); + newItems.splice(index, 1); this.state.partialNext({ items: newItems }); + return { state: { currentIndex: index, insertionIndex: -1 } }; } - return true; + + return noAction; } /** Sets the items in the state. If intervals are kept, the active interval will be updated */ @@ -1234,12 +1770,18 @@ export abstract class BasePaginator { newState.offset = newItems.length; } - const interval = this.ingestPage({ - page: newItems, - isHead: isFirstPage, - isTail: isLastPage, - }); - if (interval) this._activeIntervalId = interval.id; + if (this.usesItemIntervalStorage) { + const interval = this.ingestPage({ + page: newItems, + isHead: isFirstPage, + isTail: isLastPage, + }); + if (interval) { + this.setActiveInterval(interval, { updateState: false }); + newState.hasMoreHead = interval.hasMoreHead; + newState.hasMoreTail = interval.hasMoreTail; + } + } return newState; }); @@ -1266,11 +1808,13 @@ export abstract class BasePaginator { protected canExecuteQuery = ({ direction, reset, - }: { direction: PaginationDirection } & Pick, 'reset'>) => + }: { direction?: PaginationDirection } & Pick, 'reset'>) => !this.isLoading && (reset === 'yes' || - (direction === 'next' && this.hasNext) || - (direction === 'prev' && this.hasPrev)); + // If direction is undefined, we are jumping to a specific message. 
+ typeof direction === 'undefined' || + (direction === 'tailward' && this.hasMoreTail) || + (direction === 'headward' && this.hasMoreHead)); isFirstPageQuery = ( params: { queryShape?: unknown } & Pick, 'reset'>, @@ -1289,8 +1833,14 @@ export abstract class BasePaginator { }; } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + isJumpQueryShape(queryShape: Q): boolean { + return false; + } + protected getStateAfterQuery( stateUpdate: Partial>, + // eslint-disable-next-line @typescript-eslint/no-unused-vars isFirstPage: boolean, ): PaginatorState { const current = this.state.getLatestValue(); @@ -1299,9 +1849,7 @@ export abstract class BasePaginator { lastQueryError: undefined, ...stateUpdate, isLoading: false, - items: isFirstPage - ? stateUpdate.items - : [...(this.items ?? []), ...(stateUpdate.items || [])], + items: stateUpdate.items, }; } @@ -1345,12 +1893,21 @@ export abstract class BasePaginator { } } + /** + * Falsy return value means query was not successful. + * @param direction + * @param forcedQueryShape + * @param reset + * @param retryCount + * @param updateState + */ async executeQuery({ - direction = 'next', + direction, queryShape: forcedQueryShape, reset, retryCount = 0, - }: PaginationQueryParams = {}) { + updateState = true, + }: PaginationQueryParams = {}): Promise | void> { const queryShape = forcedQueryShape ?? 
this.getNextQueryShape({ direction }); if (!this.canExecuteQuery({ direction, reset })) return; @@ -1379,50 +1936,141 @@ export abstract class BasePaginator { reset, retryCount, }); - this._lastQueryShape = this._nextQueryShape; + + return await this.postQueryReconcile({ + direction, + isFirstPage, + queryShape, + requestedPageSize: this.pageSize, + results, + updateState, + }); + } + + async postQueryReconcile({ + direction, + isFirstPage, + queryShape, + requestedPageSize, + results, + updateState = true, + }: PostQueryReconcileParams): Promise> { + this._lastQueryShape = queryShape; this._nextQueryShape = undefined; + const stateUpdate: Partial> = { + isLoading: false, + }; + if (!results) { - this.state.partialNext({ isLoading: false }); - return; + this.state.partialNext(stateUpdate); + return { stateCandidate: stateUpdate, targetInterval: null }; } - const stateUpdate: Partial> = { - lastQueryError: undefined, - }; + const { items, headward, tailward } = results; + + stateUpdate.lastQueryError = undefined; + const filteredItems = await this.filterQueryResults(items); + stateUpdate.items = filteredItems; + + // State-only mode: merge pages into a single list. + if (!this.usesItemIntervalStorage) { + const currentItems = this.items ?? []; + if (!isFirstPage) { + // In state-only mode we treat pagination as a growing list. + // Both directions extend the same list (cursor semantics are expressed by the cursor, not by list "side"). + stateUpdate.items = [...currentItems, ...filteredItems]; + } + } - const { items, next, prev } = results; - if (isFirstPage && (next || prev)) { - this._isCursorPagination = true; + const isJumpQuery = !!queryShape && this.isJumpQueryShape(queryShape); + const interval = this.usesItemIntervalStorage + ? this.ingestPage({ + page: stateUpdate.items, + policy: isJumpQuery ? 'strict-overlap-only' : 'auto', + // the first page should be always marked as head + isHead: isJumpQuery + ? 
undefined //head/tail doesn't apply / is unknown for this ingestion + : isFirstPage || + (direction === 'headward' ? requestedPageSize > items.length : undefined), + // even though the page is first, we have to compare the requested vs returned page size + isTail: isJumpQuery + ? undefined //head/tail doesn't apply / is unknown for this ingestion + : isFirstPage || direction === 'tailward' + ? requestedPageSize > items.length + : undefined, + targetIntervalId: isJumpQuery ? undefined : this._activeIntervalId, + }) + : null; + if (interval && updateState) { + this.setActiveInterval(interval, { updateState: false }); + stateUpdate.items = this.intervalToItems(interval); } - if (this._isCursorPagination) { - stateUpdate.cursor = { next: next || null, prev: prev || null }; - stateUpdate.hasNext = !!next; - stateUpdate.hasPrev = !!prev; + /** + * Cursor can be calculated client-side or returned from the server. + * Therefore, the BasePaginator.cursorSource can be 'derived' | 'query' + * - derived - the BasePaginator applies the default client-side logic based on the pagination options (id_lt, id_gt, id_around...) + * - query - BasePaginator.query() resp. BasePaginator.config.doRequest (called inside query()) is expected to provide the cursor and abide by the rules that when the wall is hit in + * a given direction, the cursor will be set to null. + * + * The 'derived' calculation will perform the following steps: + * 1. After ingesting into the parent interval determine the cursor candidate values from the first and the last item in the interval. + * 2. Decide, whether the candidates can be set based on the requested vs real page size + * 3. If the page size from the response is smaller that the requested page size, then in the given direction + * the cursor will be set to null. 
+ */ + if (this.isCursorPagination) { + if (this.config.deriveCursor && interval) { + const { cursor, hasMoreTail, hasMoreHead } = this.config.deriveCursor({ + direction, + interval, + queryShape, + page: results.items, + requestedPageSize, + cursor: this.cursor, + hasMoreHead: this.hasMoreHead, + hasMoreTail: this.hasMoreTail, + }); + stateUpdate.cursor = cursor; + stateUpdate.hasMoreTail = hasMoreTail; + stateUpdate.hasMoreHead = hasMoreHead; + } else { + stateUpdate.cursor = { tailward: tailward || null, headward: headward || null }; + stateUpdate.hasMoreTail = !!tailward; + stateUpdate.hasMoreHead = !!headward; + } } else { + // todo: we could keep the offset in two directions (initial tailward offset would be taken from config.initialOffset) stateUpdate.offset = (this.offset ?? 0) + items.length; - stateUpdate.hasNext = items.length === this.pageSize; + stateUpdate.hasMoreTail = items.length === this.pageSize; } - stateUpdate.items = await this.filterQueryResults(items); - - // ingest page into intervals if itemIndex is present - const interval = this.ingestPage({ - page: stateUpdate.items, - isHead: !stateUpdate.hasNext, - isTail: !stateUpdate.hasPrev, - targetIntervalId: this._activeIntervalId, - }); - // item index is available if an Interval is returned if (interval) { - this._activeIntervalId = interval.id; - stateUpdate.items = this.intervalToItems(interval); + const current = this.state.getLatestValue(); + const resolvedHasMoreHead = + typeof stateUpdate.hasMoreHead === 'boolean' + ? stateUpdate.hasMoreHead + : current.hasMoreHead; + const resolvedHasMoreTail = + typeof stateUpdate.hasMoreTail === 'boolean' + ? 
stateUpdate.hasMoreTail + : current.hasMoreTail; + + interval.hasMoreHead = resolvedHasMoreHead; + interval.hasMoreTail = resolvedHasMoreTail; + interval.isHead = resolvedHasMoreHead === false; + interval.isTail = resolvedHasMoreTail === false; } const state = this.getStateAfterQuery(stateUpdate, isFirstPage); - this.state.next(state); + if (updateState) this.state.next(state); this.populateOfflineDbAfterQuery({ items: state.items, queryShape }); + + return { + stateCandidate: state, + targetInterval: interval, + }; } // --------------------------------------------------------------------------- @@ -1435,27 +2083,29 @@ export abstract class BasePaginator { resetState() { this.state.next(this.initialState); + this.setIntervals([]); + this.setActiveInterval(undefined); } - next = (params: Omit, 'direction' | 'queryShape'> = {}) => - this.executeQuery({ direction: 'next', ...params }); + toTail = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.executeQuery({ direction: 'tailward', ...params }); - prev = (params: Omit, 'direction' | 'queryShape'> = {}) => - this.executeQuery({ direction: 'prev', ...params }); + toHead = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.executeQuery({ direction: 'headward', ...params }); - nextDebounced = ( + toTailDebounced = ( params: Omit, 'direction' | 'queryShape'> = {}, ) => { - this._executeQueryDebounced({ direction: 'next', ...params }); + this._executeQueryDebounced({ direction: 'tailward', ...params }); }; - prevDebounced = ( + toHeadDebounced = ( params: Omit, 'direction' | 'queryShape'> = {}, ) => { - this._executeQueryDebounced({ direction: 'prev', ...params }); + this._executeQueryDebounced({ direction: 'headward', ...params }); }; reload = async () => { - await this.next({ reset: 'yes' }); + await this.toTail({ reset: 'yes' }); }; } diff --git a/src/pagination/paginators/ChannelPaginator.ts b/src/pagination/paginators/ChannelPaginator.ts index 9d9ca3ea71..49f0a32f17 100644 --- 
a/src/pagination/paginators/ChannelPaginator.ts +++ b/src/pagination/paginators/ChannelPaginator.ts @@ -5,7 +5,6 @@ import type { PaginatorOptions, PaginatorState, SetPaginatorItemsParams, - SortKey, } from './BasePaginator'; import { BasePaginator } from './BasePaginator'; import type { FilterBuilderOptions } from '../FilterBuilder'; @@ -285,14 +284,6 @@ export class ChannelPaginator extends BasePaginator baseFilters: { ...this.staticFilters }, }); - computeSortKey(item: Channel): SortKey { - const generateSortKey = super.makeSortKeyGenerator({ - sort: this.sort, - resolvePathValue: channelSortPathResolver, - }); - return generateSortKey(item); - } - // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; protected getNextQueryShape(): ChannelQueryShape { const shape: ChannelQueryShape = { diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts new file mode 100644 index 0000000000..ea46d3f1f7 --- /dev/null +++ b/src/pagination/paginators/MessagePaginator.ts @@ -0,0 +1,520 @@ +import type { + AnyInterval, + CursorDerivator, + CursorDeriveResult, + ExecuteQueryReturnValue, + Interval, + PaginationDirection, + PaginationQueryParams, + PaginatorCursor, + PaginatorState, + PostQueryReconcileParams, +} from './BasePaginator'; +import { + BasePaginator, + isLogicalInterval, + type PaginationQueryReturnValue, + type PaginationQueryShapeChangeIdentifier, + type PaginatorOptions, + ZERO_PAGE_CURSOR, +} from './BasePaginator'; +import type { + AscDesc, + LocalMessage, + MessagePaginationOptions, + PinnedMessagePaginationOptions, +} from '../../types'; +import type { Channel } from '../../channel'; +import { StateStore } from '../../store'; +import { formatMessage, generateUUIDv4 } from '../../utils'; +import { makeComparator } from '../sortCompiler'; +import type { FieldToDataResolver } from '../types.normalization'; +import { resolveDotPathValue } from '../utility.normalization'; +import { 
ItemIndex } from '../ItemIndex'; +import { deriveCreatedAtAroundPaginationFlags } from '../cursorDerivation'; +import { deriveIdAroundPaginationFlags } from '../cursorDerivation/idAroundPaginationFlags'; +import { deriveLinearPaginationFlags } from '../cursorDerivation/linearPaginationFlags'; + +export type JumpToMessageOptions = { pageSize?: number }; + +export type MessagePaginatorSort = { created_at: AscDesc } | { created_at: AscDesc }[]; + +export type MessagePaginatorFilter = { + cid: string; +}; + +const DEFAULT_BACKEND_SORT: MessagePaginatorSort = { + created_at: 1, +}; + +// server's default size is 100 +const DEFAULT_CHANNEL_MESSAGE_LIST_PAGE_SIZE = 100; + +export type MessagePaginatorState = PaginatorState; +export type MessageQueryShape = MessagePaginationOptions | PinnedMessagePaginationOptions; + +/** + * At the moment all the pagination parameters are just different types of cursors, e.g. + * id_lt, id_gt, ... + * But we always paginate within the same list without changing the sorting params. + * It is currently not possible to change the sorting params. + */ +const hasPaginationQueryShapeChanged: PaginationQueryShapeChangeIdentifier< + MessageQueryShape +> = () => false; + +const dataFieldFilterResolver: FieldToDataResolver = { + matchesField: () => true, + resolve: (message, path) => resolveDotPathValue(message, path), +}; + +export type MessagePaginatorOptions = { + channel: Channel; + id?: string; + itemIndex?: ItemIndex; + paginatorOptions?: PaginatorOptions; + /** + * Controls whether `jumpToTheFirstUnreadMessage()` should prefer the `unreadStateSnapshot` + * state over `channel.state.read[...]`. 
+ * + * - 'snapshot' (default): retrieve the first unread message id from the unreadStateSnapshot state when jumping to the first unread message + * - 'read-state-only': retrieve the last read message id from the channel read state when jumping to the first unread message + */ + unreadReferencePolicy?: 'snapshot' | 'read-state-only'; +}; + +export type UnreadSnapshotState = { + lastReadAt: Date | null; + unreadCount: number; + /** + * Snapshot of the first unread message id for the user. + * This is intentionally decoupled from `channel.state.read[...]` because apps + * may mark the channel read immediately on open, while still wanting to render + * UI indicators that jump to the previously-unread location. + */ + firstUnreadMessageId: string | null; + /** + * Snapshot of the last read message id for the user (fallback when first unread + * is not known). + */ + lastReadMessageId: string | null; +}; + +/** + * MessagePaginator does not allow for sorting or filtering the items, because it is based on channel.query() and + not client.search() calls. So the paginator just updates the cursor. + */ +export class MessagePaginator extends BasePaginator { + private readonly _id: string; + private channel: Channel; + private unreadReferencePolicy: 'snapshot' | 'read-state-only'; + /** + * Independent unread reference state (not tied to `channel.state.read`). + * Consumers may set this right before calling markRead / when opening a channel. + */ + readonly unreadStateSnapshot: StateStore; + protected _sort = DEFAULT_BACKEND_SORT; + protected _nextQueryShape: MessageQueryShape | undefined; + sortComparator: (a: LocalMessage, b: LocalMessage) => number; + /** + * Single source of truth for whether a message should be included in paginator intervals/state. + * Keep this consistent with `filterQueryResults` AND cursor flag derivation. 
+ */ + shouldIncludeMessageInInterval(message: LocalMessage): boolean { + return !message.shadowed; + } + + protected get intervalItemIdsAreHeadFirst(): boolean { + // Messages are stored in chronological order (created_at asc) within an interval. + // Pagination "head" (newest side) is therefore at the END of the `itemIds` array. + return false; + } + + protected get intervalSortDirection(): 'asc' | 'desc' { + // Head edge is newest, but sortComparator is created_at asc => newer head edges + // should come first => reverse interval ordering. + return 'desc'; + } + + constructor({ + channel, + id, + itemIndex = new ItemIndex({ getId: (item) => item.id }), + paginatorOptions, + unreadReferencePolicy = 'snapshot', + }: MessagePaginatorOptions) { + super({ + hasPaginationQueryShapeChanged, + initialCursor: ZERO_PAGE_CURSOR, + itemIndex, + ...paginatorOptions, + pageSize: paginatorOptions?.pageSize ?? DEFAULT_CHANNEL_MESSAGE_LIST_PAGE_SIZE, + }); + this.config.deriveCursor = makeDeriveCursor(this); + this.channel = channel; + this._id = id ?? `message-paginator-${generateUUIDv4()}`; + this._sort = DEFAULT_BACKEND_SORT; + this.unreadReferencePolicy = unreadReferencePolicy; + this.unreadStateSnapshot = new StateStore({ + lastReadAt: null, + firstUnreadMessageId: null, + lastReadMessageId: null, + unreadCount: 0, + }); + this.sortComparator = makeComparator({ + sort: this._sort, + resolvePathValue: resolveDotPathValue, + tiebreaker: (l, r) => { + const leftId = this.getItemId(l); + const rightId = this.getItemId(r); + return leftId < rightId ? -1 : leftId > rightId ? 1 : 0; + }, + }); + this.setFilterResolvers([dataFieldFilterResolver]); + } + + get id() { + return this._id; + } + + get sort() { + return this._sort ?? DEFAULT_BACKEND_SORT; + } + + /** + * Even though we do not send filters object to the server, we need to have filters for client-side item ingestion logic. 
+ */ + buildFilters = (): MessagePaginatorFilter => ({ + cid: this.channel.cid, + }); + + // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; + protected getNextQueryShape({ + direction, + }: Omit< + PaginationQueryParams, + 'isFirstPageQuery' + >): MessageQueryShape { + return { + limit: this.pageSize, + [direction === 'tailward' ? 'id_lt' : 'id_gt']: + direction && this.cursor?.[direction], + }; + } + + getCursorFromQueryResults = ({ + direction, + items, + }: { + direction?: PaginationDirection; + items: LocalMessage[]; + }) => { + if (!items.length) { + return { + tailward: undefined, + headward: undefined, + }; + } + + const start = items[0]; + const end = items[items.length - 1]; + + // Newer side is the pagination head for messages. Which bound is considered "head" + // is determined by intervalItemIdsAreHeadFirst (see BasePaginator.getIntervalPaginationEdges). + const head = this.intervalItemIdsAreHeadFirst ? start : end; + const tail = this.intervalItemIdsAreHeadFirst ? end : start; + + // if there is no direction, then we are jumping, and we want to set both directions in the cursor + return { + tailward: !direction || direction === 'tailward' ? this.getItemId(tail) : undefined, + headward: !direction || direction === 'headward' ? this.getItemId(head) : undefined, + }; + }; + + query = async ({ + direction, + }: PaginationQueryParams): Promise< + PaginationQueryReturnValue + > => { + // get the params only if they were not generated previously + if (!this._nextQueryShape) { + this._nextQueryShape = this.getNextQueryShape({ direction }); + } + + const options = this._nextQueryShape; + let items: LocalMessage[]; + let tailward: string | undefined; + let headward: string | undefined; + if (this.config.doRequest) { + const result = await this.config.doRequest(options); + items = result?.items ?? 
[]; + // if there is no direction, then we are jumping, and we want to set both directions in the cursor + tailward = + !direction || direction === 'tailward' + ? (result.cursor?.tailward ?? undefined) + : undefined; + headward = + !direction || direction === 'headward' + ? (result.cursor?.headward ?? undefined) + : undefined; + } else { + const { messages } = await this.channel.query({ + messages: options, + // todo: why do we query for watchers? + // watchers: { limit: this.pageSize }, + }); + items = messages.map(formatMessage); + const cursor = this.getCursorFromQueryResults({ direction, items }); + tailward = cursor.tailward; + headward = cursor.headward; + } + + return { items, headward, tailward }; + }; + + /** + * Invokes the super.postQueryReconcile() and takes unread state snapshot on the first page query. + * The snapshot has to be taken immediately after the query as the viewed channel is marked read immediately after opening it. + * The snapshot can be used to display unread UI indicators. + */ + async postQueryReconcile( + params: PostQueryReconcileParams, + ): Promise> { + const result = await super.postQueryReconcile(params); + + // Take unread state snapshot + const ownUserId = this.channel.getClient().user?.id; + const ownReadState = ownUserId ? 
this.channel.state.read[ownUserId] : undefined; + if (ownReadState && params.isFirstPage) { + this.setUnreadSnapshot({ + firstUnreadMessageId: null, + lastReadAt: ownReadState.last_read, + lastReadMessageId: ownReadState.last_read_message_id, + unreadCount: ownReadState.unread_messages, + }); + } + return result; + } + + isJumpQueryShape(queryShape: MessageQueryShape): boolean { + return ( + !!queryShape?.id_around || + !!(queryShape as MessagePaginationOptions)?.created_at_around + ); + } + + jumpToMessage = async ( + messageId: string, + { pageSize }: JumpToMessageOptions = {}, + ): Promise => { + let localMessage = this.getItem(messageId); + let interval: AnyInterval | undefined; + let state: Partial> | undefined; + if (localMessage) { + interval = this.locateIntervalForItem(localMessage); + } + + if (localMessage && interval && !isLogicalInterval(interval)) { + state = { + hasMoreHead: interval.hasMoreHead, + hasMoreTail: interval.hasMoreTail, + cursor: this.getCursorFromInterval(interval), + items: this.intervalToItems(interval), + }; + } else if (!localMessage || !interval || isLogicalInterval(interval)) { + const result = await this.executeQuery({ + queryShape: { id_around: messageId, limit: pageSize }, + updateState: false, + }); + localMessage = this.getItem(messageId); + if (!localMessage || !result || !result.targetInterval) { + this.channel.getClient().notifications.addError({ + message: 'Jump to message unsuccessful', + origin: { emitter: 'MessagePaginator.jumpToMessage', context: { messageId } }, + options: { type: 'api:messages:query:failed' }, + }); + return false; + } + interval = result.targetInterval; + state = isLogicalInterval(interval) + ? result.stateCandidate + : { + ...result.stateCandidate, + hasMoreHead: interval.hasMoreHead, + hasMoreTail: interval.hasMoreTail, + // Prefer the cursor derived during postQueryReconcile, but fall back to + // interval-derived cursor to keep jumps consistent if the stateCandidate + // is partial. 
+ cursor: result.stateCandidate.cursor ?? this.getCursorFromInterval(interval), + items: this.intervalToItems(interval), + }; + } + + if (!this.isActiveInterval(interval)) { + this.setActiveInterval(interval, { updateState: false }); + if (state) this.state.partialNext(state); + } + return true; + }; + + jumpToTheLatestMessage = async (options?: JumpToMessageOptions): Promise => { + let latestMessageId: string | undefined; + const intervals = this.itemIntervals; + if (!(intervals[0] as Interval)?.isHead) { + // get the first page (in case the pagination has not started at the head) + await this.executeQuery({ direction: 'headward', updateState: false }); + } + + const headInterval = intervals[0]; + if ((intervals[0] as Interval)?.isHead) { + latestMessageId = headInterval.itemIds.slice(-1)[0]; + } + + if (!latestMessageId) { + this.channel.getClient().notifications.addError({ + message: 'Jump to latest message unsuccessful', + origin: { emitter: 'MessagePaginator.jumpToTheLatestMessage' }, + options: { type: 'api:message:query:failed' }, + }); + return false; + } + + return await this.jumpToMessage(latestMessageId, options); + }; + + /** + * Jumps to the unread reference message. + * + * IMPORTANT: This intentionally does *not* rely on `channel.state.read[ownUserId]` only, + * because apps may mark a channel read immediately after opening it, while still + * wanting to keep "jump to unread" UI indicators alive (based on a snapshot). + */ + jumpToTheFirstUnreadMessage = async (options?: JumpToMessageOptions) => { + const ownUserId = this.channel.getClient().user?.id; + if (!ownUserId) return false; + + const unreadSnapshot = + this.unreadReferencePolicy === 'snapshot' + ? 
this.unreadStateSnapshot.getLatestValue() + : { firstUnreadMessageId: null, lastReadMessageId: null }; + const firstUnreadFromSnapshot = unreadSnapshot.firstUnreadMessageId; + const lastReadFromSnapshot = unreadSnapshot.lastReadMessageId; + + const firstUnreadFromReadState = + this.channel.state.read[ownUserId]?.first_unread_message_id ?? null; + const lastReadFromReadState = + this.channel.state.read[ownUserId]?.last_read_message_id ?? null; + + const firstUnreadMessageId = firstUnreadFromSnapshot ?? firstUnreadFromReadState; + if (firstUnreadMessageId) { + return await this.jumpToMessage(firstUnreadMessageId, options); + } + + const lastReadMessageId = lastReadFromSnapshot ?? lastReadFromReadState; + if (!lastReadMessageId) return false; + return await this.jumpToMessage(lastReadMessageId, options); + }; + + setUnreadSnapshot = (next: Partial): UnreadSnapshotState => { + this.unreadStateSnapshot.partialNext(next); + return this.unreadStateSnapshot.getLatestValue(); + }; + + clearUnreadSnapshot = () => { + this.unreadStateSnapshot.next({ + firstUnreadMessageId: null, + lastReadMessageId: null, + lastReadAt: null, + unreadCount: 0, + }); + }; + + filterQueryResults = (items: LocalMessage[]) => + items.filter(this.shouldIncludeMessageInInterval.bind(this)); +} + +const makeDeriveCursor = + (paginator: MessagePaginator): CursorDerivator => + (ctx) => { + // Not included in the interval (filtered out by MessagePaginator.filterQueryResults). + // + // IMPORTANT: We must keep cursor derivation consistent with the ingested interval. + // The interval is built from the filtered page, but ctx.page contains the raw response. + // Around/linear derivators compare page edges and lengths against interval.itemIds. If we + // pass a page that includes locally filtered messages (e.g. shadowed), those comparisons + // can incorrectly conclude that the page is not at the dataset bounds. 
+ const pageWithPermittedMessages: LocalMessage[] = []; + let filteredLocallyCount = 0; + for (const message of ctx.page) { + if (!paginator.shouldIncludeMessageInInterval(message)) { + filteredLocallyCount++; + } else { + pageWithPermittedMessages.push(message); + } + } + + const requestedPageSizeAfterAdjustment = Math.max( + 0, + ctx.requestedPageSize - filteredLocallyCount, + ); + + if ( + ctx.interval && + ctx.interval.itemIds.length + filteredLocallyCount < ctx.page.length + ) { + console.error( + 'error', + 'Corrupted message set state: parent set size < returned page size', + ); + return { + cursor: ctx.cursor, + hasMoreHead: ctx.hasMoreHead, + hasMoreTail: ctx.hasMoreTail, + }; + } + + const injectCursor = ({ + hasMoreHead, + hasMoreTail, + }: { + hasMoreHead: boolean; + hasMoreTail: boolean; + }): CursorDeriveResult => { + const cursor: PaginatorCursor = { + headward: !hasMoreHead ? null : (ctx.interval?.itemIds.slice(-1)[0] ?? null), + tailward: !hasMoreTail ? null : (ctx.interval?.itemIds[0] ?? 
null), + }; + return { cursor, hasMoreHead, hasMoreTail }; + }; + + if ((ctx.queryShape as MessagePaginationOptions)?.created_at_around) { + return injectCursor( + deriveCreatedAtAroundPaginationFlags< + LocalMessage, + MessagePaginationOptions, + MessagePaginator + >({ + ...ctx, + paginator, + page: pageWithPermittedMessages, + requestedPageSize: requestedPageSizeAfterAdjustment, + }), + ); + } else if (ctx.queryShape?.id_around) { + return injectCursor( + deriveIdAroundPaginationFlags({ + ...ctx, + page: pageWithPermittedMessages, + requestedPageSize: requestedPageSizeAfterAdjustment, + }), + ); + } else { + return injectCursor( + deriveLinearPaginationFlags({ + ...ctx, + page: pageWithPermittedMessages, + requestedPageSize: requestedPageSizeAfterAdjustment, + }), + ); + } + }; diff --git a/src/pagination/paginators/MessageReplyPaginator.ts b/src/pagination/paginators/MessageReplyPaginator.ts new file mode 100644 index 0000000000..415be87d53 --- /dev/null +++ b/src/pagination/paginators/MessageReplyPaginator.ts @@ -0,0 +1,301 @@ +import type { + AnyInterval, + Interval, + PaginationQueryParams, + PaginatorState, +} from './BasePaginator'; +import { isLogicalInterval, ZERO_PAGE_CURSOR } from './BasePaginator'; +import { + BasePaginator, + type PaginationQueryReturnValue, + type PaginationQueryShapeChangeIdentifier, + type PaginatorOptions, +} from './BasePaginator'; +import type { + LocalMessage, + MessagePaginationOptions, + PinnedMessagePaginationOptions, +} from '../../types'; +import type { Channel } from '../../channel'; +import { formatMessage, generateUUIDv4 } from '../../utils'; +import { makeComparator } from '../sortCompiler'; +import { isEqual } from '../../utils/mergeWith/mergeWithCore'; +import type { FieldToDataResolver } from '../types.normalization'; +import { resolveDotPathValue } from '../utility.normalization'; +import type { + JumpToMessageOptions, + MessagePaginatorOptions, + MessagePaginatorSort, +} from './MessagePaginator'; +import { 
ItemIndex } from '../ItemIndex'; + +export type MessageReplyPaginatorFilter = { + cid: string; + parent_id: string; +}; + +const DEFAULT_PAGE_SIZE = 50; + +const DEFAULT_BACKEND_SORT: MessagePaginatorSort = { + created_at: 1, +}; + +export type MessageReplyQueryShape = { + options: MessagePaginationOptions | PinnedMessagePaginationOptions; + sort: MessagePaginatorSort; +}; + +const getQueryShapeRelevantMessageOptions = ( + options: MessagePaginationOptions, +): Omit => { + const { + /* eslint-disable-next-line @typescript-eslint/no-unused-vars */ + limit: _, + ...relevantOptions + } = options; + return relevantOptions; +}; + +const hasPaginationQueryShapeChanged: PaginationQueryShapeChangeIdentifier< + MessageReplyQueryShape +> = (prevQueryShape, nextQueryShape) => + !isEqual( + { + ...prevQueryShape, + options: getQueryShapeRelevantMessageOptions(prevQueryShape?.options ?? {}), + }, + { + ...nextQueryShape, + options: getQueryShapeRelevantMessageOptions(nextQueryShape?.options ?? {}), + }, + ); + +const dataFieldFilterResolver: FieldToDataResolver = { + matchesField: () => true, + resolve: (message, path) => resolveDotPathValue(message, path), +}; + +export type MessageReplyPaginatorOptions = Omit< + MessagePaginatorOptions, + 'paginatorOptions' +> & { + parentMessageId: string; + paginatorOptions?: PaginatorOptions; +}; + +export class MessageReplyPaginator extends BasePaginator< + LocalMessage, + MessageReplyQueryShape +> { + private readonly _id: string; + private channel: Channel; + protected _parentMessageId: string; + protected _sort = DEFAULT_BACKEND_SORT; + protected _nextQueryShape: MessageReplyQueryShape | undefined; + sortComparator: (a: LocalMessage, b: LocalMessage) => number; + + protected get intervalItemIdsAreHeadFirst(): boolean { + // Replies are stored in chronological order (created_at asc) within an interval. + // Pagination "head" (newest side) is therefore at the END of the `itemIds` array. 
+ return false; + } + + protected get intervalSortDirection(): 'asc' | 'desc' { + // Head edge is newest, but sortComparator is created_at asc => newer head edges + // should come first => reverse interval ordering. + return 'desc'; + } + + constructor({ + channel, + id, + itemIndex = new ItemIndex({ getId: (item) => item.id }), + paginatorOptions, + parentMessageId, + }: MessageReplyPaginatorOptions) { + super({ + hasPaginationQueryShapeChanged, + initialCursor: ZERO_PAGE_CURSOR, + itemIndex, + ...paginatorOptions, + pageSize: paginatorOptions?.pageSize ?? DEFAULT_PAGE_SIZE, + }); + const definedSort = DEFAULT_BACKEND_SORT; + this.channel = channel; + this._parentMessageId = parentMessageId; + this._id = id ?? `message-reply-paginator-${generateUUIDv4()}`; + this._sort = definedSort; + this.sortComparator = makeComparator({ + sort: this._sort, + resolvePathValue: resolveDotPathValue, + tiebreaker: (l, r) => { + const leftId = this.getItemId(l); + const rightId = this.getItemId(r); + return leftId < rightId ? -1 : leftId > rightId ? 1 : 0; + }, + }); + this.setFilterResolvers([dataFieldFilterResolver]); + } + + get id() { + return this._id; + } + + get sort() { + return this._sort ?? DEFAULT_BACKEND_SORT; + } + + /** + * Even though we do not send filters object to the server, we need to have filters for client-side item ingestion logic. + */ + buildFilters = (): MessageReplyPaginatorFilter => ({ + cid: this.channel.cid, + parent_id: this._parentMessageId, + }); + + // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; + protected getNextQueryShape({ + direction, + }: PaginationQueryParams): MessageReplyQueryShape { + return { + options: { + limit: this.pageSize, + [direction === 'tailward' ? 
'id_lt' : 'id_gt']: + direction && this.cursor?.[direction], + }, + sort: this._sort, + }; + } + + query = async ({ + direction, + queryShape, + }: PaginationQueryParams): Promise< + PaginationQueryReturnValue + > => { + if (!queryShape) { + queryShape = this.getNextQueryShape({ direction }); + } + const { sort, options } = queryShape; + let items: LocalMessage[]; + let tailward: string | undefined; + let headward: string | undefined; + if (this.config.doRequest) { + const result = await this.config.doRequest({ + options, + sort: Array.isArray(sort) ? sort : [sort], + }); + items = result?.items ?? []; + // if there is no direction, then we are jumping, and we want to set both directions in the cursor + tailward = + !direction || direction === 'tailward' + ? (result.cursor?.tailward ?? undefined) + : undefined; + headward = + !direction || direction === 'headward' + ? (result.cursor?.headward ?? undefined) + : undefined; + } else { + const { messages } = await this.channel.getReplies( + this._parentMessageId, + options, + Array.isArray(sort) ? sort : [sort], + ); + items = messages.map(formatMessage); + // if there is no direction, then we are jumping, and we want to set both directions in the cursor + tailward = !direction || direction === 'tailward' ? messages[0].id : undefined; + headward = + !direction || direction === 'headward' ? messages.slice(-1)[0].id : undefined; + } + + return { items, headward, tailward }; + }; + + isJumpQueryShape(queryShape: MessageReplyQueryShape): boolean { + return ( + !!queryShape?.options?.id_around || + !!(queryShape.options as MessagePaginationOptions)?.created_at_around + ); + } + + /** + * Jump to a message inside thread replies. + * + * Mirrors `MessagePaginator.jumpToMessage` behavior: + * - If the message is already present in the item index and belongs to an existing interval, + * it activates that interval without querying. + * - Otherwise, performs an `id_around` query and ensures the item is present. 
+ */ + jumpToMessage = async ( + messageId: string, + { pageSize }: JumpToMessageOptions = {}, + ): Promise => { + let localMessage = this.getItem(messageId); + let interval: AnyInterval | undefined; + let state: Partial> | undefined; + + if (localMessage) { + interval = this.locateIntervalForItem(localMessage); + } + + if (!localMessage || !interval || isLogicalInterval(interval)) { + const result = await this.executeQuery({ + queryShape: { + options: { id_around: messageId, limit: pageSize }, + sort: this.sort, + }, + updateState: false, + }); + + localMessage = this.getItem(messageId); + if (!localMessage || !result || !result.targetInterval) { + this.channel.getClient().notifications.addError({ + message: 'Jump to message unsuccessful', + origin: { + emitter: 'MessageReplyPaginator.jumpToMessage', + context: { messageId, parentMessageId: this._parentMessageId }, + }, + options: { type: 'api:replies:query:failed' }, + }); + return false; + } + interval = result.targetInterval; + state = result.stateCandidate; + } + + if (!this.isActiveInterval(interval)) { + this.setActiveInterval(interval); + if (state) this.state.partialNext(state); + } + + return true; + }; + + jumpToTheLatestMessage = async (options?: JumpToMessageOptions): Promise => { + let latestMessageId: string | undefined; + const intervals = this.itemIntervals; + + if (!(intervals[0] as Interval)?.isHead) { + // get the first page (in case the pagination has not started at the head) + await this.executeQuery({ updateState: false }); + } + + const headInterval = intervals[0]; + if ((intervals[0] as Interval)?.isHead) { + latestMessageId = headInterval.itemIds.slice(-1)[0]; + } + + if (!latestMessageId) { + this.channel.getClient().notifications.addError({ + message: 'Jump to latest message unsuccessful', + origin: { emitter: 'MessageReplyPaginator.jumpToTheLatestMessage' }, + options: { type: 'api:message:replies:query:failed' }, + }); + return false; + } + + return await 
this.jumpToMessage(latestMessageId, options); + }; + + filterQueryResults = (items: LocalMessage[]) => items; +} diff --git a/src/pagination/paginators/ReminderPaginator.ts b/src/pagination/paginators/ReminderPaginator.ts index 8cf23b914d..8c50224523 100644 --- a/src/pagination/paginators/ReminderPaginator.ts +++ b/src/pagination/paginators/ReminderPaginator.ts @@ -1,4 +1,4 @@ -import { BasePaginator } from './BasePaginator'; +import { BasePaginator, ZERO_PAGE_CURSOR } from './BasePaginator'; import type { PaginationQueryParams, PaginationQueryReturnValue, @@ -42,7 +42,7 @@ export class ReminderPaginator extends BasePaginator< client: StreamChat, options?: PaginatorOptions, ) { - super(options); + super({ initialCursor: ZERO_PAGE_CURSOR, ...options }); this.client = client; } @@ -66,7 +66,7 @@ export class ReminderPaginator extends BasePaginator< PaginationQueryReturnValue > => { const { reminders: items, next, prev } = await this.client.queryReminders(queryShape); - return { items, next, prev }; + return { items, headward: prev, tailward: next }; }; filterQueryResults = (items: ReminderResponse[]) => items; diff --git a/src/pagination/paginators/index.ts b/src/pagination/paginators/index.ts index 1c5fbb4d44..03cd6bae39 100644 --- a/src/pagination/paginators/index.ts +++ b/src/pagination/paginators/index.ts @@ -1,3 +1,5 @@ export * from './BasePaginator'; export * from './ChannelPaginator'; +export * from './MessagePaginator'; +export * from './MessageReplyPaginator'; export * from './ReminderPaginator'; diff --git a/src/pagination/sortCompiler.ts b/src/pagination/sortCompiler.ts index 15a6c8ccaa..b4b05aaf92 100644 --- a/src/pagination/sortCompiler.ts +++ b/src/pagination/sortCompiler.ts @@ -10,8 +10,8 @@ import type { AscDesc } from '../types'; import type { Comparator, PathResolver } from './types.normalization'; export type ItemLocation = { - expected: number; - current: number; + currentIndex: number; + insertionIndex: number; }; /** @@ -47,7 +47,7 @@ export 
function binarySearch({ plateauScan?: boolean; }): ItemLocation { // empty array - if (length === 0) return { current: -1, expected: 0 }; + if (length === 0) return { currentIndex: -1, insertionIndex: 0 }; // --- 1) Binary search to find lower bound (insertionIndex) --- let lo = 0; @@ -59,11 +59,10 @@ export function binarySearch({ if (!midItem) { // Corruption: we have an ID but no backing item. // Bail out with "not found". - return { current: -1, expected: -1 }; + return { currentIndex: -1, insertionIndex: -1 }; } - const cmp = compare(midItem, needle); - if (cmp < 0) { + if (compare(midItem, needle) <= 0) { // midItem < needle ⇒ go right lo = mid + 1; } else { @@ -72,14 +71,16 @@ export function binarySearch({ } } - const expected = lo; + const insertionIndex = lo; // item is located where it is expected to be according to the sort - const itemAtExpectedIndex = getItemAt(expected); + const itemAtExpectedIndex = getItemAt(insertionIndex); if (itemAtExpectedIndex && itemIdentityEquals(itemAtExpectedIndex, needle)) { - return { current: expected, expected }; - } else if (!plateauScan) { - return { current: -1, expected }; + return { currentIndex: insertionIndex, insertionIndex }; + } + + if (!plateauScan) { + return { currentIndex: -1, insertionIndex }; } // --- 2) Plateau scan around insertionIndex --- @@ -87,43 +88,33 @@ export function binarySearch({ const checkSide = (atIndex: number) => { const result = { exhausted: false, found: false }; const item = getItemAt(atIndex); - if (!item) { - result.exhausted = true; - } else { - const cmp = compare(item, needle); - if (cmp !== 0) { - result.exhausted = true; - } else { - if (itemIdentityEquals(item, needle)) { - result.found = true; - } - } - } + if (!item) result.exhausted = true; + else if (itemIdentityEquals(item, needle)) result.found = true; return result; }; // Alternating left/right scan - let iLeft = expected - 1; - let iRight = expected + 1; // we've already checked insertionIndex + let iLeft = 
insertionIndex - 1; + let iRight = insertionIndex + 1; // we've already checked insertionIndex let leftDone = iLeft < 0; let rightDone = iRight >= length; while (!leftDone || !rightDone) { if (!leftDone) { const result = checkSide(iLeft); - if (result.found) return { current: iLeft, expected }; + if (result.found) return { currentIndex: iLeft, insertionIndex }; leftDone = result.exhausted || --iLeft < 0; } if (!rightDone) { const result = checkSide(iRight); - if (result.found) return { current: iRight, expected }; + if (result.found) return { currentIndex: iRight, insertionIndex }; rightDone = result.exhausted || ++iRight >= length; } } // Not found in plateau; insertion index is still the correct lower bound. - return { current: -1, expected }; + return { currentIndex: -1, insertionIndex }; } /** diff --git a/src/reminders/ReminderManager.ts b/src/reminders/ReminderManager.ts index 8c4dac1b5d..95b12fefa5 100644 --- a/src/reminders/ReminderManager.ts +++ b/src/reminders/ReminderManager.ts @@ -287,11 +287,11 @@ export class ReminderManager extends WithSubscriptions { }; queryNextReminders = async () => { - await this.paginator.next(); + await this.paginator.toTail(); }; queryPreviousReminders = async () => { - await this.paginator.prev(); + await this.paginator.toHead(); }; // API calls END // diff --git a/src/thread.ts b/src/thread.ts index bf6f778121..9c0cdcf627 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -15,11 +15,17 @@ import type { ThreadResponse, UserResponse, } from './types'; -import type { Channel } from './channel'; +import type { + Channel, + SendMessageWithStateUpdateParams, + UpdateMessageWithStateUpdateParams, +} from './channel'; import type { StreamChat } from './client'; import type { CustomThreadData } from './custom_types'; import { MessageComposer } from './messageComposer'; +import { MessageOperations } from './messageOperations'; import { WithSubscriptions } from './utils/WithSubscriptions'; +import { MessagePaginator } from 
'./pagination'; type QueryRepliesOptions = { sort?: { created_at: AscDesc }[]; @@ -113,6 +119,8 @@ export class Thread extends WithSubscriptions { public readonly state: StateStore; public readonly id: string; public readonly messageComposer: MessageComposer; + public readonly messagePaginator: MessagePaginator; + public readonly messageOperations: MessageOperations; private client: StreamChat; private failedRepliesMap: Map = new Map(); @@ -175,11 +183,62 @@ export class Thread extends WithSubscriptions { this.id = threadData.parent_message_id; this.client = client; + this.messagePaginator = new MessagePaginator({ channel: this.channel }); // todo: pass Thread instance this.messageComposer = new MessageComposer({ client, composition: threadData.draft, compositionContext: this, }); + + this.messageOperations = new MessageOperations({ + ingest: (m) => this.messagePaginator.ingestItem(m), + get: (id) => this.messagePaginator.getItem(id), + normalizeOutgoingMessage: (m) => ({ + ...m, + parent_id: this.id, + }), + handlers: () => { + const { requestHandlers } = this.channel.configState.getLatestValue(); + const sendMessageRequest = requestHandlers?.sendMessageRequest; + const retrySendMessageRequest = requestHandlers?.retrySendMessageRequest; + const updateMessageRequest = requestHandlers?.updateMessageRequest; + return { + send: sendMessageRequest + ? (p) => + sendMessageRequest({ + localMessage: p.localMessage, + message: p.message, + options: p.options, + }) + : undefined, + retry: retrySendMessageRequest + ? (p) => + retrySendMessageRequest({ + localMessage: p.localMessage, + message: p.message, + options: p.options, + }) + : undefined, + update: updateMessageRequest + ? 
(p) => + updateMessageRequest({ + localMessage: p.localMessage, + options: p.options, + }) + : undefined, + }; + }, + defaults: { + send: async (m, o) => { + const result = await this.channel.sendMessage(m, o); + return { message: result.message }; + }, + update: async (m, o) => { + const result = await this.channel.getClient().updateMessage(m, undefined, o); + return { message: result.message }; + }, + }, + }); } get channel() { @@ -489,6 +548,7 @@ export class Thread extends WithSubscriptions { const formattedMessage = formatMessage(message); + // todo: do we really need to keep the failedRepliesMap? if (message.status === 'failed') { // store failed reply so that it's not lost when reloading or hydrating this.failedRepliesMap.set(formattedMessage.id, formattedMessage); @@ -529,6 +589,57 @@ export class Thread extends WithSubscriptions { } }; + /** + * Sends a message with optimistic local state update. + */ + async sendMessageWithLocalUpdate({ + localMessage, + message, + options, + sendMessageRequestFn, + }: SendMessageWithStateUpdateParams): Promise { + await this.messageOperations.send( + { + localMessage, + message, + options, + }, + sendMessageRequestFn, + ); + } + + /** + * Retry sending a failed message. + */ + async retrySendMessageWithLocalUpdate( + params: Omit, + ) { + await this.messageOperations.retry( + { + localMessage: { ...params.localMessage, type: 'regular' }, + options: params.options, + }, + params.sendMessageRequestFn, + ); + } + + /** + * Updates a message with optimistic local state update. + * + * NOTE: This updates message state via `messagePaginator` only. If you still rely on + * `Thread.state.replies` as UI source of truth, make sure it is wired to paginator updates + * (or keep upserting separately until migration is complete). 
+ */ + async updateMessageWithLocalUpdate(params: UpdateMessageWithStateUpdateParams) { + await this.messageOperations.update( + { + localMessage: params.localMessage, + options: params.options, + }, + params.updateMessageRequestFn, + ); + } + public markAsRead = async ({ force = false }: { force?: boolean } = {}) => { if (this.ownUnreadCount === 0 && !force) { return null; diff --git a/test/unit/ChannelPaginatorsOrchestrator.test.ts b/test/unit/ChannelPaginatorsOrchestrator.test.ts index 28bde42cd1..dc50dc8cd9 100644 --- a/test/unit/ChannelPaginatorsOrchestrator.test.ts +++ b/test/unit/ChannelPaginatorsOrchestrator.test.ts @@ -188,26 +188,26 @@ describe('ChannelPaginatorsOrchestrator', () => { ownershipResolver: [p2.id], }); - await Promise.all([p1, p2].map((p) => p.next())); + await Promise.all([p1, p2].map((p) => p.toTail())); await vi.waitFor(() => { expect(p1.items).toHaveLength(0); // even though ownership claimed by p2, it is still possible to request next page. - expect(p1.hasNext).toBe(true); + expect(p1.hasMoreTail).toBe(true); expect(p2.items).toHaveLength(1); expect(p2.items).toStrictEqual([ch1]); - expect(p2.hasNext).toBe(true); + expect(p2.hasMoreTail).toBe(true); }); queryChannelSpy.mockResolvedValue([ch2]); - await Promise.all([p1, p2].map((p) => p.next())); + await Promise.all([p1, p2].map((p) => p.toTail())); await vi.waitFor(() => { expect(p1.items).toHaveLength(0); - expect(p1.hasNext).toBe(true); + expect(p1.hasMoreTail).toBe(true); expect(p2.items).toHaveLength(2); expect(p2.items).toStrictEqual([ch1, ch2]); - expect(p2.hasNext).toBe(true); + expect(p2.hasMoreTail).toBe(true); }); }); }); @@ -702,8 +702,12 @@ describe('ChannelPaginatorsOrchestrator', () => { const p1 = new ChannelPaginator({ client }); const p2 = new ChannelPaginator({ client }); p1.state.partialNext({ items: [ch] }); - vi.spyOn(p1, 'findItem').mockReturnValue(ch); - vi.spyOn(p2, 'findItem').mockReturnValue(undefined); + vi.spyOn(p1, 'locateByItem').mockReturnValue({ + state: 
{ currentIndex: 0, insertionIndex: 1 }, + }); + vi.spyOn(p2, 'locateByItem').mockReturnValue({ + state: { currentIndex: -1, insertionIndex: 1 }, + }); const partialNextSpy1 = vi.spyOn(p1.state, 'partialNext'); const partialNextSpy2 = vi.spyOn(p2.state, 'partialNext'); @@ -737,7 +741,9 @@ describe('ChannelPaginatorsOrchestrator', () => { const p = new ChannelPaginator({ client }); const matchesFilterSpy = vi.spyOn(p, 'matchesFilter').mockReturnValue(true); const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); - const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const removeItemSpy = vi + .spyOn(p, 'removeItem') + .mockReturnValue({ state: { currentIndex: 0, insertionIndex: 1 } }); orchestrator.insertPaginator({ paginator: p }); orchestrator.registerSubscriptions(); @@ -762,7 +768,9 @@ describe('ChannelPaginatorsOrchestrator', () => { const orchestrator = new ChannelPaginatorsOrchestrator({ client }); const p = new ChannelPaginator({ client }); - const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const removeItemSpy = vi + .spyOn(p, 'removeItem') + .mockReturnValue({ state: { currentIndex: 0, insertionIndex: -1 } }); const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); vi.spyOn(p, 'matchesFilter').mockReturnValue(true); orchestrator.insertPaginator({ paginator: p }); @@ -793,7 +801,9 @@ describe('ChannelPaginatorsOrchestrator', () => { const p = new ChannelPaginator({ client }); - const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const removeItemSpy = vi + .spyOn(p, 'removeItem') + .mockReturnValue({ state: { currentIndex: 0, insertionIndex: -1 } }); const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); vi.spyOn(p, 'matchesFilter').mockReturnValue(true); @@ -817,7 +827,9 @@ describe('ChannelPaginatorsOrchestrator', () => { const p = new ChannelPaginator({ client }); - const removeItemSpy = vi.spyOn(p, 'removeItem').mockReturnValue(true); + const 
removeItemSpy = vi + .spyOn(p, 'removeItem') + .mockReturnValue({ state: { currentIndex: 0, insertionIndex: -1 } }); const ingestItemSpy = vi.spyOn(p, 'ingestItem').mockReturnValue(true); vi.spyOn(p, 'matchesFilter').mockReturnValue(false); diff --git a/test/unit/EventHandlerPipeline.test.ts b/test/unit/EventHandlerPipeline.test.ts index de47aaf6f3..8ac43ee7cf 100644 --- a/test/unit/EventHandlerPipeline.test.ts +++ b/test/unit/EventHandlerPipeline.test.ts @@ -547,9 +547,9 @@ describe('EventHandlerPipeline', () => { }, }; const head = { - id: 'head', + id: 'isHead', handle: () => { - order.push('head'); + order.push('isHead'); }, }; const inserter = { @@ -561,9 +561,9 @@ describe('EventHandlerPipeline', () => { }, }; const tail = { - id: 'tail', + id: 'isTail', handle: () => { - order.push('tail'); + order.push('isTail'); }, }; @@ -574,14 +574,14 @@ describe('EventHandlerPipeline', () => { // @ts-expect-error passing custom event type await pipeline.run(makeEvt('e1'), ctx); // 'late' must NOT run for e1 - expect(order).toEqual(['head', 'inserter', 'tail']); + expect(order).toEqual(['isHead', 'inserter', 'isTail']); order.length = 0; // @ts-expect-error passing custom event type await pipeline.run(makeEvt('e2'), ctx); // For the next event, late is present - expect(order).toEqual(['head', 'inserter', 'tail', 'late']); + expect(order).toEqual(['isHead', 'inserter', 'isTail', 'late']); }); }); }); diff --git a/test/unit/MessageComposer/messageComposer.test.ts b/test/unit/MessageComposer/messageComposer.test.ts index dbf03325d7..724b968e2c 100644 --- a/test/unit/MessageComposer/messageComposer.test.ts +++ b/test/unit/MessageComposer/messageComposer.test.ts @@ -5,6 +5,7 @@ import { ChannelAPIResponse, ChannelConfigWithInfo, ChannelResponse, + DEFAULT_COMPOSER_CONFIG, LocalMessage, MessageComposerConfig, StaticLocationPayload, @@ -15,6 +16,7 @@ import { DeepPartial } from '../../../src/types.utility'; import { MessageComposer } from 
'../../../src/messageComposer/messageComposer'; import { DraftResponse, MessageResponse } from '../../../src/types'; import { MockOfflineDB } from '../offline-support/MockOfflineDB'; +import { generateMsg } from '../test-utils/generateMessage'; const generateUuidV4Output = 'test-uuid'; // Mock dependencies @@ -168,7 +170,7 @@ describe('MessageComposer', () => { const { messageComposer, mockChannel } = setup(); expect(messageComposer).toBeDefined(); expect(messageComposer.channel).toBe(mockChannel); - expect(messageComposer.config).toBeDefined(); + expect(messageComposer.config).toStrictEqual(DEFAULT_COMPOSER_CONFIG); expect(messageComposer.attachmentManager).toBeDefined(); expect(messageComposer.linkPreviewsManager).toBeDefined(); expect(messageComposer.textComposer).toBeDefined(); @@ -178,16 +180,45 @@ describe('MessageComposer', () => { it('should initialize with custom config', () => { const customConfig: DeepPartial = { + attachments: { + maxNumberOfFilesPerMessage: 1, + }, + drafts: { enabled: true }, + linkPreviews: { debounceURLEnrichmentMs: 20 }, + location: { enabled: false }, text: { maxLengthOnEdit: 1000, publishTypingEvents: false, }, + sendMessageRequestFn: () => Promise.resolve({ message: generateMsg() }), }; const { messageComposer } = setup({ config: customConfig }); - expect(messageComposer.config.text.publishTypingEvents).toBe(false); - expect(messageComposer.config.text?.maxLengthOnEdit).toBe(1000); + expect(messageComposer.config).toStrictEqual({ + attachments: { + acceptedFiles: DEFAULT_COMPOSER_CONFIG.attachments.acceptedFiles, + fileUploadFilter: DEFAULT_COMPOSER_CONFIG.attachments.fileUploadFilter, + maxNumberOfFilesPerMessage: + customConfig.attachments!.maxNumberOfFilesPerMessage, + }, + drafts: customConfig.drafts, + linkPreviews: { + debounceURLEnrichmentMs: customConfig.linkPreviews!.debounceURLEnrichmentMs, + enabled: DEFAULT_COMPOSER_CONFIG.linkPreviews.enabled, + findURLFn: DEFAULT_COMPOSER_CONFIG.linkPreviews.findURLFn, + }, + 
location: { + enabled: customConfig.location!.enabled, + getDeviceId: DEFAULT_COMPOSER_CONFIG.location!.getDeviceId, + }, + sendMessageRequestFn: customConfig.sendMessageRequestFn, + text: { + enabled: DEFAULT_COMPOSER_CONFIG.text.enabled, + maxLengthOnEdit: customConfig.text!.maxLengthOnEdit, + publishTypingEvents: customConfig.text!.publishTypingEvents, + }, + }); }); it('should initialize with custom config overridden with back-end configuration', () => { @@ -1024,6 +1055,28 @@ describe('MessageComposer', () => { expect(result).toBeUndefined(); }); + describe('sendMessage', () => { + it.fails('performs optimistic update before sending the message'); + it.fails( + 'updates the message in state after successful response if message has not arrived over WS', + ); + it.fails( + 'does not update the message in state after successful response if message has arrived over WS and the update timestamp is <= existing message timestamp', + ); + it.fails( + 'does not update the message in state if it already exists on the server and in the local state as not delivered', + ); + it.fails( + 'does not update the message in state if it already exists on the server and in the local state as not failed', + ); + it.fails( + 'updates the message in state if it already exists on the server and in the local state with status sending', + ); + it.fails( + 'updates the message in state if it does not exist on the server and the send request failed', + ); + }); + it('should compose draft', async () => { const { messageComposer } = setup(); const mockResult = { diff --git a/test/unit/messageOperations/MessageOperations.test.ts b/test/unit/messageOperations/MessageOperations.test.ts new file mode 100644 index 0000000000..2f588bcd7c --- /dev/null +++ b/test/unit/messageOperations/MessageOperations.test.ts @@ -0,0 +1,203 @@ +import { describe, expect, it } from 'vitest'; +import { MessageOperations } from '../../../src/messageOperations/MessageOperations'; +import type { LocalMessage, Message, 
MessageResponse } from '../../../src/types'; + +type Store = Map; + +const makeLocalMessage = (overrides?: Partial): LocalMessage => + ({ + attachments: [], + created_at: new Date(), + deleted_at: null, + id: 'm1', + mentioned_users: [], + pinned_at: null, + reaction_groups: null, + status: 'failed', + text: 'hi', + type: 'regular', + updated_at: new Date(), + ...overrides, + }) as LocalMessage; + +const makeMessageResponse = (overrides?: Partial): MessageResponse => + ({ + id: 'm1', + text: 'hi', + type: 'regular', + created_at: new Date().toISOString(), + updated_at: new Date().toISOString(), + ...overrides, + }) as MessageResponse; + +describe('MessageOperations', () => { + it('marks optimistic message as sending, then ingests received response', async () => { + const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'failed' }); + await ops.send({ localMessage }); + + expect(store.get('m1')?.status).toBe('received'); + }); + + it('uses per-call requestFn override for send', async () => { + const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1' }); + + await ops.send({ localMessage }, async () => ({ + message: makeMessageResponse({ id: 'm1', text: 'override' }), + })); + + expect(store.get('m1')?.text).toBe('override'); + }); + + it('marks as received on duplicate send error (already exists)', async () => { + 
const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => { + throw Object.assign(new Error('message already exists'), { code: 4 }); + }, + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'failed' }); + + await expect(ops.send({ localMessage })).rejects.toThrow(); + expect(store.get('m1')?.status).toBe('received'); + }); + + it('marks as failed on non-duplicate error', async () => { + const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => { + throw new Error('nope'); + }, + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'failed' }); + + await expect(ops.send({ localMessage })).rejects.toThrow('nope'); + expect(store.get('m1')?.status).toBe('failed'); + }); + + it('normalizes outgoing message for send', async () => { + const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + normalizeOutgoingMessage: (m) => ({ ...m, parent_id: 't1' }), + handlers: () => ({ + send: async (p) => { + expect(p.message?.parent_id).toBe('t1'); + return { message: makeMessageResponse({ id: p.localMessage.id }) }; + }, + }), + defaults: { + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1' }); + const message = { id: 'm1', text: 'hi' } as unknown as Message; + + await ops.send({ localMessage, message }); + expect(store.get('m1')?.status).toBe('received'); + }); + + it('update passes only 
supported options (skip_enrich_url / skip_push) to defaults.update', async () => { + const store: Store = new Map(); + + let seenOptions: unknown = 'unset'; + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async (_m, options) => { + seenOptions = options; + return { message: makeMessageResponse({ id: 'm1' }) }; + }, + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'received' }); + + await ops.update({ + localMessage, + options: { + // known fields + skip_enrich_url: true, + skip_push: false, + // @ts-expect-error extra fields should be dropped by MessageOperations.update + force_moderation: true, + }, + }); + + expect(seenOptions).toEqual({ + skip_enrich_url: true, + skip_push: false, + }); + }); + + it('update passes undefined options to defaults.update when params.options is undefined', async () => { + const store: Store = new Map(); + + let seenOptions: unknown = 'unset'; + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async (_m, options) => { + seenOptions = options; + return { message: makeMessageResponse({ id: 'm1' }) }; + }, + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'received' }); + + await ops.update({ localMessage }); + expect(seenOptions).toBeUndefined(); + }); +}); diff --git a/test/unit/pagination/BasePaginator.test.ts b/test/unit/pagination/BasePaginator.test.ts deleted file mode 100644 index f82516e31e..0000000000 --- a/test/unit/pagination/BasePaginator.test.ts +++ /dev/null @@ -1,1544 +0,0 @@ -import { describe, expect, it, vi } from 'vitest'; -import { - AscDesc, - BasePaginator, - DEFAULT_PAGINATION_OPTIONS, - PaginationQueryParams, - 
PaginationQueryReturnValue, - PaginatorCursor, - type PaginatorOptions, - PaginatorState, - PrimitiveFilter, - QueryFilter, - QueryFilters, - RequireOnlyOne, -} from '../../../src'; -import { sleep } from '../../../src/utils'; -import { makeComparator } from '../../../src/pagination/sortCompiler'; -import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../../src/constants'; - -const toNextTick = async () => { - const sleepPromise = sleep(0); - vi.advanceTimersByTime(0); - await sleepPromise; -}; - -type TestItem = { - id: string; - name?: string; - teams?: string[]; - blocked?: boolean; - createdAt?: string; // date string - age?: number; -}; - -type QueryShape = { - filters: { - [Key in keyof TestItem]: - | RequireOnlyOne> - | PrimitiveFilter; - }; - sort: { [Key in keyof TestItem]?: AscDesc }; -}; - -class IncompletePaginator extends BasePaginator { - sort: QueryFilters | undefined; - sortComparator: (a: TestItem, b: TestItem) => number = vi.fn(); - queryResolve: Function = vi.fn(); - queryReject: Function = vi.fn(); - queryPromise: Promise> | null = null; - mockClientQuery = vi.fn(); - - constructor(options: PaginatorOptions = {}) { - super(options); - } - - query( - params: PaginationQueryParams, - ): Promise> { - const promise = new Promise>( - (queryResolve, queryReject) => { - this.queryResolve = queryResolve; - this.queryReject = queryReject; - }, - ); - this.mockClientQuery(params); - this.queryPromise = promise; - return promise; - } - - filterQueryResults(items: TestItem[]): TestItem[] | Promise { - return items; - } -} - -const defaultNextQueryShape: QueryShape = { filters: { id: 'test-id' }, sort: { id: 1 } }; - -class Paginator extends IncompletePaginator { - constructor(options: PaginatorOptions = {}) { - super(options); - } - - getNextQueryShape = vi.fn().mockReturnValue(defaultNextQueryShape); -} - -describe('BasePaginator', () => { - describe('constructor', () => { - it('initiates with the defaults', () => { - const paginator = new 
Paginator(); - expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, - isLoading: false, - items: undefined, - lastQueryError: undefined, - cursor: undefined, - offset: 0, - }); - expect(paginator.isInitialized).toBe(false); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(0); - expect(paginator.config.initialCursor).toBeUndefined(); - expect(paginator.config.initialOffset).toBeUndefined(); - expect(paginator.config.throwErrors).toBe(false); - expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); - expect(paginator.config.debounceMs).toBe(DEFAULT_PAGINATION_OPTIONS.debounceMs); - expect(paginator.config.lockItemOrder).toBe( - DEFAULT_PAGINATION_OPTIONS.lockItemOrder, - ); - expect(paginator.config.hasPaginationQueryShapeChanged).toBe( - DEFAULT_PAGINATION_OPTIONS.hasPaginationQueryShapeChanged, - ); - }); - - it('initiates with custom options', () => { - const options: PaginatorOptions = { - debounceMs: DEFAULT_PAGINATION_OPTIONS.debounceMs - 100, - doRequest: () => Promise.resolve({ items: [{ id: 'test-id' }] }), - hasPaginationQueryShapeChanged: () => true, - initialCursor: { next: 'next', prev: 'prev' }, - initialOffset: 10, - lockItemOrder: !DEFAULT_PAGINATION_OPTIONS.lockItemOrder, - pageSize: DEFAULT_PAGINATION_OPTIONS.pageSize - 1, - throwErrors: true, - }; - const paginator = new Paginator(options); - expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, - isLoading: false, - items: undefined, - lastQueryError: undefined, - cursor: options.initialCursor, - offset: options.initialOffset, - }); - expect(paginator.isInitialized).toBe(false); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(0); - expect(paginator.config.initialCursor).toStrictEqual(options.initialCursor); - expect(paginator.config.initialOffset).toStrictEqual(options.initialOffset); - 
expect(paginator.config.throwErrors).toBe(options.throwErrors); - expect(paginator.pageSize).toBe(options.pageSize); - expect(paginator.config.hasPaginationQueryShapeChanged).toStrictEqual( - options.hasPaginationQueryShapeChanged, - ); - expect(paginator.config.debounceMs).toBe(options.debounceMs); - expect(paginator.config.lockItemOrder).toBe(options.lockItemOrder); - }); - }); - - describe('pagination API', () => { - it('throws is the paginator does implement own getNextQueryShape', () => { - const paginator = new IncompletePaginator(); - // @ts-expect-error accessing protected property - expect(paginator.getNextQueryShape).toThrow( - 'Paginator.getNextQueryShape() is not implemented', - ); - }); - - describe('shouldResetStateBeforeQuery', () => { - const stateBeforeQuery: PaginatorState = { - hasNext: true, - hasPrev: true, - isLoading: false, - items: [{ id: 'test-item' }], - lastQueryError: undefined, - cursor: { next: 'next', prev: 'prev' }, - offset: 10, - }; - - const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; - const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; - - it('resets the state before a query when querying the first page', () => { - const paginator = new Paginator(); - const initialState = { ...stateBeforeQuery, items: undefined }; - paginator.state.next(initialState); - expect(paginator.state.getLatestValue()).toEqual(initialState); - // @ts-expect-error accessing protected property - expect(paginator.shouldResetStateBeforeQuery()).toBe(true); - }); - - it('resets the state before a query when query shape changed', () => { - const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; - const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; - const paginator = new Paginator(); - expect( - // @ts-expect-error accessing protected property - paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), - ).toBe(true); - expect( - // 
@ts-expect-error accessing protected property - paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), - ).toBe(false); - }); - - it('determines whether pagination state should be reset before a query using custom logic', () => { - const options = { - hasPaginationQueryShapeChanged: vi.fn().mockReturnValue(true), - }; - const paginator = new Paginator(options); - expect( - // @ts-expect-error accessing protected property - paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), - ).toBe(true); - expect( - // @ts-expect-error accessing protected property - paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), - ).toBe(true); - expect(options.hasPaginationQueryShapeChanged).toHaveBeenCalledTimes(2); - }); - }); - - it('paginates to next pages (cursor)', async () => { - const paginator = new Paginator(); - let nextPromise = paginator.next(); - // wait for the DB data first page load - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'next', - queryShape: defaultNextQueryShape, - reset: undefined, - retryCount: 0, - }); - - nextPromise = paginator.next(); - expect(paginator.isLoading).toBe(true); - paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2', prev: 'prev2' }); - await nextPromise; - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toEqual({ next: 'next2', prev: 'prev2' }); - - 
nextPromise = paginator.next(); - paginator.queryResolve({ items: [] }); - await nextPromise; - expect(paginator.hasNext).toBe(false); - expect(paginator.hasPrev).toBe(false); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toEqual({ next: null, prev: null }); - - paginator.next(); - expect(paginator.isLoading).toBe(false); - expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); - }); - - it('paginates to next pages (offset)', async () => { - const paginator = new Paginator({ pageSize: 1 }); - let nextPromise = paginator.next(); - // wait for the DB data first page load - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(1); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'next', - queryShape: defaultNextQueryShape, - reset: undefined, - retryCount: 0, - }); - - nextPromise = paginator.next(); - expect(paginator.isLoading).toBe(true); - paginator.queryResolve({ items: [{ id: 'id2' }] }); - await nextPromise; - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(2); - - nextPromise = paginator.next(); - paginator.queryResolve({ items: [] }); - await nextPromise; - expect(paginator.hasNext).toBe(false); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(2); - - paginator.next(); - 
expect(paginator.isLoading).toBe(false); - expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); - }); - - it('paginates to next pages debounced', async () => { - vi.useFakeTimers(); - const paginator = new Paginator({ debounceMs: 2000 }); - - paginator.nextDebounced(); - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - vi.advanceTimersByTime(2000); - // await first page load from the DB - await toNextTick(); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await paginator.queryPromise; - await toNextTick(); - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'next', - queryShape: defaultNextQueryShape, - reset: undefined, - retryCount: 0, - }); - - vi.useRealTimers(); - }); - - it('paginates to a previous page', async () => { - const paginator = new Paginator(); - let nextPromise = paginator.prev(); - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'prev', - queryShape: defaultNextQueryShape, - reset: undefined, - retryCount: 0, - }); - - nextPromise = 
paginator.prev(); - expect(paginator.isLoading).toBe(true); - paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2', prev: 'prev2' }); - await nextPromise; - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toEqual({ next: 'next2', prev: 'prev2' }); - - nextPromise = paginator.prev(); - paginator.queryResolve({ items: [] }); - await nextPromise; - expect(paginator.hasNext).toBe(false); - expect(paginator.hasPrev).toBe(false); - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.cursor).toEqual({ next: null, prev: null }); - - paginator.prev(); - expect(paginator.isLoading).toBe(false); - }); - - it('debounces the pagination to a previous page', async () => { - vi.useFakeTimers(); - const paginator = new Paginator({ debounceMs: 2000 }); - - paginator.prevDebounced(); - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - vi.advanceTimersByTime(2000); - await toNextTick(); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await paginator.queryPromise; - await toNextTick(); - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'prev', - queryShape: defaultNextQueryShape, - reset: undefined, - retryCount: 0, - }); - vi.useRealTimers(); - }); - - it('prevents pagination if another query is in progress', async () => { - const paginator = new Paginator(); - const nextPromise1 = paginator.next(); - // wait for the first page 
load from the DB - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); - const nextPromise2 = paginator.next(); - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await Promise.all([nextPromise1, nextPromise2]); - expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); - }); - - it('resets the state if the query shape changed', async () => { - const paginator = new Paginator({ pageSize: 1 }); - let nextPromise = paginator.next(); - await sleep(0); - paginator.queryResolve({ items: [{ id: 'id1' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(1); - - paginator.getNextQueryShape.mockReturnValueOnce({ - filters: { id: 'test' }, - sort: { id: -1 }, - }); - nextPromise = paginator.next(); - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.items).toBeUndefined(); - expect(paginator.offset).toBe(0); - paginator.queryResolve({ items: [{ id: 'id2' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.items).toEqual([{ id: 'id2' }]); - expect(paginator.offset).toBe(1); - }); - - it('resets the state if forced', async () => { - const paginator = new Paginator({ pageSize: 1 }); - let nextPromise = paginator.next(); - await sleep(0); - paginator.queryResolve({ items: [{ id: 'id1' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(1); - - nextPromise = paginator.next({ reset: 'yes' }); - await sleep(0); - expect(paginator.isLoading).toBe(true); - 
expect(paginator.items).toBeUndefined(); - expect(paginator.offset).toBe(0); - paginator.queryResolve({ items: [{ id: 'id2' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.items).toEqual([{ id: 'id2' }]); - expect(paginator.offset).toBe(1); - }); - - it('does not reset the state if forced', async () => { - const paginator = new Paginator({ pageSize: 1 }); - let nextPromise = paginator.next(); - await sleep(0); - paginator.queryResolve({ items: [{ id: 'id1' }] }); - await nextPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(1); - - paginator.getNextQueryShape.mockReturnValueOnce({ - filters: { id: 'test' }, - sort: { id: -1 }, - }); - nextPromise = paginator.next({ reset: 'no' }); - await sleep(0); - expect(paginator.items).toStrictEqual([{ id: 'id1' }]); - expect(paginator.offset).toBe(1); - paginator.queryResolve({ items: [{ id: 'id2' }] }); - await nextPromise; - expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); - expect(paginator.offset).toBe(2); - }); - - it('stores lastQueryError and clears it with the next successful query', async () => { - const paginator = new Paginator(); - let nextPromise = paginator.next(); - // wait for the first page load from DB - await sleep(0); - const error = new Error('Failed'); - paginator.queryReject(error); - // hand over to finish the cleanup and state update after the query execution - await sleep(0); - expect(paginator.lastQueryError).toEqual(error); - expect(paginator.isLoading).toEqual(false); - - nextPromise = paginator.next(); - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await nextPromise; - expect(paginator.lastQueryError).toBeUndefined(); - expect(paginator.items).toEqual([{ id: 'id1' }]); - 
expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - }); - - it('throws error if enabled', async () => { - const paginator = new Paginator({ throwErrors: true }); - let nextPromise = paginator.next(); - // wait for the first page load from DB - await sleep(0); - const error = new Error('Failed'); - paginator.queryReject(error); - await expect(nextPromise).rejects.toThrowError(error); - // hand over to finish the cleanup and state update after the query execution - await sleep(0); - expect(paginator.lastQueryError).toEqual(error); - expect(paginator.isLoading).toEqual(false); - - nextPromise = paginator.next(); - // wait for the first page load from DB - await sleep(0); - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await nextPromise; - expect(paginator.lastQueryError).toBeUndefined(); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - }); - - it('retries the query', async () => { - vi.useFakeTimers(); - const paginator = new Paginator(); - let nextPromise = paginator.next({ retryCount: 2 }); - // wait for the first page load from DB - await toNextTick(); - const error = new Error('Failed'); - paginator.queryReject(error); - // hand over to finish the cleanup and state update after the query execution - await toNextTick(); - expect(paginator.lastQueryError).toEqual(error); - vi.advanceTimersByTime(DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES); - await toNextTick(); - - paginator.queryResolve({ items: [{ id: 'id1' }], next: 'next1', prev: 'prev1' }); - await nextPromise; - expect(paginator.lastQueryError).toBeUndefined(); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toEqual({ next: 'next1', prev: 'prev1' }); - vi.useRealTimers(); - }); - }); - - describe('item management', () => { - const item: TestItem = { - id: 'id1', - name: 'test', - age: 100, - teams: ['abc', 'efg'], - }; - - const item2 = { - ...item, - 
id: 'id2', - name: 'test2', - age: 101, - }; - - const item3 = { - ...item, - id: 'id3', - name: 'test3', - age: 102, - }; - - describe('matchesFilter', () => { - it('returns true if no filter is provided', async () => { - const paginator = new Paginator(); - expect(paginator.matchesFilter(item)).toBeTruthy(); - }); - it('returns false if does not match the filter', async () => { - const paginator = new Paginator(); - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - name: { $eq: 'test1' }, - }); - expect(paginator.matchesFilter(item)).toBeFalsy(); - }); - it('returns true if item matches the filter', async () => { - const paginator = new Paginator(); - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - $or: [{ name: { $eq: 'test1' } }, { teams: { $contains: 'abc' } }], - }); - expect(paginator.matchesFilter(item)).toBeTruthy(); - }); - }); - - describe('ingestItem', () => { - it.each([ - ['on lockItemOrder: false', false], - ['on lockItemOrder: true', true], - ])( - 'exists but does not match the filter anymore removes the item %s', - (_, lockItemOrder) => { - const paginator = new Paginator({ lockItemOrder }); - paginator.state.partialNext({ - items: [item3, item2, item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams - }); - - const adjustedItem = { - ...item, - teams: ['efg'], // removed from the team abc - }; - - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item removed - expect(paginator.items).toHaveLength(2); - }, - ); - - it.each([ - [' adjusts the order on lockItemOrder: false', false], - [' does not adjust the order on lockItemOrder: true', true], - ])('exists and matches the filter updates the item and %s', (_, lockItemOrder) => { - const paginator = new Paginator({ lockItemOrder }); - paginator.state.partialNext({ - items: [item, item2, item3], 
- }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - age: { $gt: 100 }, - }); - - paginator.sort = { age: 1 }; - - const adjustedItem = { - ...item, - age: 103, - }; - - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated - expect(paginator.items).toHaveLength(3); - - if (lockItemOrder) { - expect(paginator.items).toStrictEqual([adjustedItem, item2, item3]); - } else { - expect(paginator.items).toStrictEqual([item2, item3, adjustedItem]); - } - }); - - it.each([ - ['on lockItemOrder: false', false], - ['on lockItemOrder: true', true], - ])( - 'does not exist and does not match the filter results in no action %s', - (_, lockItemOrder) => { - const paginator = new Paginator({ lockItemOrder }); - paginator.state.partialNext({ - items: [item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - age: { $gt: 100 }, - }); - - const adjustedItem = { - ...item, - id: 'id2', - name: 'test2', - }; - - expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action - expect(paginator.items).toStrictEqual([item]); - }, - ); - - it.each([ - ['on lockItemOrder: false', false], - ['on lockItemOrder: true', true], - ])( - 'does not exist and matches the filter inserts according to default sort order (append) %s', - (_, lockItemOrder) => { - const paginator = new Paginator({ lockItemOrder }); - paginator.state.partialNext({ - items: [item3, item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - teams: { $contains: 'abc' }, - }); - - expect(paginator.ingestItem(item2)).toBeTruthy(); - expect(paginator.items).toStrictEqual([item3, item, item2]); - }, - ); - - it.each([ - ['on lockItemOrder: false', false], - ['on lockItemOrder: true', true], - ])( - 'does not exist and matches the filter inserts according to sort order %s', - (_, lockItemOrder) => { - const paginator = new Paginator({ lockItemOrder }); - 
paginator.state.partialNext({ - items: [item3, item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - teams: { $contains: 'abc' }, - }); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ sort: { age: -1 } }); - - expect(paginator.ingestItem(item2)).toBeTruthy(); - expect(paginator.items).toHaveLength(3); - expect(paginator.items![0]).toStrictEqual(item3); - expect(paginator.items![1]).toStrictEqual(item2); - expect(paginator.items![2]).toStrictEqual(item); - }, - ); - - it('reflects the boost priority on lockItemOrder: false for newly ingested items', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item3, item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - teams: { $contains: 'abc' }, - }); - - paginator.boost(item2.id); - expect(paginator.ingestItem(item2)).toBeTruthy(); - expect(paginator.items).toStrictEqual([item2, item3, item]); - }); - - it('reflects the boost priority on lockItemOrder: false for existing items recently boosted', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item, item2, item3], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - age: { $gt: 100 }, - }); - - paginator.sort = { age: 1 }; - - const adjustedItem = { - ...item2, - age: 103, - }; - paginator.boost(item2.id); - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated - expect(paginator.items).toHaveLength(3); - - expect(paginator.items).toStrictEqual([adjustedItem, item, item3]); - }); - - it('does not reflect the boost priority on lockItemOrder: true', () => { - const paginator = new Paginator({ lockItemOrder: true }); - paginator.state.partialNext({ - items: [item, item2, item3], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - age: { $gt: 100 }, - }); - - paginator.sort 
= { age: 1 }; - - const adjustedItem = { - ...item2, - age: 103, - }; - paginator.boost(item2.id); - expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item updated - expect(paginator.items).toHaveLength(3); - - expect(paginator.items).toStrictEqual([item, adjustedItem, item3]); - }); - - it('reflects the boost priority on lockItemOrder: true when ingesting a new item', () => { - const paginator = new Paginator({ lockItemOrder: true }); - paginator.state.partialNext({ - items: [item3, item], - }); - - // @ts-expect-error accessing protected property - paginator.buildFilters = () => ({ - teams: { $contains: 'abc' }, - }); - - paginator.boost(item2.id); - expect(paginator.ingestItem(item2)).toBeTruthy(); - expect(paginator.items).toStrictEqual([item2, item3, item]); - }); - }); - - describe('removeItem', () => { - it('removes existing item', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item3, item2, item], - }); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - expect(paginator.removeItem({ item: item3 })).toBeTruthy(); - expect(paginator.items).toHaveLength(2); - expect(paginator.items![0]).toStrictEqual(item2); - expect(paginator.items![1]).toStrictEqual(item); - }); - - it('results in no action for non-existent item', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item2, item], - }); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - expect(paginator.removeItem({ item: item3 })).toBeFalsy(); - expect(paginator.items).toHaveLength(2); - expect(paginator.items![0]).toStrictEqual(item2); - expect(paginator.items![1]).toStrictEqual(item); - }); - }); - - describe('setItems', () => { - it('overrides all the items in the state with provided value', () => { - const paginator = new Paginator(); - const items1 = [{ id: 'test-item1' }]; - const items2 = [{ id: 'test-item2' }]; - 
paginator.setItems(items1); - expect(paginator.items).toStrictEqual(items1); - paginator.setItems(items2); - expect(paginator.items).toStrictEqual(items2); - }); - - const items = [{ id: 'test-item1' }]; - const expectedStateEmissions = [ - { - cursor: undefined, - hasNext: true, - hasPrev: true, - isLoading: false, - items: undefined, - lastQueryError: undefined, - offset: 0, - }, - { - cursor: undefined, - hasNext: true, - hasPrev: true, - isLoading: false, - items, - lastQueryError: undefined, - offset: 1, - }, - ]; - - it('emits state change as long as the items are not the same', () => { - const paginator = new Paginator(); - const subscriptionHandler = vi.fn(); - const unsubscribe = paginator.state.subscribe(subscriptionHandler); - expect(subscriptionHandler).toHaveBeenCalledTimes(1); - expect(subscriptionHandler).toHaveBeenCalledWith( - expectedStateEmissions[0], - undefined, - ); - - paginator.setItems(items); - expect(paginator.items).toStrictEqual(items); - expect(subscriptionHandler).toHaveBeenCalledTimes(2); - expect(subscriptionHandler).toHaveBeenCalledWith( - expectedStateEmissions[1], - expectedStateEmissions[0], - ); - - // setting an object with the same reference - paginator.setItems(items); - expect(paginator.items).toStrictEqual(items); - expect(subscriptionHandler).toHaveBeenCalledTimes(2); - expect(subscriptionHandler).toHaveBeenCalledWith( - expectedStateEmissions[1], - expectedStateEmissions[0], - ); - - unsubscribe(); - }); - - it('emits state change as long as the state factory returns objects with different reference', () => { - const paginator = new Paginator(); - const subscriptionHandler = vi.fn(); - const unsubscribe = paginator.state.subscribe(subscriptionHandler); - - paginator.setItems(() => items); - expect(paginator.items).toStrictEqual(items); - // first call is on subscribe - expect(subscriptionHandler).toHaveBeenCalledTimes(2); - expect(subscriptionHandler).toHaveBeenCalledWith( - expectedStateEmissions[1], - 
expectedStateEmissions[0], - ); - - // setting an object with the same reference - paginator.setItems(() => items); - expect(paginator.items).toStrictEqual(items); - expect(subscriptionHandler).toHaveBeenCalledTimes(2); - expect(subscriptionHandler).toHaveBeenCalledWith( - expectedStateEmissions[1], - expectedStateEmissions[0], - ); - - unsubscribe(); - }); - - it('updates the cursor if provided', () => { - const paginator = new Paginator(); - const cursors: PaginatorCursor[] = [ - { next: 'next1', prev: 'prev1' }, - { next: 'next2', prev: 'prev1' }, - ]; - const subscriptionHandler = vi.fn(); - const unsubscribe = paginator.state.subscribe(subscriptionHandler); - - paginator.setItems(items, cursors[0]); - expect(subscriptionHandler).toHaveBeenCalledTimes(2); - expect(subscriptionHandler).toHaveBeenCalledWith( - { ...expectedStateEmissions[1], cursor: cursors[0], offset: 0 }, - { ...expectedStateEmissions[0], cursor: undefined, offset: 0 }, - ); - - unsubscribe(); - }); - }); - - describe('reload', () => { - it('starts the ended pagination from the beginning', async () => { - const paginator = new Paginator({ pageSize: 2 }); - paginator.state.next({ - hasNext: false, - hasPrev: false, - isLoading: false, - items: [{ id: 'a' }, { id: 'b' }, { id: 'c' }, { id: 'd' }], - offset: 4, - }); - let reloadPromise = paginator.reload(); - // wait for the DB data first page load - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id1' }] }); - await reloadPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(false); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toEqual([{ id: 'id1' }]); - expect(paginator.cursor).toBeUndefined(); - expect(paginator.offset).toBe(1); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'next', - queryShape: defaultNextQueryShape, - reset: 'yes', - 
retryCount: 0, - }); - - reloadPromise = paginator.reload(); - // wait for the DB data first page load - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - - paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2' }); - await reloadPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(false); - expect(paginator.items).toEqual([{ id: 'id2' }]); - expect(paginator.cursor).toStrictEqual({ next: 'next2', prev: null }); - expect(paginator.offset).toBe(0); - expect(paginator.mockClientQuery).toHaveBeenCalledWith({ - direction: 'next', - queryShape: defaultNextQueryShape, - reset: 'yes', - retryCount: 0, - }); - - // reset in another direction - reloadPromise = paginator.reload(); - // wait for the DB data first page load - await sleep(0); - expect(paginator.isLoading).toBe(true); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(true); - expect(paginator.items).toBe(undefined); - - paginator.queryResolve({ items: [{ id: 'id2' }], next: 'next2' }); - await reloadPromise; - expect(paginator.isLoading).toBe(false); - expect(paginator.hasNext).toBe(true); - expect(paginator.hasPrev).toBe(false); - expect(paginator.items).toEqual([{ id: 'id2' }]); - expect(paginator.cursor).toStrictEqual({ next: 'next2', prev: null }); - expect(paginator.offset).toBe(0); - }); - }); - - describe('contains', () => { - it('returns true if the item exists', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item3, item2, item], - }); - expect(paginator.contains(item3)).toBeTruthy(); - }); - - it('returns false if the items does not exist', () => { - const paginator = new Paginator(); - paginator.state.partialNext({ - items: [item2, item], - }); - expect(paginator.contains(item3)).toBeFalsy(); - }); - }); - - describe('locateByItem', () => { - const a: TestItem = { id: 'a', 
age: 30, name: 'A' }; - const b: TestItem = { id: 'b', age: 25, name: 'B' }; - const c: TestItem = { id: 'c', age: 25, name: 'C' }; - const d: TestItem = { id: 'd', age: 20, name: 'D' }; - - const tieBreakerById = (l: TestItem, r: TestItem) => - l.id < r.id ? -1 : l.id > r.id ? 1 : 0; - - it('returns {index:-1, insertionIndex:0} for empty list', () => { - const paginator = new Paginator(); - const res = paginator.locateByItem(a); - expect(res).toEqual({ index: -1, insertionIndex: 0 }); - }); - - it('finds an existing item on a tie plateau (no ID tiebreaker)', () => { - const paginator = new Paginator(); - // comparator: age desc only (ties produce a plateau) - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - // items are already sorted by age desc - paginator.state.partialNext({ items: [a, b, c, d] }); - - const res = paginator.locateByItem(c); - expect(res.index).toBe(2); // c is at index 2 in [a, b, c, d] - // insertionIndex for identical key (age 25) is after the plateau - expect(res.insertionIndex).toBe(3); - }); - - it('returns insertion index when not found on a tie plateau (no ID tiebreaker)', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - // same sort keys as b/c but different id; not present - const x: TestItem = { id: 'x', age: 25, name: 'X' }; - const res = paginator.locateByItem(x); - // insertion point should be after the 25-plateau (after c at index 2) - expect(res.index).toBe(-1); - expect(res.insertionIndex).toBe(3); - }); - - it('finds exact index with ID tiebreaker in comparator (pure O(log n))', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - // tie-breaker on id asc guarantees a total order - tiebreaker: tieBreakerById, - }); - - // With 
tiebreaker, the order within age==25 is by id asc: b (id 'b'), then c (id 'c') - paginator.state.partialNext({ items: [a, b, c, d] }); - - const res = paginator.locateByItem(c); - expect(res.index).toBe(2); - // In this setting the insertionIndex is deterministic but not strictly needed when found - expect(res.insertionIndex).toBeGreaterThanOrEqual(2); - }); - - it('computes insertion at the beginning when needle sorts before all items', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - tiebreaker: tieBreakerById, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - const z: TestItem = { id: 'z', age: 40, name: 'Z' }; // highest age → goes to front - const res = paginator.locateByItem(z); - expect(res.index).toBe(-1); - expect(res.insertionIndex).toBe(0); - }); - - it('computes insertion at the end when needle sorts after all items', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - tiebreaker: tieBreakerById, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - const z: TestItem = { id: 'z', age: 10, name: 'Z' }; // lowest age → goes to end - const res = paginator.locateByItem(z); - expect(res.index).toBe(-1); - expect(res.insertionIndex).toBe(4); - }); - - it('checks both immediate neighbors before plateau scan (fast path)', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - // needle equal to left neighbor of insertionIndex - const resLeftNeighbor = paginator.locateByItem(c); - expect(resLeftNeighbor.index).toBe(2); - - // needle equal to right neighbor (craft by duplicating c’s sort but different id not present) - const y: TestItem = { id: 'y', age: 25, name: 'Y' }; - const resRightNeighbor = 
paginator.locateByItem(y); - expect(resRightNeighbor.index).toBe(-1); - expect(resRightNeighbor.insertionIndex).toBe(3); - }); - }); - - describe('findItem', () => { - const a: TestItem = { id: 'a', age: 30 }; - const b: TestItem = { id: 'b', age: 25 }; - const c: TestItem = { id: 'c', age: 25 }; - const d: TestItem = { id: 'd', age: 20 }; - - it('returns the exact item instance when present', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - // Same identity object: - expect(paginator.findItem(c)).toBe(c); - - // Same identity by id but different object reference still matches by locateByItem: - const cClone = { ...c }; - expect(paginator.findItem(cClone)).toBe(c); - }); - - it('returns undefined when not present', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - }); - paginator.state.partialNext({ items: [a, b, d] }); - - const needle: TestItem = { id: 'x', age: 25 }; - expect(paginator.findItem(needle)).toBeUndefined(); - }); - - it('works with an ID tie-breaker comparator as well', () => { - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: -1 }, - tiebreaker: (l: TestItem, r: TestItem) => - l.id < r.id ? -1 : l.id > r.id ? 
1 : 0, - }); - paginator.state.partialNext({ items: [a, b, c, d] }); - - expect(paginator.findItem(c)).toBe(c); - const x: TestItem = { id: 'x', age: 25 }; - expect(paginator.findItem(x)).toBeUndefined(); - }); - - it('handles empty list', () => { - const paginator = new Paginator(); - expect(paginator.findItem({ id: 'z' })).toBeUndefined(); - }); - }); - - describe('filter resolvers', () => { - const resolvers1 = [{ matchesField: () => true, resolve: () => 'abc' }]; - const resolvers2 = [ - { matchesField: () => false, resolve: () => 'efg' }, - { matchesField: () => true, resolve: () => 'hij' }, - ]; - it('get overridden with setFilterResolvers', () => { - const paginator = new Paginator(); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(0); - - paginator.setFilterResolvers(resolvers1); - - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers1.length); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); - - paginator.setFilterResolvers(resolvers2); - - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers2.length); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers2); - - paginator.setFilterResolvers([]); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toHaveLength(0); - }); - - it('get expanded with addFilterResolvers', () => { - const paginator = new Paginator(); - paginator.addFilterResolvers(resolvers1); - - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); - - paginator.addFilterResolvers(resolvers2); - - // @ts-expect-error accessing protected property - 
expect(paginator._filterFieldToDataResolvers).toStrictEqual([ - ...resolvers1, - ...resolvers2, - ]); - - paginator.addFilterResolvers([]); - // @ts-expect-error accessing protected property - expect(paginator._filterFieldToDataResolvers).toStrictEqual([ - ...resolvers1, - ...resolvers2, - ]); - }); - }); - - describe('item boosting', () => { - const a = { id: 'a', age: 10, name: 'A' } as TestItem; - const b = { id: 'b', age: 20, name: 'B' } as TestItem; - const c = { id: 'c', age: 30, name: 'C' } as TestItem; - - const byIdAsc = (l: TestItem, r: TestItem) => - l.id < r.id ? -1 : l.id > r.id ? 1 : 0; - - describe('clearExpiredBoosts', () => { - it('removes expired boosts and updates maxBoostSeq', () => { - const paginator = new Paginator(); - // @ts-expect-error accessing protected property - paginator.boosts.clear(); - const now = 1000000; - - paginator.boost('fresh', { until: now + 1000, seq: 1 }); - paginator.boost('stale', { until: now - 1, seq: 5 }); - - // @ts-expect-error accessing protected method - paginator.clearExpiredBoosts(now); - - // @ts-expect-error accessing protected property - expect(Array.from(paginator.boosts.keys())).toEqual(['fresh']); - expect(paginator.maxBoostSeq).toBe(1); - }); - - it('sets maxBoostSeq to 0 when no boosts remain', () => { - const paginator = new Paginator(); - // two expired boosts at "now" - paginator.boost('x', { until: 1000, seq: 1 }); - paginator.boost('y', { until: 1500, seq: 3 }); - - // @ts-expect-error accessing protected method - paginator.clearExpiredBoosts(10000); - - // @ts-expect-error accessing protected property - expect(paginator.boosts.size).toBe(0); - expect(paginator.maxBoostSeq).toBe(0); - }); - }); - - describe('boostComparator', () => { - it('prioritizes boosted over non-boosted', () => { - vi.useFakeTimers(); - const now = new Date('2025-01-01T00:00:00Z'); - vi.setSystemTime(now); - - const paginator = new Paginator(); - paginator.sortComparator = byIdAsc; - - // Boost only "a" - 
paginator.boost('b', { ttlMs: 10000, seq: 0 }); - - // @ts-expect-error: protected method - expect(paginator.boostComparator(a, b)).toBe(1); // a after b - // @ts-expect-error - expect(paginator.boostComparator(b, a)).toBe(-1); // b stays before a - - // Let boost expire - vi.setSystemTime(new Date(now.getTime() + 11000)); - // @ts-expect-error - expect(paginator.boostComparator(a, b)).toBe(-1); // fallback to byIdAsc - vi.useRealTimers(); - }); - - it('when both boosted, higher seq comes first; ties fall back to sortComparator', () => { - vi.useFakeTimers(); - const now = new Date('2025-01-01T00:00:00Z'); - vi.setSystemTime(now); - - const paginator = new Paginator(); - // Fallback comparator id asc - paginator.sortComparator = byIdAsc; - - paginator.boost('a', { ttlMs: 60000, seq: 1 }); - paginator.boost('b', { ttlMs: 60000, seq: 3 }); - - // b has higher seq → should come first → comparator(a,b) > 0 - // @ts-expect-error - expect(paginator.boostComparator(a, b)).toBe(1); - // reverse check - // @ts-expect-error - expect(paginator.boostComparator(b, a)).toBe(-1); - - // Equal seq → fall back to sortComparator (id asc => a before b) - paginator.boost('a', { ttlMs: 60000, seq: 2 }); - paginator.boost('b', { ttlMs: 60000, seq: 2 }); - // @ts-expect-error - expect(paginator.boostComparator(a, b)).toBe(-1); - - vi.useRealTimers(); - }); - - it('ignores expired boosts automatically during comparison', () => { - vi.useFakeTimers(); - const now = new Date('2025-01-01T00:00:00Z'); - vi.setSystemTime(now); - - const paginator = new Paginator(); - paginator.sortComparator = byIdAsc; - - paginator.boost('b', { ttlMs: 5000, seq: 10 }); - // Initially boosted - // @ts-expect-error - expect(paginator.boostComparator(a, b)).toBe(1); - - // Advance beyond TTL so boost is expired; comparator should fall back - vi.setSystemTime(new Date(now.getTime() + 6000)); - // @ts-expect-error - expect(paginator.boostComparator(a, b)).toBe(-1); // byIdAsc, not boost - vi.useRealTimers(); - }); 
- }); - - describe('boost', () => { - it('assigns default TTL (15s) and default seq=0; updates maxBoostSeq only upward', () => { - vi.useFakeTimers(); - const now = new Date('2025-01-01T00:00:00Z'); - vi.setSystemTime(now); - - const paginator = new Paginator(); - - paginator.boost('k'); // default 15s, seq 0 - const b1 = paginator.getBoost('k')!; - expect(b1.seq).toBe(0); - expect(b1.until).toBe(now.getTime() + 15000); - expect(paginator.maxBoostSeq).toBe(0); - - // Raise max seq - paginator.boost('m', { ttlMs: 1000, seq: 5 }); - expect(paginator.maxBoostSeq).toBe(5); - - // Lower seq should NOT decrease maxBoostSeq - paginator.boost('n', { ttlMs: 1000, seq: 2 }); - expect(paginator.maxBoostSeq).toBe(5); - - vi.useRealTimers(); - }); - - it('accepts explicit until and seq', () => { - const paginator = new Paginator(); - paginator.boost('z', { until: 42, seq: 7 }); - const b = paginator.getBoost('z')!; - expect(b.until).toBe(42); - expect(b.seq).toBe(7); - expect(paginator.maxBoostSeq).toBe(7); - }); - }); - - describe('getBoost', () => { - it('returns the boost record when present; otherwise undefined', () => { - const paginator = new Paginator(); - expect(paginator.getBoost('missing')).toBeUndefined(); - paginator.boost('a', { ttlMs: 1000, seq: 1 }); - const b = paginator.getBoost('a'); - expect(b).toBeDefined(); - expect(b!.seq).toBe(1); - }); - }); - - describe('removeBoost', () => { - it('removes a boost and recalculates maxBoostSeq', () => { - const paginator = new Paginator(); - paginator.boost('a', { ttlMs: 60000, seq: 1 }); - paginator.boost('b', { ttlMs: 60000, seq: 5 }); - paginator.boost('c', { ttlMs: 60000, seq: 2 }); - expect(paginator.maxBoostSeq).toBe(5); - - paginator.removeBoost('b'); // remove current max - expect(paginator.getBoost('b')).toBeUndefined(); - expect(paginator.maxBoostSeq).toBe(2); - - paginator.removeBoost('c'); - expect(paginator.getBoost('c')).toBeUndefined(); - expect(paginator.maxBoostSeq).toBe(1); - - 
paginator.removeBoost('a'); - expect(paginator.getBoost('a')).toBeUndefined(); - expect(paginator.maxBoostSeq).toBe(0); - }); - }); - - describe('isBoosted', () => { - it('returns true when boost exists and now <= until; false otherwise', () => { - vi.useFakeTimers(); - const now = new Date('2025-01-01T00:00:00Z'); - vi.setSystemTime(now); - - const paginator = new Paginator(); - expect(paginator.isBoosted('x')).toBe(false); - - paginator.boost('x', { ttlMs: 5000, seq: 0 }); - expect(paginator.isBoosted('x')).toBe(true); - - // Exactly at until is still considered boosted per <= check - vi.setSystemTime(new Date(now.getTime() + 5000)); - expect(paginator.isBoosted('x')).toBe(true); - - // After until → false - vi.setSystemTime(new Date(now.getTime() + 5001)); - expect(paginator.isBoosted('x')).toBe(false); - - vi.useRealTimers(); - }); - }); - - describe('integration: ingestion respects boostComparator implicitly', () => { - it('newly ingested boosted items float above non-boosted regardless of fallback sort', () => { - vi.useFakeTimers(); - vi.setSystemTime(new Date('2025-01-01T00:00:00Z')); - - const paginator = new Paginator(); - paginator.sortComparator = makeComparator< - TestItem, - Partial> - >({ - sort: { age: 1 }, // ascending age (so normally a < b < c by age) - }); - paginator.state.partialNext({ items: [a, b] }); - - // Boost "c" before ingest → it should be placed ahead of non-boosted even though age is highest - paginator.boost('c', { ttlMs: 60000, seq: 1 }); - expect(paginator.ingestItem(c)).toBeTruthy(); - - // c should be first due to boost, then a, then b (fallback sort would place c last otherwise) - expect(paginator.items!.map((i) => i.id)).toEqual(['c', 'a', 'b']); - - vi.useRealTimers(); - }); - }); - }); - }); -}); diff --git a/test/unit/pagination/ItemIndex.test.ts b/test/unit/pagination/ItemIndex.test.ts new file mode 100644 index 0000000000..1c57f3deec --- /dev/null +++ b/test/unit/pagination/ItemIndex.test.ts @@ -0,0 +1,175 @@ +import { 
describe, it, expect, beforeEach } from 'vitest'; +import { ItemIndex } from '../../../src/pagination/ItemIndex'; + +interface TestItem { + id: string; + value: number; +} + +describe('ItemIndex', () => { + let itemIndex: ItemIndex; + const getId = (item: TestItem) => item.id; + + beforeEach(() => { + itemIndex = new ItemIndex({ getId }); + }); + + describe('constructor', () => { + it('should initialize with an empty index', () => { + expect(itemIndex.entries()).toEqual([]); + }); + + it('should accept a custom getId function', () => { + const customIndex = new ItemIndex<{ key: string }>({ getId: (item) => item.key }); + const item = { key: '123' }; + customIndex.setOne(item); + expect(customIndex.get('123')).toBe(item); + }); + }); + + describe('setOne', () => { + it('should add a single item', () => { + const item: TestItem = { id: '1', value: 10 }; + itemIndex.setOne(item); + expect(itemIndex.get('1')).toBe(item); + expect(itemIndex.has('1')).toBe(true); + }); + + it('should overwrite an existing item with the same ID', () => { + const item1: TestItem = { id: '1', value: 10 }; + const item2: TestItem = { id: '1', value: 20 }; + + itemIndex.setOne(item1); + expect(itemIndex.get('1')).toBe(item1); + + itemIndex.setOne(item2); + expect(itemIndex.get('1')).toBe(item2); + expect(itemIndex.get('1')?.value).toBe(20); + }); + }); + + describe('setMany', () => { + it('should add multiple items', () => { + const items: TestItem[] = [ + { id: '1', value: 10 }, + { id: '2', value: 20 }, + { id: '3', value: 30 }, + ]; + + itemIndex.setMany(items); + + expect(itemIndex.get('1')).toBe(items[0]); + expect(itemIndex.get('2')).toBe(items[1]); + expect(itemIndex.get('3')).toBe(items[2]); + expect(itemIndex.entries().length).toBe(3); + }); + + it('should handle empty array', () => { + itemIndex.setMany([]); + expect(itemIndex.entries().length).toBe(0); + }); + + it('should overwrite existing items when setting many', () => { + const item1: TestItem = { id: '1', value: 10 }; + 
itemIndex.setOne(item1); + + const newItems: TestItem[] = [ + { id: '1', value: 99 }, + { id: '2', value: 20 }, + ]; + + itemIndex.setMany(newItems); + + expect(itemIndex.get('1')?.value).toBe(99); + expect(itemIndex.get('2')?.value).toBe(20); + }); + }); + + describe('get', () => { + it('should return undefined for non-existent item', () => { + expect(itemIndex.get('non-existent')).toBeUndefined(); + }); + + it('should return the correct item for existing ID', () => { + const item: TestItem = { id: 'abc', value: 123 }; + itemIndex.setOne(item); + expect(itemIndex.get('abc')).toBe(item); + }); + }); + + describe('has', () => { + it('should return false for non-existent item', () => { + expect(itemIndex.has('non-existent')).toBe(false); + }); + + it('should return true for existing item', () => { + const item: TestItem = { id: 'abc', value: 123 }; + itemIndex.setOne(item); + expect(itemIndex.has('abc')).toBe(true); + }); + }); + + describe('remove', () => { + it('should remove an existing item', () => { + const item: TestItem = { id: '1', value: 10 }; + itemIndex.setOne(item); + expect(itemIndex.has('1')).toBe(true); + + itemIndex.remove('1'); + expect(itemIndex.has('1')).toBe(false); + expect(itemIndex.get('1')).toBeUndefined(); + }); + + it('should do nothing when removing non-existent item', () => { + // Should not throw + itemIndex.remove('non-existent'); + expect(itemIndex.entries().length).toBe(0); + }); + }); + + describe('entries', () => { + it('should return all entries as an array of [id, item] tuples', () => { + const items: TestItem[] = [ + { id: '1', value: 10 }, + { id: '2', value: 20 }, + ]; + itemIndex.setMany(items); + + const entries = itemIndex.entries(); + expect(entries).toHaveLength(2); + expect(entries).toEqual( + expect.arrayContaining([ + ['1', items[0]], + ['2', items[1]], + ]), + ); + }); + + it('should return empty array for empty index', () => { + expect(itemIndex.entries()).toEqual([]); + }); + }); + + describe('values', () => { + 
it('should return all values as an array of items', () => { + const items: TestItem[] = [ + { id: '1', value: 10 }, + { id: '2', value: 20 }, + ]; + itemIndex.setMany(items); + + const entries = itemIndex.values(); + expect(entries).toHaveLength(2); + expect(entries).toEqual( + expect.arrayContaining([ + { id: '1', value: 10 }, + { id: '2', value: 20 }, + ]), + ); + }); + + it('should return empty array for empty index', () => { + expect(itemIndex.values()).toEqual([]); + }); + }); +}); diff --git a/test/unit/pagination/paginators/BasePaginator.test.ts b/test/unit/pagination/paginators/BasePaginator.test.ts new file mode 100644 index 0000000000..4e05c82052 --- /dev/null +++ b/test/unit/pagination/paginators/BasePaginator.test.ts @@ -0,0 +1,3639 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; +import { + AscDesc, + BasePaginator, + DEFAULT_PAGINATION_OPTIONS, + ItemCoordinates, + LOGICAL_HEAD_INTERVAL_ID, + LOGICAL_TAIL_INTERVAL_ID, + PaginationQueryParams, + PaginationQueryReturnValue, + PaginatorCursor, + type PaginatorOptions, + PaginatorState, + PrimitiveFilter, + QueryFilter, + QueryFilters, + RequireOnlyOne, + ZERO_PAGE_CURSOR, +} from '../../../../src'; +import { sleep } from '../../../../src/utils'; +import { makeComparator } from '../../../../src/pagination/sortCompiler'; +import { DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES } from '../../../../src/constants'; +import { ItemIndex } from '../../../../src/pagination/ItemIndex'; + +const toNextTick = async () => { + const sleepPromise = sleep(0); + vi.advanceTimersByTime(0); + await sleepPromise; +}; + +type TestItem = { + id: string; + name?: string; + teams?: string[]; + blocked?: boolean; + createdAt?: string; // date string + age?: number; +}; + +type QueryShape = { + filters: { + [Key in keyof TestItem]: + | RequireOnlyOne> + | PrimitiveFilter; + }; + sort: { [Key in keyof TestItem]?: AscDesc }; +}; + +class IncompletePaginator extends BasePaginator { + sort: QueryFilters | 
undefined; + // @ts-ignore + sortComparator: (a: TestItem, b: TestItem) => number = vi.fn().mockReturnValue(0); // BasePaginator implementation + queryResolve: Function = vi.fn(); + queryReject: Function = vi.fn(); + queryPromise: Promise> | null = null; + mockClientQuery = vi.fn(); + + constructor(options: PaginatorOptions = {}) { + super(options); + } + + query( + params: PaginationQueryParams, + ): Promise> { + const promise = new Promise>( + (queryResolve, queryReject) => { + this.queryResolve = queryResolve; + this.queryReject = queryReject; + }, + ); + this.mockClientQuery(params); + this.queryPromise = promise; + return promise; + } + + filterQueryResults(items: TestItem[]): TestItem[] | Promise { + return items; + } +} + +const defaultNextQueryShape: QueryShape = { filters: { id: 'test-id' }, sort: { id: 1 } }; + +class Paginator extends IncompletePaginator { + constructor(options: PaginatorOptions = {}) { + super(options); + } + + getNextQueryShape = vi.fn().mockReturnValue(defaultNextQueryShape); +} + +const itemIndex = new ItemIndex({ getId: ({ id }) => id }); +const a: TestItem = { id: 'a', age: 30, name: 'A' }; +const b: TestItem = { id: 'b', age: 25, name: 'B' }; +const c: TestItem = { id: 'c', age: 25, name: 'C' }; +const d: TestItem = { id: 'd', age: 20, name: 'D' }; + +const v: TestItem = { id: 'v', age: 10, name: 'V' }; +const x: TestItem = { id: 'x', age: 5, name: 'x' }; +const y: TestItem = { id: 'y', age: 4, name: 'Y' }; +const z: TestItem = { id: 'z', age: 1, name: 'Z' }; + +describe('BasePaginator', () => { + describe('constructor', () => { + it('initiates with the defaults', () => { + const paginator = new Paginator(); + expect(paginator.state.getLatestValue()).toEqual({ + hasMoreTail: true, + hasMoreHead: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: undefined, + offset: 0, + }); + expect(paginator.isInitialized).toBe(false); + // @ts-expect-error accessing protected property + 
expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + expect(paginator.config.initialCursor).toBeUndefined(); + expect(paginator.config.initialOffset).toBeUndefined(); + expect(paginator.config.throwErrors).toBe(false); + expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); + expect(paginator.config.debounceMs).toBe(DEFAULT_PAGINATION_OPTIONS.debounceMs); + expect(paginator.config.lockItemOrder).toBe( + DEFAULT_PAGINATION_OPTIONS.lockItemOrder, + ); + expect(paginator.config.hasPaginationQueryShapeChanged).toBe( + DEFAULT_PAGINATION_OPTIONS.hasPaginationQueryShapeChanged, + ); + }); + + it('initiates with custom options', () => { + const options: PaginatorOptions = { + debounceMs: DEFAULT_PAGINATION_OPTIONS.debounceMs - 100, + doRequest: () => Promise.resolve({ items: [{ id: 'test-id' }] }), + hasPaginationQueryShapeChanged: () => true, + initialCursor: { tailward: 'tailward', headward: 'headward' }, + initialOffset: 10, + lockItemOrder: !DEFAULT_PAGINATION_OPTIONS.lockItemOrder, + pageSize: DEFAULT_PAGINATION_OPTIONS.pageSize - 1, + throwErrors: true, + }; + const paginator = new Paginator(options); + expect(paginator.state.getLatestValue()).toEqual({ + hasMoreTail: true, + hasMoreHead: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + cursor: options.initialCursor, + offset: options.initialOffset, + }); + expect(paginator.isInitialized).toBe(false); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + expect(paginator.config.initialCursor).toStrictEqual(options.initialCursor); + expect(paginator.config.initialOffset).toStrictEqual(options.initialOffset); + expect(paginator.config.throwErrors).toBe(options.throwErrors); + expect(paginator.pageSize).toBe(options.pageSize); + expect(paginator.config.hasPaginationQueryShapeChanged).toStrictEqual( + options.hasPaginationQueryShapeChanged, + ); + 
expect(paginator.config.debounceMs).toBe(options.debounceMs); + expect(paginator.config.lockItemOrder).toBe(options.lockItemOrder); + }); + }); + + describe('pagination API', () => { + it('throws if the paginator does not implement its own getNextQueryShape', () => { + const paginator = new IncompletePaginator(); + // @ts-expect-error accessing protected property + expect(paginator.getNextQueryShape).toThrow( + 'Paginator.getNextQueryShape() is not implemented', + ); + }); + + describe('shouldResetStateBeforeQuery', () => { + const stateBeforeQuery: PaginatorState = { + hasMoreTail: true, + hasMoreHead: true, + isLoading: false, + items: [{ id: 'test-item' }], + lastQueryError: undefined, + cursor: { tailward: 'tailward', headward: 'headward' }, + offset: 10, + }; + + const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; + const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; + + it('resets the state before a query when querying the first page', () => { + const paginator = new Paginator(); + const initialState = { ...stateBeforeQuery, items: undefined }; + paginator.state.next(initialState); + expect(paginator.state.getLatestValue()).toEqual(initialState); + // @ts-expect-error accessing protected property + expect(paginator.shouldResetStateBeforeQuery()).toBe(true); + }); + + it('resets the state before a query when query shape changed', () => { + const prevQueryShape: QueryShape = { filters: { id: 'a' }, sort: { id: 1 } }; + const nextQueryShape: QueryShape = { filters: { id: 'b' }, sort: { id: 1 } }; + const paginator = new Paginator(); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), + ).toBe(true); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), + ).toBe(false); + }); + + it('determines whether pagination state should be reset before a query using
custom logic', () => { + const options = { + hasPaginationQueryShapeChanged: vi.fn().mockReturnValue(true), + }; + const paginator = new Paginator(options); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, nextQueryShape), + ).toBe(true); + expect( + // @ts-expect-error accessing protected property + paginator.shouldResetStateBeforeQuery(prevQueryShape, prevQueryShape), + ).toBe(true); + expect(options.hasPaginationQueryShapeChanged).toHaveBeenCalledTimes(2); + }); + }); + + it('paginates to next pages (cursor)', async () => { + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); + let nextPromise = paginator.toTail(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + nextPromise = paginator.toTail(); + expect(paginator.isLoading).toBe(true); + paginator.queryResolve({ + items: [{ id: 'id2' }], + tailward: 'next2', + headward: 'prev2', + }); + await nextPromise; + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toEqual({ tailward: 'next2', headward: 'prev2' }); + + nextPromise = paginator.toTail(); + paginator.queryResolve({ items: [] }); + await nextPromise; + 
expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(false); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toEqual({ tailward: null, headward: null }); + + paginator.toTail(); + expect(paginator.isLoading).toBe(false); + expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); + }); + + it('paginates to next pages (offset)', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.toTail(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + nextPromise = paginator.toTail(); + expect(paginator.isLoading).toBe(true); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(2); + + nextPromise = paginator.toTail(); + paginator.queryResolve({ items: [] }); + await nextPromise; + expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(2); + + paginator.toTail(); + expect(paginator.isLoading).toBe(false); + 
expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); + }); + + it('paginates to next pages debounced (cursor)', async () => { + vi.useFakeTimers(); + const paginator = new Paginator({ + debounceMs: 2000, + initialCursor: ZERO_PAGE_CURSOR, + pageSize: 1, + }); + + paginator.toTailDebounced(); + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + vi.advanceTimersByTime(2000); + // await first page load from the DB + await toNextTick(); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await paginator.queryPromise; + await toNextTick(); + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + vi.useRealTimers(); + }); + + it('paginates to next pages debounced (offset)', async () => { + vi.useFakeTimers(); + const paginator = new Paginator({ debounceMs: 2000, pageSize: 1 }); + + paginator.toTailDebounced(); + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + vi.advanceTimersByTime(2000); + // await first page load from the DB + await toNextTick(); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + }); + await paginator.queryPromise; + await toNextTick(); + expect(paginator.isLoading).toBe(false); + 
expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + vi.useRealTimers(); + }); + + it('paginates to a previous page (cursor only)', async () => { + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); + let nextPromise = paginator.toHead(); + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'headward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + nextPromise = paginator.toHead(); + expect(paginator.isLoading).toBe(true); + paginator.queryResolve({ + items: [{ id: 'id2' }], + tailward: 'next2', + headward: 'prev2', + }); + await nextPromise; + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.cursor).toEqual({ tailward: 'next2', headward: 'prev2' }); + + nextPromise = paginator.toHead(); + paginator.queryResolve({ items: [] }); + await nextPromise; + expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(false); + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + 
expect(paginator.cursor).toEqual({ tailward: null, headward: null }); + + paginator.toHead(); + expect(paginator.isLoading).toBe(false); + }); + + it('debounces the pagination to a previous page (cursor only)', async () => { + vi.useFakeTimers(); + const paginator = new Paginator({ + debounceMs: 2000, + initialCursor: ZERO_PAGE_CURSOR, + }); + + paginator.toHeadDebounced(); + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + vi.advanceTimersByTime(2000); + await toNextTick(); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await paginator.queryPromise; + await toNextTick(); + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'headward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + vi.useRealTimers(); + }); + + it('cancelScheduledQuery cancels a pending debounced query', async () => { + vi.useFakeTimers(); + const paginator = new Paginator({ debounceMs: 2000 }); + + paginator.toTailDebounced(); + paginator.cancelScheduledQuery(); + + vi.advanceTimersByTime(2000); + await toNextTick(); + + expect(paginator.isLoading).toBe(false); + expect(paginator.mockClientQuery).not.toHaveBeenCalled(); + + vi.useRealTimers(); + }); + + it('prevents pagination if another query is in progress', async () => { + const paginator = new Paginator(); + const nextPromise1 = paginator.toTail(); + // wait for the first page load from the DB + await sleep(0); + expect(paginator.isLoading).toBe(true); + 
expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); + const nextPromise2 = paginator.toTail(); + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await Promise.all([nextPromise1, nextPromise2]); + expect(paginator.mockClientQuery).toHaveBeenCalledTimes(1); + }); + + it('resets the state if the query shape changed', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.toTail(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + paginator.getNextQueryShape.mockReturnValueOnce({ + filters: { id: 'test' }, + sort: { id: -1 }, + }); + nextPromise = paginator.toTail(); + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.items).toBeUndefined(); + expect(paginator.offset).toBe(0); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.offset).toBe(1); + }); + + it('resets the state if forced', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.toTail(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + nextPromise = paginator.toTail({ reset: 'yes' }); + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.items).toBeUndefined(); + 
expect(paginator.offset).toBe(0); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.offset).toBe(1); + }); + + it('does not reset the state if forced', async () => { + const paginator = new Paginator({ pageSize: 1 }); + let nextPromise = paginator.toTail(); + await sleep(0); + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await nextPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + + paginator.getNextQueryShape.mockReturnValueOnce({ + filters: { id: 'test' }, + sort: { id: -1 }, + }); + nextPromise = paginator.toTail({ reset: 'no' }); + await sleep(0); + expect(paginator.items).toStrictEqual([{ id: 'id1' }]); + expect(paginator.offset).toBe(1); + paginator.queryResolve({ items: [{ id: 'id2' }] }); + await nextPromise; + expect(paginator.items).toEqual([{ id: 'id1' }, { id: 'id2' }]); + expect(paginator.offset).toBe(2); + }); + + it('stores lastQueryError and clears it with the next successful query', async () => { + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); + let nextPromise = paginator.toTail(); + // wait for the first page load from DB + await sleep(0); + const error = new Error('Failed'); + paginator.queryReject(error); + // hand over to finish the cleanup and state update after the query execution + await sleep(0); + expect(paginator.lastQueryError).toEqual(error); + expect(paginator.isLoading).toEqual(false); + + nextPromise = paginator.toTail(); + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.lastQueryError).toBeUndefined(); + expect(paginator.items).toEqual([{ id: 'id1' }]); + 
expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + }); + + it('throws error if enabled', async () => { + const paginator = new Paginator({ + initialCursor: ZERO_PAGE_CURSOR, + throwErrors: true, + }); + let nextPromise = paginator.toTail(); + // wait for the first page load from DB + await sleep(0); + const error = new Error('Failed'); + paginator.queryReject(error); + await expect(nextPromise).rejects.toThrowError(error); + // hand over to finish the cleanup and state update after the query execution + await sleep(0); + expect(paginator.lastQueryError).toEqual(error); + expect(paginator.isLoading).toEqual(false); + + nextPromise = paginator.toTail(); + // wait for the first page load from DB + await sleep(0); + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.lastQueryError).toBeUndefined(); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + }); + + it('retries the query', async () => { + vi.useFakeTimers(); + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); + let nextPromise = paginator.toTail({ retryCount: 2 }); + // wait for the first page load from DB + await toNextTick(); + const error = new Error('Failed'); + paginator.queryReject(error); + // hand over to finish the cleanup and state update after the query execution + await toNextTick(); + expect(paginator.lastQueryError).toEqual(error); + vi.advanceTimersByTime(DEFAULT_QUERY_CHANNELS_MS_BETWEEN_RETRIES); + await toNextTick(); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.lastQueryError).toBeUndefined(); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + vi.useRealTimers(); + }); + + it('executeQuery uses explicit 
queryShape and does not call getNextQueryShape', async () => { + const paginator = new Paginator(); + const forcedShape: QueryShape = { + filters: { id: 'forced' }, + sort: { id: -1 }, + }; + + const promise = paginator.executeQuery({ + direction: 'tailward', + queryShape: forcedShape, + }); + + await sleep(0); + paginator.queryResolve({ items: [] }); + await promise; + + expect(paginator.getNextQueryShape).not.toHaveBeenCalled(); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: forcedShape, + reset: undefined, + retryCount: 0, + }); + }); + + it.todo( + 'prevents setting active interval and emitting new state whe updateState === false', + () => {}, + ); + }); + + describe('item management', () => { + const item1: TestItem = { + id: 'id1', + name: 'test', + age: 100, + teams: ['abc', 'efg'], + }; + + const item2 = { + ...item1, + id: 'id2', + name: 'test2', + age: 101, + }; + + const item3 = { + ...item1, + id: 'id3', + name: 'test3', + age: 102, + }; + + it('hasResults reflects whether items have been set', () => { + const paginator = new Paginator(); + expect(paginator.hasResults).toBe(false); + + paginator.state.partialNext({ items: [] }); + expect(paginator.hasResults).toBe(true); + + paginator.resetState(); + expect(paginator.hasResults).toBe(false); + }); + + describe('matchesFilter', () => { + it('returns true if no filter is provided', async () => { + const paginator = new Paginator(); + expect(paginator.matchesFilter(item1)).toBeTruthy(); + }); + it('returns false if does not match the filter', async () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + name: { $eq: 'test1' }, + }); + expect(paginator.matchesFilter(item1)).toBeFalsy(); + }); + it('returns true if item matches the filter', async () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + 
$or: [{ name: { $eq: 'test1' } }, { teams: { $contains: 'abc' } }], + }); + expect(paginator.matchesFilter(item1)).toBeTruthy(); + }); + }); + + describe('locateByItem', () => { + afterEach(() => itemIndex.clear()); + + const tieBreakerById = (l: TestItem, r: TestItem) => + l.id < r.id ? -1 : l.id > r.id ? 1 : 0; + + it('returns -1 for empty list', () => { + const paginator = new Paginator(); + const res = paginator.locateByItem(a); + expect(res).toEqual({ + state: { currentIndex: -1, insertionIndex: 0 }, + } as ItemCoordinates); + }); + + it('finds an existing item on a tie plateau (no ID tiebreaker)', () => { + const paginator = new Paginator(); + // comparator: age desc only (ties produce a plateau) + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + // items are already sorted by age desc + paginator.state.partialNext({ items: [a, b, c, d] }); + + const location = paginator.locateByItem(c); + // c is at index 2 in [a, b, c, d] + // insertionIndex for identical key (age 25) is after the plateau + expect(location).toStrictEqual({ + state: { currentIndex: 2, insertionIndex: 3 }, + }); + }); + + it('finds an existing item on a tie plateau (no ID tiebreaker) with itemIndex', () => { + const paginator = new Paginator({ itemIndex }); + // comparator: age desc only (ties produce a plateau) + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + // items are already sorted by age desc + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + + const location = paginator.locateByItem(c); + expect(location).toStrictEqual({ + state: { currentIndex: 2, insertionIndex: 3 }, + interval: { + currentIndex: 2, + insertionIndex: 3, + interval: { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'b', 'c', 'd'], + }, + }, + }); + }); + + it('returns insertion index when not found on a tie plateau (no ID 
tiebreaker)', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + // same sort keys as b/c but different id; not present + const x: TestItem = { id: 'x', age: 25, name: 'X' }; + const { state } = paginator.locateByItem(x); + // insertion point should be after the 25-plateau (after c at index 2) + expect(state?.currentIndex).toBe(-1); + expect(state?.insertionIndex).toBe(3); + }); + + it('returns insertion index when not found on a tie plateau (no ID tiebreaker) with itemIndex', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + + // same sort keys as b/c but different id; not present + const x: TestItem = { id: 'x', age: 25, name: 'X' }; + const location = paginator.locateByItem(x); + // insertion point should be after the 25-plateau (after c at index 2) + expect(location).toStrictEqual({ + state: { currentIndex: -1, insertionIndex: 3 }, + interval: { + currentIndex: -1, + insertionIndex: 3, + interval: { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'b', 'c', 'd'], + }, + }, + }); + }); + + it('finds exact index with ID tiebreaker in comparator (pure O(log n))', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + // tie-breaker on id asc guarantees a total order + tiebreaker: tieBreakerById, + }); + + // With tiebreaker, the order within age==25 is by id asc: b (id 'b'), then c (id 'c') + paginator.state.partialNext({ items: [a, b, c, d] }); + + const { state } = paginator.locateByItem(c); + expect(state?.currentIndex).toBe(2); + // In this setting the insertionIndex is 
deterministic but not strictly needed when found + expect(state?.insertionIndex).toBe(3); + }); + + it('finds exact index with ID tiebreaker in comparator (pure O(log n)) with itemIndex', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + // tie-breaker on id asc guarantees a total order + tiebreaker: tieBreakerById, + }); + + // With tiebreaker, the order within age==25 is by id asc: b (id 'b'), then c (id 'c') + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + const location = paginator.locateByItem(c); + expect(location).toStrictEqual({ + state: { currentIndex: 2, insertionIndex: 3 }, + interval: { + currentIndex: 2, + insertionIndex: 3, + interval: { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'b', 'c', 'd'], + }, + }, + }); + }); + + it('computes insertion for state at the beginning when needle sorts before all items', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + const z: TestItem = { id: 'z', age: 40, name: 'Z' }; // highest age → goes to front + const { state } = paginator.locateByItem(z); + expect(state?.currentIndex).toBe(-1); + expect(state?.insertionIndex).toBe(0); + }); + + it('computes insertion for state at the beginning when needle sorts before all items with itemIndex', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + + const z: TestItem = { id: 'z', age: 40, name: 'Z' }; // highest age → goes to front + const location = paginator.locateByItem(z); + // interval does 
not exist so it is not included in the search result + expect(location).toStrictEqual({ + state: { currentIndex: -1, insertionIndex: 0 }, + }); + }); + + it('computes insertion for state at the end when needle sorts after all items', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.state.partialNext({ items: [a, b, c, d] }); + + const z: TestItem = { id: 'z', age: 10, name: 'Z' }; // lowest age → goes to end + const { state } = paginator.locateByItem(z); + expect(state?.currentIndex).toBe(-1); + expect(state?.insertionIndex).toBe(4); + }); + + it('computes insertion for state at the end when needle sorts after all items with item index', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + + const z: TestItem = { id: 'z', age: 10, name: 'Z' }; // lowest age → goes to end + const location = paginator.locateByItem(z); + // interval does not exist so it is not included in the search result + expect(location).toStrictEqual({ + state: { currentIndex: -1, insertionIndex: 4 }, + }); + }); + + it('locates the correct interval when multiple intervals exist', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + tiebreaker: tieBreakerById, + }); + paginator.ingestPage({ page: [a, b, c, d], setActive: true }); + paginator.ingestPage({ page: [v, x, y, z], setActive: true }); + + const location = paginator.locateByItem(z); + // interval does not exist so it is not included in the search result + expect(location).toStrictEqual({ + state: { currentIndex: 3, insertionIndex: 4 }, + interval: { + interval: { + id: expect.any(String), + hasMoreHead: 
true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['v', 'x', 'y', 'z'], + }, + currentIndex: 3, + insertionIndex: 4, + }, + }); + }); + }); + + describe('ingestPage', () => { + let paginator: Paginator; + beforeEach(() => { + paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + }); + + it('postQueryReconcile treats jump query as non-directional (direction undefined)', async () => { + class JumpAwarePaginator extends Paginator { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + isJumpQueryShape(_queryShape: QueryShape): boolean { + return true; + } + } + + const jumpPaginator = new JumpAwarePaginator({ itemIndex }); + jumpPaginator.sortComparator = paginator.sortComparator; + + const ingestSpy = vi.spyOn(jumpPaginator, 'ingestPage'); + + await jumpPaginator.postQueryReconcile({ + direction: undefined, + isFirstPage: true, + queryShape: defaultNextQueryShape, + requestedPageSize: 10, + results: { items: [a] }, + updateState: false, + }); + + expect(ingestSpy).toHaveBeenCalledWith( + expect.objectContaining({ + policy: 'strict-overlap-only', + isHead: undefined, + isTail: undefined, + targetIntervalId: undefined, + }), + ); + }); + + it('sorts items according to effectiveSortComparator', () => { + paginator.ingestPage({ page: [c, a, b, d, b, c, a], setActive: true }); + // sorts by age, not id + expect(paginator.items).toStrictEqual([a, a, c, b, b, c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'a', 'c', 'b', 'b', 'c', 'd'], + }, + ]); + }); + + it('sets items in intervals only', () => { + 
paginator.ingestPage({ page: [c, a, b, d, b, c, a] }); + expect(paginator.items).toBeUndefined(); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'a', 'c', 'b', 'b', 'c', 'd'], + }, + ]); + }); + + it('ingests into the anchored head interval', () => { + paginator.ingestPage({ page: [c, d], isHead: true, setActive: true }); + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: false, + hasMoreTail: true, + isHead: true, + isTail: false, + itemIds: ['c', 'd'], + }, + ]); + + paginator.ingestPage({ page: [a] }); + // ingestPage without setActive does not emit state.items + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: false, + hasMoreTail: true, + isHead: true, + isTail: false, + itemIds: ['a', 'c', 'd'], + }, + ]); + }); + + it('does not force-merge into head interval under strict-overlap-only policy', () => { + paginator.ingestPage({ page: [c, d], isHead: true, setActive: true }); + + // Under default ('auto') policy, ingesting [a] would be merged into the head interval + // even though the sort bounds do not overlap. 
+ paginator.ingestPage({ page: [a], policy: 'strict-overlap-only' }); + + // @ts-expect-error accessing protected property _itemIntervals + const intervals = Array.from(paginator._itemIntervals.values()); + expect(intervals).toHaveLength(2); + + const headInterval = intervals.find( + (itv) => 'isHead' in itv && (itv as { isHead: boolean }).isHead, + ); + expect(headInterval).toBeTruthy(); + expect(headInterval).toMatchObject({ + isHead: true, + isTail: false, + itemIds: ['c', 'd'], + }); + + const otherInterval = intervals.find( + (itv) => !('isHead' in itv) || !(itv as { isHead: boolean }).isHead, + ); + expect(otherInterval).toBeTruthy(); + expect(otherInterval).toMatchObject({ + isHead: false, + isTail: false, + itemIds: ['a'], + }); + }); + + it('merges intervals when they strictly overlap under strict-overlap-only policy', () => { + paginator.ingestPage({ page: [b, c], setActive: true }); + paginator.ingestPage({ page: [c, d], policy: 'strict-overlap-only' }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b', 'c', 'd'], + }, + ]); + }); + + it('prepends and appends a page', () => { + paginator.ingestPage({ page: [b, c], setActive: true }); + expect(paginator.items).toStrictEqual([b, c]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b', 'c'], + }, + ]); + + paginator.ingestPage({ page: [a] }); + 
expect(paginator.items).toStrictEqual([b, c]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(2); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b', 'c'], + }, + ]); + + paginator.ingestPage({ page: [d] }); + expect(paginator.items).toStrictEqual([b, c]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(3); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b', 'c'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['d'], + }, + ]); + }); + + it('ingests into the anchored tail interval', () => { + paginator.ingestPage({ page: [b, c], isTail: true, setActive: true }); + expect(paginator.items).toStrictEqual([b, c]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: false, + isHead: false, + isTail: true, + itemIds: ['b', 'c'], + }, + ]); + + paginator.ingestPage({ page: [d] }); + // ingestPage without setActive does not emit state.items + expect(paginator.items).toStrictEqual([b, c]); + // @ts-expect-error accessing protected property _itemIntervals + 
expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: false, + isHead: false, + isTail: true, + itemIds: ['b', 'c', 'd'], + }, + ]); + }); + + it('merges all the overlapping anchored intervals, parts of logical intervals with target interval', () => { + let keys: string[] = []; + paginator.ingestPage({ page: [c, d], setActive: true }); + paginator.ingestPage({ page: [b] }); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], // b.age === c.age => merged + }, + ]); + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + + paginator.ingestItem(a); // leads to creation of logical head + // ingestItem does not emit into state.items if active interval isn't affected + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: keys[0], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], + }, + ]); + + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + + paginator.ingestItem(z); // leads to creation of logical tail + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(3); 
+ // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: keys[1], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['z'], + }, + ]); + + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + + paginator.ingestPage({ page: [x] }); + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(4); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: keys[1], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], + }, + { + id: expect.any(String), // new interval with new id + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['x'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['z'], + }, + ]); + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + + paginator.ingestPage({ page: [y], targetIntervalId: keys[2] }); + expect(paginator.items).toStrictEqual([c, d]); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(4); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: keys[1], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], + }, + { + id: keys[2], + hasMoreHead: true, + hasMoreTail: 
true, + isHead: false, + isTail: false, + itemIds: ['x', 'y'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['z'], + }, + ]); + + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + const previousAnchoredPageId = keys[1]; + + paginator.ingestPage({ page: [a, b, z] }); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(1); + // @ts-expect-error accessing protected property _itemIntervals + const currentAnchoredPageId = Array.from(paginator._itemIntervals.keys())[0]; + expect(previousAnchoredPageId).toBe(currentAnchoredPageId); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: currentAnchoredPageId, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + // original interval (containing 'c') served as a base, therefore 'b' is merged after 'c' + itemIds: ['a', 'c', 'b', 'd', 'x', 'y', 'z'], + }, + ]); + }); + + it('marks head and tail anchored intervals and removes existing logical intervals', () => { + paginator.ingestItem(b); + paginator.ingestPage({ page: [d] }); + paginator.ingestItem(y); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['b'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['d'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['y'], + }, + ]); + + paginator.ingestPage({ page: [a], isHead: true }); + paginator.ingestPage({ page: [z], isTail: true }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: false, + hasMoreTail: true, + isHead: true, + isTail: 
false, + itemIds: ['a'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['d'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['y'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: false, + isHead: false, + isTail: true, + itemIds: ['z'], + }, + ]); + }); + + it('merges incomplete head intervals with existing logical intervals and sorts their items', () => { + paginator.ingestItem(a); // logical head + paginator.ingestPage({ page: [d] }); // anchored interval + paginator.ingestItem(y); // logical tail + + paginator.ingestPage({ page: [c], isHead: true }); + paginator.ingestPage({ page: [x], isTail: true }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: false, + hasMoreTail: true, + isHead: true, + isTail: false, + itemIds: ['a', 'c'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['d'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: false, + isHead: false, + isTail: true, + itemIds: ['x', 'y'], + }, + ]); + }); + + it('ignores targetInterval if it is a logical interval', () => { + paginator.ingestItem(a); // logical head + paginator.ingestPage({ page: [c, d] }); // anchored interval + paginator.ingestPage({ page: [b], targetIntervalId: LOGICAL_HEAD_INTERVAL_ID }); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + 
isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], // according to sort c === b and thus b is inserted at the next free slot + }, + ]); + + paginator.ingestPage({ page: [x], targetIntervalId: LOGICAL_HEAD_INTERVAL_ID }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: ['a'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['c', 'b', 'd'], + }, + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['x'], + }, + ]); + }); + + it('merges to target interval within the neighbour interval bounds (does not overlap with neighbours) - paginator.toTail()', () => { + paginator.ingestPage({ page: [a, b] }); + paginator.ingestPage({ page: [x, y] }); + // @ts-expect-error accessing protected property _itemIntervals + const keys = Array.from(paginator._itemIntervals.keys()); + paginator.ingestPage({ page: [c, d], targetIntervalId: keys[0] }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: keys[0], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'b', 'c', 'd'], + }, + { + id: keys[1], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['x', 'y'], + }, + ]); + }); + + it('uses anchored target interval as base even if non-overlapping', () => { + paginator.ingestPage({ page: [a] }); + paginator.ingestPage({ page: [b, d] }); + // @ts-expect-error accessing protected property _itemIntervals + const keys = Array.from(paginator._itemIntervals.keys()); + paginator.ingestPage({ page: [c], targetIntervalId: keys[0] }); + + // @ts-expect-error accessing protected property _itemIntervals + 
expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: keys[0], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'b', 'c', 'd'], + }, + ]); + }); + + it('merges page into anchored target interval even if disjoint', () => { + paginator.ingestPage({ page: [a] }); + paginator.ingestPage({ page: [b, d] }); + // @ts-expect-error accessing protected property _itemIntervals + let keys = Array.from(paginator._itemIntervals.keys()); + paginator.ingestPage({ page: [x], targetIntervalId: keys[0] }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: keys[0], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['a', 'x'], + }, + { + id: keys[1], + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['b', 'd'], + }, + ]); + + paginator.resetState(); + paginator.ingestPage({ page: [a] }); + paginator.ingestPage({ page: [d] }); + paginator.ingestPage({ page: [x] }); + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(3); + + // @ts-expect-error accessing protected property _itemIntervals + keys = Array.from(paginator._itemIntervals.keys()); + // ingesting into the interval with x will merge b into x + paginator.ingestPage({ page: [b], targetIntervalId: keys[2] }); + + // @ts-expect-error accessing protected property _itemIntervals + expect(paginator._itemIntervals.size).toBe(3); + // @ts-expect-error accessing protected property _itemIntervals + const values = Array.from(paginator._itemIntervals.values()); + expect(values).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + id: keys[0], + isHead: false, + isTail: false, + itemIds: ['a'], + }), + expect.objectContaining({ + isHead: false, + isTail: false, + itemIds: ['d'], + }), + expect.objectContaining({ + id: keys[2], + 
isHead: false, + isTail: false, + itemIds: ['b', 'x'], + }), + ]), + ); + }); + + it('does not ingest if itemIndex is not available', () => { + paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + expect(paginator.items).toBeUndefined(); + paginator.ingestPage({ page: [a] }); + expect(paginator.items).toBeUndefined(); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([]); + }); + + it('does not ingest if page has no items', () => { + paginator.ingestPage({ page: [] }); + expect(paginator.items).toBeUndefined(); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([]); + }); + }); + + describe('ingestItem to state only', () => { + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'item exists but does not match the filter anymore removes the item %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + + paginator.state.partialNext({ + items: [item3, item2, item1], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams + }); + + const adjustedItem = { + ...item1, + teams: ['efg'], // removed from the team abc + }; + + expect(paginator.ingestItem(adjustedItem)).toBeTruthy(); // item removed + expect(paginator.items).toStrictEqual([item3, item2]); + }, + ); + + it.each([ + [' adjusts the order on lockItemOrder: false', false], + [' does not adjust the order on lockItemOrder: true', true], + ])('exists and matches the filter updates the item and %s', (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item1, item2, item3], + }); + + // @ts-expect-error accessing protected 
property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem1 = { + ...item1, + age: 103, + }; + + expect(paginator.ingestItem(adjustedItem1)).toBeTruthy(); // item updated + + if (lockItemOrder) { + expect(paginator.items).toStrictEqual([adjustedItem1, item2, item3]); + } else { + expect(paginator.items).toStrictEqual([item2, item3, adjustedItem1]); + } + }); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and does not match the filter results in no action %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item1], // age: 100 + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem = { + ...item1, + id: 'id2', + name: 'test2', + }; + + expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action + expect(paginator.items).toStrictEqual([item1]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and matches the filter inserts according to default sort order (append) %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item3, item1], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item1, item2]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and matches the filter inserts according to sort order %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder }); + paginator.state.partialNext({ + items: [item3, item1], + }); + + // @ts-expect-error accessing protected property + 
paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item2, item1]); + }, + ); + + it('reflects the boost priority on lockItemOrder: false for newly ingested items', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item1], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item2, item3, item1]); + }); + + it('reflects the boost priority on lockItemOrder: false for existing items recently boosted', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item1, item2, item3], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem2 = { + ...item2, + age: 103, + }; + paginator.boost(item2.id); + expect(paginator.ingestItem(adjustedItem2)).toBeTruthy(); // item updated + expect(paginator.items).toStrictEqual([adjustedItem2, item1, item3]); + }); + + it('does not reflect the boost priority on lockItemOrder: true', () => { + const paginator = new Paginator({ lockItemOrder: true }); + paginator.state.partialNext({ + items: [item1, item2, item3], + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); // item updated + expect(paginator.items).toStrictEqual([item1, item2, item3]); + }); + + it('reflects the boost priority on lockItemOrder: true when ingesting a new item', () => { + const paginator = new Paginator({ lockItemOrder: true }); + 
paginator.state.partialNext({ +        items: [item3, item1], +      }); + +      // @ts-expect-error accessing protected property +      paginator.buildFilters = () => ({ +        teams: { $contains: 'abc' }, +      }); + +      paginator.boost(item2.id); +      expect(paginator.ingestItem(item2)).toBeTruthy(); +      expect(paginator.items).toStrictEqual([item2, item3, item1]); +    }); +  }); + +  describe('ingestItem with itemIndex', () => { +    beforeEach(() => { +      itemIndex.clear(); +    }); + +    it('updates an item that lives only in the logical head interval re-inserts the item back to logical interval', () => { +      const paginator = new Paginator({ itemIndex }); + +      // Sort by age desc so we can create "head" and "tail" logically +      paginator.sortComparator = makeComparator< +        TestItem, +        Partial> +      >({ sort: { age: -1 } }); + +      // First ingestion: item2 (age 101) → logical head +      expect(paginator.ingestItem(item2)).toBe(true); + +      // Second ingestion: item1 (age 100, younger than item2) → logical tail +      expect(paginator.ingestItem(item1)).toBe(true); + +      // We should now have only logical intervals: head + tail +      // @ts-expect-error accessing protected property +      let intervals = Array.from(paginator._itemIntervals.values()); +      expect(intervals).toStrictEqual([ +        { +          id: LOGICAL_HEAD_INTERVAL_ID, +          itemIds: [item2.id], +        }, +        { +          id: LOGICAL_TAIL_INTERVAL_ID, +          itemIds: [item1.id], +        }, +      ]); + +      // Update the head item snapshot (change sort-relevant field) +      const updatedHead: TestItem = { +        ...item2, +        age: 150, // arbitrary change +      }; + +      expect(paginator.ingestItem(updatedHead)).toBe(true); + +      // ItemIndex snapshot for id2 is updated +      // @ts-expect-error accessing protected property +      expect(paginator._itemIndex!.get(item2.id)).toStrictEqual(updatedHead); + +      // We still have exactly the same logical head + tail intervals by ID and membership +      // (the "still belongs to previous logical interval when only logical intervals exist" rule) +      // @ts-expect-error accessing protected property +      intervals = 
Array.from(paginator._itemIntervals.values()); + expect(intervals).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: [item2.id], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: [item1.id], + }, + ]); + }); + + it('keeps updated existing item in logical tail when only logical intervals exist', () => { + const paginator = new Paginator({ itemIndex }); + + // Sort by age desc so we can create "head" and "tail" logically + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + // First ingestion: item2 (age 101) → logical head + expect(paginator.ingestItem(item2)).toBe(true); + + // Second ingestion: item1 (age 100, younger than item2) → logical tail + expect(paginator.ingestItem(item1)).toBe(true); + + // We should now have only logical intervals: head + tail + // @ts-expect-error accessing protected property + let intervals = Array.from(paginator._itemIntervals.values()); + expect(intervals).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: [item2.id], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: [item1.id], + }, + ]); + + // Update the tail item snapshot (change sort-relevant field) + const updatedTail: TestItem = { + ...item1, + age: 50, // arbitrary change + }; + + expect(paginator.ingestItem(updatedTail)).toBe(true); + + // ItemIndex snapshot for id1 is updated + // @ts-expect-error accessing protected property + expect(paginator._itemIndex!.get(item1.id)).toStrictEqual(updatedTail); + + // We still have exactly the same logical head + tail intervals by ID and membership + // (the "still belongs to previous logical interval when only logical intervals exist" rule) + // @ts-expect-error accessing protected property + intervals = Array.from(paginator._itemIntervals.values()); + expect(intervals).toStrictEqual([ + { + id: LOGICAL_HEAD_INTERVAL_ID, + itemIds: [item2.id], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: [item1.id], + }, + ]); + }); + + it.each([ + ['on lockItemOrder: 
false', false], + ['on lockItemOrder: true', true], + ])( + 'item exists but does not match the filter anymore removes the item %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ itemIndex, lockItemOrder }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $eq: ['abc', 'efg'] }, // required membership in these two teams + }); + + paginator.ingestPage({ + page: [item3, item2, item1], + setActive: true, + }); + + // @ts-expect-error accessing protected property _itemIndex + expect(Array.from(paginator._itemIndex!.values())).toStrictEqual([ + item3, + item2, + item1, + ]); + + const adjustedItem1 = { + ...item1, + teams: ['efg'], // removed from the team abc + }; + + expect(paginator.ingestItem(adjustedItem1)).toBeTruthy(); // item removed + expect(paginator.items).toStrictEqual([item3, item2]); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id2'], + }, + ]); + + // item index keeps the reference + // @ts-expect-error accessing protected property _itemIndex + expect(Array.from(paginator._itemIndex!.values())).toStrictEqual([ + item3, + item2, + adjustedItem1, + ]); + }, + ); + + it.each([ + [' does not adjust the order on lockItemOrder: true', true], + [' adjusts the order on lockItemOrder: false', false], + ])('exists and matches the filter updates the item and %s', (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder, itemIndex }); + paginator.ingestPage({ + page: [item1, item2, item3], + setActive: true, + }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: 1 } }); + + const adjustedItem1 = { + ...item1, + age: 
103, + }; + + expect(paginator.ingestItem(adjustedItem1)).toBeTruthy(); // item updated + + if (lockItemOrder) { + expect(paginator.items).toStrictEqual([adjustedItem1, item2, item3]); + } else { + // moved to next page that may be disjoint and would be retrieved by pagination + expect(paginator.items).toStrictEqual([item2, item3]); + } + + // intervals are independent of the UI layer in state.items + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id2', 'id3'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['id1'], + }, + ]); + + // item index keeps the reference + // @ts-expect-error accessing protected property _itemIndex + expect(Array.from(paginator._itemIndex!.values())).toStrictEqual([ + adjustedItem1, + item2, + item3, + ]); + }); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and does not match the filter results in no action %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder, itemIndex }); + paginator.ingestPage({ + page: [item1], // age: 100 + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + const adjustedItem = { + ...item1, + id: 'id2', + name: 'test2', + }; + + expect(paginator.ingestItem(adjustedItem)).toBeFalsy(); // no action + expect(paginator.items).toStrictEqual([item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id1'], + }, + ]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does 
not exist and matches the filter inserts according to default sort order (append) %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder, itemIndex }); + paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item1, item2]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1', 'id2'], + }, + ]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist and matches the filter inserts according to sort order %s', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder, itemIndex }); + paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toHaveLength(3); + expect(paginator.items).toStrictEqual([item3, item2, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id2', 'id1'], + }, + ]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'does not exist, matches the filter, is out of the current interval bounds, inserts 
according to sort order %s to a new interval', + (_, lockItemOrder) => { + const paginator = new Paginator({ lockItemOrder, itemIndex }); + paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const item4 = { + id: 'id4', + name: 'test', + age: 99, + teams: ['abc'], + }; + expect(paginator.ingestItem(item4)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['id4'], + }, + ]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + '%s is not reflected in a previously non-active interval we jump to', + (_, lockItemOrder) => { + const paginator = new Paginator({ itemIndex, lockItemOrder }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const firstPage = paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + const item4 = { + id: 'id4', + name: 'test', + age: 96, + teams: ['abc'], + }; + const item5 = { + id: 'id5', + name: 'test', + age: 97, + teams: ['abc'], + }; + const item6 = { + id: 'id6', + name: 'test', + age: 98, + teams: ['abc'], + }; + const secondPage = paginator.ingestPage({ + page: [item6, item5, item4], + setActive: true, + }); + // @ts-expect-error accessing protected property _itemIntervals + 
expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id5', 'id4'], + }, + ]); + + const adjustedItem4 = { + ...item4, + age: 98, + }; + expect(paginator.ingestItem(adjustedItem4)).toBeTruthy(); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id4', 'id5'], + }, + ]); + expect( + // @ts-expect-error accessing protected property _itemIntervals + paginator.intervalToItems(paginator._itemIntervals.get(secondPage!.id)!), + ).toStrictEqual([item6, adjustedItem4, item5]); + }, + ); + + it.each([ + ['on lockItemOrder: false', false], + ['on lockItemOrder: true', true], + ])( + 'existing item with changed sort-relevant properties is removed altogether if falls between existing intervals', + (_, lockItemOrder) => { + const paginator = new Paginator({ itemIndex, lockItemOrder }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const firstPage = paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + const item4 = { + id: 'id4', + name: 'test', + age: 96, + teams: ['abc'], + }; + const item5 = { + id: 'id5', + name: 'test', + age: 97, + teams: ['abc'], + }; + const item6 = { + id: 'id6', + name: 'test', + age: 98, + teams: ['abc'], + }; + const secondPage = 
paginator.ingestPage({ + page: [item6, item5, item4], + setActive: true, + }); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id5', 'id4'], + }, + ]); + + const adjustedItem5 = { + ...item5, + age: 99, + }; + expect(paginator.ingestItem(adjustedItem5)).toBeTruthy(); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id4'], + }, + ]); + expect( + // @ts-expect-error accessing protected property _itemIntervals + paginator.intervalToItems(paginator._itemIntervals.get(secondPage!.id)!), + ).toStrictEqual([item6, item4]); + }, + ); + + it.each([ + ['on lockItemOrder: false', 'is', false], + ['on lockItemOrder: true', 'is not', true], + ])( + '%s boost %s reflected in a previously non-active interval we jump to', + (_, __, lockItemOrder) => { + const paginator = new Paginator({ itemIndex, lockItemOrder }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const firstPage = paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + + const item4 = { + id: 'id4', + name: 'test', + age: 97, + teams: ['abc'], + }; + const item5 = { + id: 'id5', + name: 'test', + age: 98, + teams: ['abc'], + }; + const item6 = { + 
id: 'id6', + name: 'test', + age: 99, + teams: ['abc'], + }; + const secondPage = paginator.ingestPage({ + page: [item6, item5, item4], + setActive: true, + }); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id5', 'id4'], + }, + ]); + + paginator.boost(item5.id, { until: 9999999999999999 }); + expect(paginator.ingestItem(item5)).toBeTruthy(); + + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: firstPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: secondPage!.id, + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id6', 'id5', 'id4'], + }, + ]); + if (lockItemOrder) { + expect( + // @ts-expect-error accessing protected property _itemIntervals + paginator.intervalToItems(paginator._itemIntervals.get(secondPage!.id)!), + ).toStrictEqual([item6, item5, item4]); + } else { + expect( + // @ts-expect-error accessing protected property _itemIntervals + paginator.intervalToItems(paginator._itemIntervals.get(secondPage!.id)!), + ).toStrictEqual([item5, item6, item4]); + } + }, + ); + + it('reflects the boost priority on lockItemOrder: false for newly ingested items in state.items only', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: 
-1 } }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item2, item3, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id2', 'id1'], + }, + ]); + }); + + it('reflects the boost priority on lockItemOrder: false for newly ingested items ingested outside the existing interval only in state.items', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ + page: [item3, item1], + setActive: true, + }); + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const item4 = { + id: 'id4', + name: 'test', + age: 99, + teams: ['abc'], + }; + paginator.boost(item4.id, { until: 9999999999999999 }); + expect(paginator.ingestItem(item4)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['id4'], + }, + ]); + + const item5 = { + id: 'id5', + name: 'test', + age: 98, + teams: ['abc'], + }; + paginator.boost(item5.id, { until: 9999999999999999, seq: 1 }); + expect(paginator.ingestItem(item5)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + 
hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id1'], + }, + { + id: LOGICAL_TAIL_INTERVAL_ID, + itemIds: ['id4', 'id5'], + }, + ]); + expect( + paginator.intervalToItems( + // @ts-expect-error accessing protected property _itemIntervals + paginator._itemIntervals.get(LOGICAL_TAIL_INTERVAL_ID)!, + ), + ).toStrictEqual([item5, item4]); + }); + + it('boosted existing item in an anchored interval moves ahead of non-boosted items (lockItemOrder: false) only in state.items', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ + page: [item1, item2, item3], + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: 1 }, + }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); // item updated + expect(paginator.items).toStrictEqual([item2, item1, item3]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id1', 'id2', 'id3'], + }, + ]); + }); + + it('does not reflect the boost priority of existing on lockItemOrder: true', () => { + const paginator = new Paginator({ itemIndex, lockItemOrder: true }); + paginator.ingestPage({ + page: [item1, item2, item3], + setActive: true, + }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + age: { $gt: 100 }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: 1 }, + }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); // item updated + expect(paginator.items).toStrictEqual([item1, item2, item3]); + // @ts-expect-error accessing protected property 
_itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id1', 'id2', 'id3'], + }, + ]); + }); + + it('does not reflect the boost priority on lockItemOrder: true when ingesting a new item only in state.items', () => { + const paginator = new Paginator({ itemIndex, lockItemOrder: true }); + paginator.ingestPage({ page: [item3, item1], setActive: true }); + + // @ts-expect-error accessing protected property + paginator.buildFilters = () => ({ + teams: { $contains: 'abc' }, + }); + + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + + paginator.boost(item2.id); + expect(paginator.ingestItem(item2)).toBeTruthy(); + expect(paginator.items).toStrictEqual([item3, item2, item1]); + // @ts-expect-error accessing protected property _itemIntervals + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: true, + hasMoreTail: true, + isHead: false, + isTail: false, + itemIds: ['id3', 'id2', 'id1'], + }, + ]); + }); + }); + + describe('removeItem', () => { + it('removes existing item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item3, item2, item1], + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + expect(paginator.removeItem({ item: item3 })).toStrictEqual({ + state: { currentIndex: 0, insertionIndex: 0 }, + }); + expect(paginator.items).toHaveLength(2); + expect(paginator.items![0]).toStrictEqual(item2); + expect(paginator.items![1]).toStrictEqual(item1); + }); + + it('results in no action for non-existent item', () => { + const paginator = new Paginator(); + paginator.state.partialNext({ + items: [item2, item1], + }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: -1 }, + }); + 
expect(paginator.removeItem({ item: item3 })).toStrictEqual({ + state: { currentIndex: -1, insertionIndex: -1 }, + }); + expect(paginator.items).toHaveLength(2); + expect(paginator.items![0]).toStrictEqual(item2); + expect(paginator.items![1]).toStrictEqual(item1); + }); + + it('removes item from both state and anchored intervals when itemIndex is present', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + paginator.ingestPage({ page: [item3, item2, item1], setActive: true }); + + const result = paginator.removeItem({ id: item2.id }); + + expect(result.state?.currentIndex).toBe(1); + // Interval no longer contains id2 + // @ts-expect-error accessing protected property + const intervals = Array.from(paginator._itemIntervals.values()); + expect(intervals).toHaveLength(1); + expect(intervals[0].itemIds).toEqual(['id3', 'id1']); + + expect(paginator.items!.map((i) => i.id)).toEqual(['id3', 'id1']); + }); + + it('falls back to linear scan by id when no itemIndex is provided', () => { + const paginator = new Paginator(); // no itemIndex + paginator.state.partialNext({ items: [item3, item2, item1] }); + + const res = paginator.removeItem({ id: item2.id }); + + expect(res).toEqual({ state: { currentIndex: 1, insertionIndex: -1 } }); + expect(paginator.items!.map((i) => i.id)).toEqual(['id3', 'id1']); + }); + + it('removeItem is a no-op when itemIndex exists but does not have the interval for the given id', () => { + const paginator = new Paginator({ itemIndex }); + paginator.state.partialNext({ items: [item1] }); + + const res = paginator.removeItem({ id: 'missing' }); + + expect(res).toEqual({ state: { currentIndex: -1, insertionIndex: -1 } }); + expect(paginator.items).toEqual([item1]); + // @ts-expect-error accessing protected property + expect(paginator._itemIntervals.size).toBe(0); + }); + + it('removeItem is a no-op when itemIndex exists and has the interval 
but id is unknown', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ page: [item1], setActive: true }); + + const res = paginator.removeItem({ id: 'missing' }); + + expect(res).toEqual({ state: { currentIndex: -1, insertionIndex: -1 } }); + expect(paginator.items).toEqual([item1]); + // @ts-expect-error accessing protected property + expect(paginator._itemIntervals.size).toBe(1); + }); + + it('removes last item and removes the parent interval', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ page: [item1], setActive: true }); + + const res = paginator.removeItem({ id: item1.id }); + + expect(res).toEqual({ + state: { currentIndex: 0, insertionIndex: 0 }, + interval: { + interval: res.interval!.interval, + currentIndex: 0, + insertionIndex: 0, + }, + }); + // we are not returning to undefined as a sign that we have not reset the pagination + expect(paginator.items).toStrictEqual([]); + // @ts-expect-error accessing protected property + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([]); + }); + + it('removes last item and removes the parent interval from a non-active page', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ page: [item1] }); + + const res = paginator.removeItem({ id: item1.id }); + + expect(res).toEqual({ + // the state has no data so we get -1 for indices + state: { currentIndex: -1, insertionIndex: -1 }, + interval: { + interval: res.interval!.interval, + currentIndex: 0, + insertionIndex: 0, + }, + }); + expect(paginator.items).toBeUndefined(); + // @ts-expect-error accessing protected property + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([]); + }); + }); + + describe('setItems', () => { + it('overrides all the items in the state with provided value', () => { + const paginator = new Paginator(); + const items1 = [{ id: 'test-item1' }]; + const items2 = [{ id: 'test-item2' }]; + paginator.setItems({ 
valueOrFactory: items1 }); + expect(paginator.items).toStrictEqual(items1); + paginator.setItems({ valueOrFactory: items2 }); + expect(paginator.items).toStrictEqual(items2); + }); + + const items = [{ id: 'test-item1' }]; + const expectedStateEmissions = [ + { + cursor: undefined, + hasMoreTail: true, + hasMoreHead: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + offset: 0, + }, + { + cursor: undefined, + hasMoreTail: true, + hasMoreHead: true, + isLoading: false, + items, + lastQueryError: undefined, + offset: 1, + }, + ]; + + it('emits state change as long as the items are not the same', () => { + const paginator = new Paginator(); + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + expect(subscriptionHandler).toHaveBeenCalledTimes(1); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[0], + undefined, + ); + + paginator.setItems({ valueOrFactory: items }); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + // setting an object with the same reference + paginator.setItems({ valueOrFactory: items }); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + unsubscribe(); + }); + + it('emits state change as long as the state factory returns objects with different reference', () => { + const paginator = new Paginator(); + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + + paginator.setItems({ valueOrFactory: () => items }); + expect(paginator.items).toStrictEqual(items); + // first call is on subscribe + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + 
expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + // setting an object with the same reference + paginator.setItems({ valueOrFactory: () => items }); + expect(paginator.items).toStrictEqual(items); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + expectedStateEmissions[1], + expectedStateEmissions[0], + ); + + unsubscribe(); + }); + + it('updates the cursor if provided', () => { + const paginator = new Paginator(); + const cursors: PaginatorCursor[] = [ + { tailward: 'next1', headward: 'prev1' }, + { tailward: 'next2', headward: 'prev1' }, + ]; + const subscriptionHandler = vi.fn(); + const unsubscribe = paginator.state.subscribe(subscriptionHandler); + + paginator.setItems({ valueOrFactory: items, cursor: cursors[0] }); + expect(subscriptionHandler).toHaveBeenCalledTimes(2); + expect(subscriptionHandler).toHaveBeenCalledWith( + { ...expectedStateEmissions[1], cursor: cursors[0], offset: 0 }, + { ...expectedStateEmissions[0], cursor: undefined, offset: 0 }, + ); + + unsubscribe(); + }); + + it('prioritizes isFirstPage: true and isLastPage: true', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const page = [item2, item1]; + + paginator.setItems({ + valueOrFactory: page, + isFirstPage: true, + isLastPage: true, + }); + + // @ts-expect-error accessing protected property + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), + hasMoreHead: false, + hasMoreTail: false, + isHead: true, + isTail: true, + itemIds: ['id2', 'id1'], + }, + ]); + + paginator.setItems({ + valueOrFactory: [item3], + isFirstPage: false, + isLastPage: false, + }); + + // @ts-expect-error accessing protected property + expect(Array.from(paginator._itemIntervals.values())).toStrictEqual([ + { + id: expect.any(String), 
+ hasMoreHead: false, + hasMoreTail: false, + isHead: true, + isTail: true, + itemIds: ['id3', 'id2', 'id1'], + }, + ]); + }); + + it('does not reflect on isFirstPage and isLastPage when item interval storage is disabled', () => { + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const page = [item2, item1]; + + paginator.setItems({ + valueOrFactory: page, + isFirstPage: true, + isLastPage: true, + }); + + // @ts-expect-error accessing protected property + expect(paginator._itemIntervals.size).toBe(0); + expect(paginator.items).toStrictEqual([item2, item1]); + + paginator.setItems({ + valueOrFactory: [item3], + isFirstPage: false, + isLastPage: false, + }); + + // @ts-expect-error accessing protected property + expect(paginator._itemIntervals.size).toBe(0); + expect(paginator.items).toStrictEqual([item3]); + }); + + it('with itemIndex creates an anchored interval and sets it active', () => { + const paginator = new Paginator({ itemIndex }); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ sort: { age: -1 } }); + + const page = [item3, item1]; + + paginator.setItems({ + valueOrFactory: page, + isFirstPage: true, + isLastPage: false, + }); + + expect(paginator.items).toEqual(page); + expect(paginator.offset).toBe(page.length); + + // @ts-expect-error accessing protected property + const intervals = Array.from(paginator._itemIntervals.values()); + expect(intervals).toHaveLength(1); + expect(intervals[0]).toMatchObject({ + isHead: true, + isTail: false, + itemIds: ['id3', 'id1'], + }); + + // @ts-expect-error accessing protected property + expect(paginator._activeIntervalId).toBe(intervals[0].id); + }); + }); + + describe('reload', () => { + it('starts the ended pagination from the beginning [offset pagination]', async () => { + const paginator = new Paginator({ pageSize: 2 }); + paginator.state.next({ + hasMoreTail: false, + hasMoreHead: false, + isLoading: 
false, + items: [{ id: 'a' }, { id: 'b' }, { id: 'c' }, { id: 'd' }], + offset: 4, + }); + let reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + + reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id2' }], tailward: 'next2' }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.cursor).toBeUndefined(); + expect(paginator.offset).toBe(1); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + }); + it('starts the ended pagination from the beginning [cursor pagination]', async () => { + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR, pageSize: 2 }); + paginator.state.next({ + hasMoreTail: false, + hasMoreHead: false, + isLoading: false, + items: [{ id: 'a' }, { id: 'b' }, { id: 'c' }, { id: 'd' }], + cursor: { tailward: 'tailward1', headward: 'headward1' }, + }); + let reloadPromise = 
paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id1' }] }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(false); + expect(paginator.items).toEqual([{ id: 'id1' }]); + expect(paginator.cursor).toStrictEqual({ tailward: null, headward: null }); + expect(paginator.offset).toBe(0); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + + reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + + paginator.queryResolve({ items: [{ id: 'id2' }], tailward: 'tailward2' }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(false); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.cursor).toStrictEqual({ tailward: 'tailward2', headward: null }); + expect(paginator.offset).toBe(0); + expect(paginator.mockClientQuery).toHaveBeenCalledWith({ + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: 'yes', + retryCount: 0, + }); + + // reset in another direction + reloadPromise = paginator.reload(); + // wait for the DB data first page load + await sleep(0); + expect(paginator.isLoading).toBe(true); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toBe(undefined); + + paginator.queryResolve({ items: [{ id: 'id2' }], headward: 'headward2' }); + await reloadPromise; + expect(paginator.isLoading).toBe(false); + 
expect(paginator.hasMoreTail).toBe(false); + expect(paginator.hasMoreHead).toBe(true); + expect(paginator.items).toEqual([{ id: 'id2' }]); + expect(paginator.cursor).toStrictEqual({ headward: 'headward2', tailward: null }); + expect(paginator.offset).toBe(0); + }); + }); + + describe('resetState', () => { + it('restores initial state and clears intervals', () => { + const paginator = new Paginator({ itemIndex }); + paginator.ingestPage({ page: [item3, item2], setActive: true }); + + // Sanity: mutated state + intervals + expect(paginator.items).toEqual([item3, item2]); + // @ts-expect-error + expect(paginator._itemIntervals.size).toBe(1); + + paginator.resetState(); + + expect(paginator.state.getLatestValue()).toEqual(paginator.initialState); + // @ts-expect-error + expect(paginator._itemIntervals.size).toBe(0); + }); + }); + + describe('filter resolvers', () => { + const resolvers1 = [{ matchesField: () => true, resolve: () => 'abc' }]; + const resolvers2 = [ + { matchesField: () => false, resolve: () => 'efg' }, + { matchesField: () => true, resolve: () => 'hij' }, + ]; + it('get overridden with setFilterResolvers', () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + + paginator.setFilterResolvers(resolvers1); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers1.length); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); + + paginator.setFilterResolvers(resolvers2); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(resolvers2.length); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers2); + + paginator.setFilterResolvers([]); + // @ts-expect-error accessing protected property + 
expect(paginator._filterFieldToDataResolvers).toHaveLength(0); + }); + + it('get expanded with addFilterResolvers', () => { + const paginator = new Paginator(); + paginator.addFilterResolvers(resolvers1); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual(resolvers1); + + paginator.addFilterResolvers(resolvers2); + + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual([ + ...resolvers1, + ...resolvers2, + ]); + + paginator.addFilterResolvers([]); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toStrictEqual([ + ...resolvers1, + ...resolvers2, + ]); + }); + }); + + describe('item boosting', () => { + const a = { id: 'a', age: 10, name: 'A' } as TestItem; + const b = { id: 'b', age: 20, name: 'B' } as TestItem; + const c = { id: 'c', age: 30, name: 'C' } as TestItem; + + const byIdAsc = (l: TestItem, r: TestItem) => + l.id < r.id ? -1 : l.id > r.id ? 
1 : 0; + + describe('clearExpiredBoosts', () => { + it('removes expired boosts and updates maxBoostSeq', () => { + const paginator = new Paginator(); + // @ts-expect-error accessing protected property + paginator.boosts.clear(); + const now = 1000000; + + paginator.boost('fresh', { until: now + 1000, seq: 1 }); + paginator.boost('stale', { until: now - 1, seq: 5 }); + + // @ts-expect-error accessing protected method + paginator.clearExpiredBoosts(now); + + // @ts-expect-error accessing protected property + expect(Array.from(paginator.boosts.keys())).toEqual(['fresh']); + expect(paginator.maxBoostSeq).toBe(1); + }); + + it('sets maxBoostSeq to 0 when no boosts remain', () => { + const paginator = new Paginator(); + // two expired boosts at "now" + paginator.boost('x', { until: 1000, seq: 1 }); + paginator.boost('y', { until: 1500, seq: 3 }); + + // @ts-expect-error accessing protected method + paginator.clearExpiredBoosts(10000); + + // @ts-expect-error accessing protected property + expect(paginator.boosts.size).toBe(0); + expect(paginator.maxBoostSeq).toBe(0); + }); + }); + + describe('boostComparator', () => { + it('prioritizes boosted over non-boosted', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + paginator.sortComparator = byIdAsc; + + // Boost only "a" + paginator.boost('b', { ttlMs: 10000, seq: 0 }); + + // @ts-expect-error: protected method + expect(paginator.boostComparator(a, b)).toBe(1); // a after b + // @ts-expect-error + expect(paginator.boostComparator(b, a)).toBe(-1); // b stays before a + + // Let boost expire + vi.setSystemTime(new Date(now.getTime() + 11000)); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); // fallback to byIdAsc + vi.useRealTimers(); + }); + + it('when both boosted, higher seq comes first; ties fall back to sortComparator', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + 
vi.setSystemTime(now); + + const paginator = new Paginator(); + // Fallback comparator id asc + paginator.sortComparator = byIdAsc; + + paginator.boost('a', { ttlMs: 60000, seq: 1 }); + paginator.boost('b', { ttlMs: 60000, seq: 3 }); + + // b has higher seq → should come first → comparator(a,b) > 0 + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(1); + // reverse check + // @ts-expect-error + expect(paginator.boostComparator(b, a)).toBe(-1); + + // Equal seq → fall back to sortComparator (id asc => a before b) + paginator.boost('a', { ttlMs: 60000, seq: 2 }); + paginator.boost('b', { ttlMs: 60000, seq: 2 }); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); + + vi.useRealTimers(); + }); + + it('ignores expired boosts automatically during comparison', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + paginator.sortComparator = byIdAsc; + + paginator.boost('b', { ttlMs: 5000, seq: 10 }); + // Initially boosted + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(1); + + // Advance beyond TTL so boost is expired; comparator should fall back + vi.setSystemTime(new Date(now.getTime() + 6000)); + // @ts-expect-error + expect(paginator.boostComparator(a, b)).toBe(-1); // byIdAsc, not boost + vi.useRealTimers(); + }); + }); + + describe('boost', () => { + it('assigns default TTL (15s) and default seq=0; updates maxBoostSeq only upward', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + + paginator.boost('k'); // default 15s, seq 0 + const b1 = paginator.getBoost('k')!; + expect(b1.seq).toBe(0); + expect(b1.until).toBe(now.getTime() + 15000); + expect(paginator.maxBoostSeq).toBe(0); + + // Raise max seq + paginator.boost('m', { ttlMs: 1000, seq: 5 }); + expect(paginator.maxBoostSeq).toBe(5); + + // Lower seq should NOT decrease 
maxBoostSeq + paginator.boost('n', { ttlMs: 1000, seq: 2 }); + expect(paginator.maxBoostSeq).toBe(5); + + vi.useRealTimers(); + }); + + it('accepts explicit until and seq', () => { + const paginator = new Paginator(); + paginator.boost('z', { until: 42, seq: 7 }); + const b = paginator.getBoost('z')!; + expect(b.until).toBe(42); + expect(b.seq).toBe(7); + expect(paginator.maxBoostSeq).toBe(7); + }); + }); + + describe('getBoost', () => { + it('returns the boost record when present; otherwise undefined', () => { + const paginator = new Paginator(); + expect(paginator.getBoost('missing')).toBeUndefined(); + paginator.boost('a', { ttlMs: 1000, seq: 1 }); + const b = paginator.getBoost('a'); + expect(b).toBeDefined(); + expect(b!.seq).toBe(1); + }); + }); + + describe('removeBoost', () => { + it('removes a boost and recalculates maxBoostSeq', () => { + const paginator = new Paginator(); + paginator.boost('a', { ttlMs: 60000, seq: 1 }); + paginator.boost('b', { ttlMs: 60000, seq: 5 }); + paginator.boost('c', { ttlMs: 60000, seq: 2 }); + expect(paginator.maxBoostSeq).toBe(5); + + paginator.removeBoost('b'); // remove current max + expect(paginator.getBoost('b')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(2); + + paginator.removeBoost('c'); + expect(paginator.getBoost('c')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(1); + + paginator.removeBoost('a'); + expect(paginator.getBoost('a')).toBeUndefined(); + expect(paginator.maxBoostSeq).toBe(0); + }); + }); + + describe('isBoosted', () => { + it('returns true when boost exists and now <= until; false otherwise', () => { + vi.useFakeTimers(); + const now = new Date('2025-01-01T00:00:00Z'); + vi.setSystemTime(now); + + const paginator = new Paginator(); + expect(paginator.isBoosted('x')).toBe(false); + + paginator.boost('x', { ttlMs: 5000, seq: 0 }); + expect(paginator.isBoosted('x')).toBe(true); + + // Exactly at until is still considered boosted per <= check + vi.setSystemTime(new Date(now.getTime() + 
5000)); + expect(paginator.isBoosted('x')).toBe(true); + + // After until → false + vi.setSystemTime(new Date(now.getTime() + 5001)); + expect(paginator.isBoosted('x')).toBe(false); + + vi.useRealTimers(); + }); + }); + + describe('integration: ingestion respects boostComparator implicitly', () => { + it('newly ingested boosted items float above non-boosted regardless of fallback sort', () => { + vi.useFakeTimers(); + vi.setSystemTime(new Date('2025-01-01T00:00:00Z')); + + const paginator = new Paginator(); + paginator.sortComparator = makeComparator< + TestItem, + Partial> + >({ + sort: { age: 1 }, // ascending age (so normally a < b < c by age) + }); + paginator.state.partialNext({ items: [a, b] }); + + // Boost "c" before ingest → it should be placed ahead of non-boosted even though age is highest + paginator.boost('c', { ttlMs: 60000, seq: 1 }); + expect(paginator.ingestItem(c)).toBeTruthy(); + + // c should be first due to boost, then a, then b (fallback sort would place c last otherwise) + expect(paginator.items!.map((i) => i.id)).toEqual(['c', 'a', 'b']); + + vi.useRealTimers(); + }); + }); + }); + }); +}); diff --git a/test/unit/pagination/ChannelPaginator.test.ts b/test/unit/pagination/paginators/ChannelPaginator.test.ts similarity index 97% rename from test/unit/pagination/ChannelPaginator.test.ts rename to test/unit/pagination/paginators/ChannelPaginator.test.ts index 9ce1c0564a..bd5cd225e4 100644 --- a/test/unit/pagination/ChannelPaginator.test.ts +++ b/test/unit/pagination/paginators/ChannelPaginator.test.ts @@ -9,10 +9,10 @@ import { type FilterBuilderGenerators, PaginatorCursor, type StreamChat, -} from '../../../src'; -import { getClientWithUser } from '../test-utils/getClient'; -import type { FieldToDataResolver } from '../../../src/pagination/types.normalization'; -import { MockOfflineDB } from '../offline-support/MockOfflineDB'; +} from '../../../../src'; +import { getClientWithUser } from '../../test-utils/getClient'; +import type { 
FieldToDataResolver } from '../../../../src/pagination/types.normalization'; +import { MockOfflineDB } from '../../offline-support/MockOfflineDB'; const user = { id: 'custom-id' }; @@ -38,8 +38,8 @@ describe('ChannelPaginator', () => { const paginator = new ChannelPaginator({ client }); expect(paginator.pageSize).toBe(DEFAULT_PAGINATION_OPTIONS.pageSize); expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, + hasMoreTail: true, + hasMoreHead: true, isLoading: false, items: undefined, lastQueryError: undefined, @@ -83,7 +83,7 @@ describe('ChannelPaginator', () => { debounceMs: 45000, doRequest, hasPaginationQueryShapeChanged, - initialCursor: { prev: 'prev', next: '' }, + initialCursor: { headward: 'headward', tailward: '' }, initialOffset: 10, lockItemOrder: true, pageSize: 2, @@ -104,8 +104,8 @@ describe('ChannelPaginator', () => { }); expect(paginator.pageSize).toBe(2); expect(paginator.state.getLatestValue()).toEqual({ - hasNext: true, - hasPrev: true, + hasMoreTail: true, + hasMoreHead: true, isLoading: false, items: undefined, lastQueryError: undefined, @@ -556,8 +556,8 @@ describe('ChannelPaginator', () => { describe('setters', () => { const stateAfterQuery = { items: [channel1, channel2], - hasNext: false, - hasPrev: false, + hasMoreTail: false, + hasMoreHead: false, offset: 10, isLoading: false, lastQueryError: undefined, @@ -620,7 +620,7 @@ describe('ChannelPaginator', () => { paginator.staticFilters = filters; paginator.sort = sort; - paginator.setItems(items1); + paginator.setItems({ valueOrFactory: items1 }); expect(paginator.items).toStrictEqual(items1); expect( client.offlineDb?.upsertCidsForQuery as unknown as MockInstance, diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts new file mode 100644 index 0000000000..c86f218549 --- /dev/null +++ b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -0,0 +1,493 @@ +import { beforeEach, 
describe, expect, it, vi } from 'vitest'; +import { ZERO_PAGE_CURSOR } from '../../../../src/pagination/paginators/BasePaginator'; +import type { Interval } from '../../../../src/pagination/paginators/BasePaginator'; +import { MessagePaginator } from '../../../../src/pagination/paginators/MessagePaginator'; +import { ItemIndex } from '../../../../src/pagination/ItemIndex'; +import type { Channel } from '../../../../src/channel'; +import type { + LocalMessage, + MessagePaginationOptions, + MessageResponse, +} from '../../../../src/types'; +import { generateMessageDraft } from '../../test-utils/generateMessageDraft'; +import { generateMsg } from '../../test-utils/generateMessage'; +import { formatMessage } from '../../../../src'; +import { DEFAULT_QUERY_CHANNELS_MESSAGE_LIST_PAGE_SIZE } from '../../../../src/constants'; + +const createMessage = (overrides: Partial): LocalMessage => + formatMessage( + generateMsg({ + id: 'message-id', + ...overrides, + }), + ); + +describe('MessagePaginator', () => { + let channel: Channel; + let itemIndex: ItemIndex; + + beforeEach(() => { + channel = { cid: 'channel-id', query: vi.fn() } as unknown as Channel; + itemIndex = new ItemIndex({ getId: (message) => message.id }); + }); + + describe('constructor()', () => { + it('applies defaults and builds comparator', () => { + const paginator = new MessagePaginator({ channel }); + + expect(paginator.pageSize).toBe(100); + expect(paginator.id.startsWith('message-paginator-')).toBe(true); + expect(paginator.state.getLatestValue()).toEqual({ + cursor: ZERO_PAGE_CURSOR, + hasMoreHead: true, + hasMoreTail: true, + isLoading: false, + items: undefined, + lastQueryError: undefined, + offset: 0, + }); + // @ts-expect-error accessing protected property + expect(paginator._filterFieldToDataResolvers).toHaveLength(1); + + const newer = createMessage({ id: 'b', created_at: '2021-01-01T00:00:00.000Z' }); + const older = createMessage({ id: 'a', created_at: '2020-01-01T00:00:00.000Z' }); + 
expect(paginator.sortComparator(older, newer)).toBeLessThan(0); + expect(paginator.sortComparator(newer, older)).toBeGreaterThan(0); + + const sameDateA = createMessage({ + id: 'a', + created_at: '2021-01-01T00:00:00.000Z', + }); + const sameDateB = createMessage({ + id: 'b', + created_at: '2021-01-01T00:00:00.000Z', + }); + expect(paginator.sortComparator(sameDateA, sameDateB)).toBeLessThan(0); // because of the same date, the tiebreaker kicks in + }); + + it('respects provided paginator options', () => { + const doRequest = vi.fn(); + const paginator = new MessagePaginator({ + channel, + id: 'custom-id', + itemIndex, + paginatorOptions: { doRequest, pageSize: 5 }, + }); + + expect(paginator.pageSize).toBe(5); + expect(paginator.id).toBe('custom-id'); + expect(paginator.sort).toEqual({ created_at: 1 }); + expect(paginator.config.doRequest).toBe(doRequest); + }); + }); + + describe('query shape handling', () => { + it('returns always false for hasPaginationQueryShapeChanged', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const prev: MessagePaginationOptions = { id_gt: 'a', limit: 10 }; + const nextSameShape: MessagePaginationOptions = { id_gt: 'a', limit: 30 }; + const nextDifferent: MessagePaginationOptions = { id_gt: 'b', limit: 10 }; + + expect(paginator.config.hasPaginationQueryShapeChanged(prev, nextSameShape)).toBe( + false, + ); + expect(paginator.config.hasPaginationQueryShapeChanged(prev, nextDifferent)).toBe( + false, + ); + }); + + it('builds filters using the channel cid', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + expect(paginator.buildFilters()).toEqual({ cid: 'channel-id' }); + }); + + it('computes next query shape from cursor and direction', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const currentState = paginator.state.getLatestValue(); + paginator.state.next({ + ...currentState, + cursor: { headward: 'head-cursor', tailward: 'tail-cursor' }, + }); + + 
// @ts-expect-error accessing protected method + expect(paginator.getNextQueryShape({ direction: 'tailward' })).toEqual({ + id_lt: 'tail-cursor', + limit: 100, + }); + + // @ts-expect-error accessing protected method + expect(paginator.getNextQueryShape({ direction: 'headward' })).toEqual({ + id_gt: 'head-cursor', + limit: 100, + }); + }); + }); + + describe('query()', () => { + it('uses an existing query shape when provided and respects doRequest path', async () => { + const paginator = new MessagePaginator({ + channel, + itemIndex, + paginatorOptions: { + doRequest: vi.fn().mockResolvedValue({ + cursor: { headward: 'head', tailward: 'tail' }, + items: [generateMsg({ id: '1' })], + }), + }, + }); + // @ts-expect-error setting protected field for test coverage + paginator._nextQueryShape = { + custom: 'shape', + } as unknown as MessagePaginationOptions; + // @ts-expect-error spying on protected method + const getNextQueryShapeSpy = vi.spyOn(paginator, 'getNextQueryShape'); + + const result = await paginator.query({ direction: 'headward' }); + + expect(paginator.config.doRequest).toHaveBeenCalledWith({ custom: 'shape' }); + expect(result.headward).toBe('head'); + expect(result.tailward).toBeUndefined(); + expect(getNextQueryShapeSpy).not.toHaveBeenCalled(); + }); + + it('formats channel query results and sets cursors based on direction', async () => { + const messages = [ + { id: 'first', created_at: '2022-01-01T00:00:00.000Z' }, + { id: 'last', created_at: '2022-01-02T00:00:00.000Z' }, + ]; + (channel.query as unknown as ReturnType).mockResolvedValue({ + messages, + }); + const paginator = new MessagePaginator({ channel, itemIndex }); + // @ts-expect-error setting protected field for test coverage + paginator._nextQueryShape = { id_gt: 'from-cursor', limit: 30 }; + + const result = await paginator.query({}); + + expect(channel.query).toHaveBeenCalledWith({ + messages: { id_gt: 'from-cursor', limit: 30 }, + }); + expect(result.tailward).toBe('first'); + 
expect(result.headward).toBe('last'); + expect(result.items[0].created_at).toBeInstanceOf(Date); + expect(result.items[1].created_at).toBeInstanceOf(Date); + }); + }); + + describe('jumpToMessage()', () => { + it('delegates to executeQuery with id_around payload', async () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + itemIndex.setOne( + createMessage({ id: 'target-message', created_at: '2020-01-01T00:00:00.000Z' }), + ); + const targetInterval: Interval = { + id: 'interval-1', + hasMoreHead: true, + hasMoreTail: true, + itemIds: ['target-message'], + isHead: false, + isTail: false, + }; + const executeQuerySpy = vi + .spyOn(paginator, 'executeQuery') + .mockResolvedValue({ stateCandidate: {}, targetInterval }); + + const result = await paginator.jumpToMessage('target-message', { pageSize: 13 }); + + expect(executeQuerySpy).toHaveBeenCalledWith({ + queryShape: { id_around: 'target-message', limit: 13 }, + updateState: false, + }); + expect(result).toBe(true); + }); + + it('updates cursor when jumping between already loaded intervals', async () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + + const m4 = createMessage({ + cid: 'channel-id', + id: 'm4', + created_at: '2020-01-04T00:00:00.000Z', + }); + const m5 = createMessage({ + cid: 'channel-id', + id: 'm5', + created_at: '2020-01-05T00:00:00.000Z', + }); + const m8 = createMessage({ + cid: 'channel-id', + id: 'm8', + created_at: '2020-01-08T00:00:00.000Z', + }); + const m9 = createMessage({ + cid: 'channel-id', + id: 'm9', + created_at: '2020-01-09T00:00:00.000Z', + }); + + // two disjoint anchored intervals + paginator.ingestPage({ page: [m8, m9], isHead: true, setActive: true }); + paginator.ingestPage({ page: [m4, m5] }); + + await paginator.jumpToMessage('m4'); + expect(paginator.cursor?.tailward).toBe('m4'); + + await paginator.jumpToMessage('m9'); + // jumping back to the head interval should restore its tailward cursor + 
expect(paginator.cursor?.tailward).toBe('m8'); + }); + }); + + describe.todo('jumpToTheLatestMessage', () => {}); + + describe('jumpToTheFirstUnreadMessage()', () => { + it('uses unreadState snapshot even if channel read state is already "read"', async () => { + const channelWithReadState = { + cid: 'channel-id', + query: vi.fn(), + state: { + read: { + user1: { + first_unread_message_id: null, + last_read_message_id: null, + }, + }, + }, + getClient: () => ({ + user: { id: 'user1' }, + }), + } as unknown as Channel; + + const paginator = new MessagePaginator({ + channel: channelWithReadState, + itemIndex, + }); + paginator.setUnreadSnapshot({ + firstUnreadMessageId: 'm-unread', + lastReadMessageId: 'm-read', + }); + + const jumpSpy = vi.spyOn(paginator, 'jumpToMessage').mockResolvedValue(true); + + const ok = await paginator.jumpToTheFirstUnreadMessage(); + + expect(ok).toBe(true); + expect(jumpSpy).toHaveBeenCalledWith('m-unread', undefined); + }); + + it('can ignore snapshot and rely on channel read state only', async () => { + const channelWithReadState = { + cid: 'channel-id', + query: vi.fn(), + state: { + read: { + user1: { + first_unread_message_id: null, + last_read_message_id: null, + }, + }, + }, + getClient: () => ({ + user: { id: 'user1' }, + }), + } as unknown as Channel; + + const paginator = new MessagePaginator({ + channel: channelWithReadState, + itemIndex, + unreadReferencePolicy: 'read-state-only', + }); + paginator.setUnreadSnapshot({ + firstUnreadMessageId: 'm-unread', + lastReadMessageId: 'm-read', + }); + + const jumpSpy = vi.spyOn(paginator, 'jumpToMessage').mockResolvedValue(true); + + const ok = await paginator.jumpToTheFirstUnreadMessage(); + + expect(ok).toBe(false); + expect(jumpSpy).not.toHaveBeenCalled(); + }); + }); + + describe('filterQueryResults()', () => { + it('removes shadowed messages', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + let items = [createMessage({ id: 'only' })]; + 
expect(paginator.filterQueryResults(items)).toEqual(items); + + items = [createMessage({ id: 'only', shadowed: true })]; + expect(paginator.filterQueryResults(items)).toEqual([]); + }); + }); + + describe.todo('postQueryReconcile and deriveCursor for', () => {}); + describe('linear pagination', () => { + describe('updates the hasMoreTail flag only if the first message on page is the first message in interval', () => { + it('no query shape is given', () => { + // const paginator = new MessagePaginator({ channel, itemIndex }); + // paginator.postQueryReconcile({ + // isFirstPage: true, + // requestedPageSize: + // queryChannelsOptions?.message_limit || + // DEFAULT_QUERY_CHANNELS_MESSAGE_LIST_PAGE_SIZE, + // results: { + // items: channelState.messages.map(formatMessage), + // }, + // }); + }); + it('and direction is "tailward"', () => { + // const paginator = new MessagePaginator({ channel, itemIndex }); + // paginator.config.deriveCursor({ + // direction: 'tailward', + // isFirstPage: true, + // requestedPageSize: + // queryChannelsOptions?.message_limit || + // DEFAULT_QUERY_CHANNELS_MESSAGE_LIST_PAGE_SIZE, + // results: { + // items: channelState.messages.map(formatMessage), + // }, + // }); + }); + it('query shape contains "created_at_before_or_equal"', () => {}); + it('query shape contains "created_at_before"', () => {}); + it('query shape contains "id_lt"', () => {}); + it('query shape contains "id_lte"', () => {}); + it('query shape contains "offset"', () => {}); + it('contains unrecognized query shape properties only', () => {}); + }); + it('updates the hasMoreTail flag if the page is empty', () => {}); + + describe('updates the hasMoreHead flag only if the last message on page is the last message in interval', () => { + it('and direction is "headward"', () => {}); + it('query shape contains "created_at_after_or_equal"', () => {}); + it('query shape contains "created_at_after"', () => {}); + it('query shape contains "id_gt"', () => {}); + it('query shape 
contains "id_gte"', () => {}); + it('query shape contains "offset"', () => {}); + it('contains unrecognized query shape properties only', () => {}); + }); + it('updates the hasMoreHead flag if the page is empty', () => {}); + }); + + describe('interval head/tail semantics', () => { + it('treats interval head as the newest edge (head is last itemId)', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + + const older = createMessage({ + cid: 'channel-id', + id: 'm1', + created_at: '2020-01-01T00:00:00.000Z', + }); + const newer = createMessage({ + cid: 'channel-id', + id: 'm2', + created_at: '2020-01-02T00:00:00.000Z', + }); + itemIndex.setMany([older, newer]); + + const intervalA = paginator.makeInterval({ page: [older] }); + const intervalB = paginator.makeInterval({ page: [newer] }); + + // @ts-expect-error accessing protected method + const sorted = paginator.sortIntervals([intervalA, intervalB]); + expect(sorted[0].id).toBe(intervalB.id); + expect(sorted[1].id).toBe(intervalA.id); + }); + + it('ingests a newer live message into the head interval (not logical tail)', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + + const m1 = createMessage({ + cid: 'channel-id', + id: 'm1', + created_at: '2020-01-01T00:00:00.000Z', + }); + const m2 = createMessage({ + cid: 'channel-id', + id: 'm2', + created_at: '2020-01-02T00:00:00.000Z', + }); + paginator.setItems({ + valueOrFactory: [m1, m2], + isFirstPage: true, + isLastPage: true, + }); + + const m3 = createMessage({ + cid: 'channel-id', + id: 'm3', + created_at: '2020-01-03T00:00:00.000Z', + }); + paginator.ingestItem(m3); + + expect(paginator.items?.map((m) => m.id)).toEqual(['m1', 'm2', 'm3']); + + // @ts-expect-error accessing protected storage + expect(paginator._itemIntervals.has('__logical_tail__')).toBe(false); + // @ts-expect-error accessing protected storage + expect(paginator._itemIntervals.has('__logical_head__')).toBe(false); + }); + }); + + describe('jump 
pagination + local filtering', () => { + it('marks jump interval as head when the newest message in the raw page is shadowed', async () => { + // postQueryReconcile override reads `channel.getClient().user.id` + (channel as unknown as { getClient: () => { user: { id: string } } }).getClient = + () => ({ + user: { id: 'user1' }, + }); + // also needs read state access for first page snapshot side effects + (channel as unknown as { state?: { read?: Record } }).state = { + read: {}, + }; + + const paginator = new MessagePaginator({ channel, itemIndex }); + + const m1 = createMessage({ + cid: 'channel-id', + id: 'm1', + created_at: '2020-01-01T00:00:00.000Z', + }); + const m2 = createMessage({ + cid: 'channel-id', + id: 'm2', + created_at: '2020-01-02T00:00:00.000Z', + }); + const m3 = createMessage({ + cid: 'channel-id', + id: 'm3', + created_at: '2020-01-03T00:00:00.000Z', + }); + const around = createMessage({ + cid: 'channel-id', + id: 'm4', + created_at: '2020-01-04T00:00:00.000Z', + }); + // newest message is shadowed -> filtered out before interval ingestion + const newestShadowed = createMessage({ + cid: 'channel-id', + id: 'm5', + created_at: '2020-01-05T00:00:00.000Z', + shadowed: true, + }); + + const { targetInterval } = await paginator.postQueryReconcile({ + isFirstPage: true, + queryShape: { id_around: around.id, limit: 5 }, + requestedPageSize: 5, + results: { items: [m1, m2, m3, around, newestShadowed] }, + updateState: false, + }); + + expect(targetInterval).toBeTruthy(); + expect((targetInterval as unknown as { isHead: boolean }).isHead).toBe(true); + expect((targetInterval as unknown as { isTail: boolean }).isTail).toBe(false); + }); + }); + + it('cannot be customized', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + }); +}); diff --git a/test/unit/pagination/paginators/MessageReplyPaginator.test.ts b/test/unit/pagination/paginators/MessageReplyPaginator.test.ts new file mode 100644 index 0000000000..43f73495cc --- 
/dev/null +++ b/test/unit/pagination/paginators/MessageReplyPaginator.test.ts @@ -0,0 +1,114 @@ +import { describe, expect, it, vi } from 'vitest'; +import { MessageReplyPaginator } from '../../../../src/pagination/paginators/MessageReplyPaginator'; +import type { + LocalMessage, + MessagePaginationOptions, + MessageResponse, +} from '../../../../src/types'; + +const makeLocalMessage = (id: string, createdAtMs: number): LocalMessage => + ({ + attachments: [], + created_at: new Date(createdAtMs), + deleted_at: null, + id, + mentioned_users: [], + pinned_at: null, + reaction_groups: null, + status: 'received', + text: id, + type: 'regular', + updated_at: new Date(createdAtMs), + }) as LocalMessage; + +const makeChannel = () => + ({ + cid: 'messaging:cid', + getClient: () => ({ + notifications: { addError: vi.fn() }, + }), + // Not used when config.doRequest is provided + getReplies: vi.fn(), + }) as unknown as import('../../../../src/channel').Channel; + +describe('MessageReplyPaginator', () => { + it('jumpToMessage does not query if message already in an interval', async () => { + const channel = makeChannel(); + const paginator = new MessageReplyPaginator({ + channel, + parentMessageId: 'parent-1', + }); + + const doRequest = vi.fn(async (query) => { + const options = query.options as MessagePaginationOptions; + const ids = options.id_around ? 
['m1'] : ['m1']; + return { + items: ids.map((id) => makeLocalMessage(id, 1)), + }; + }); + + paginator.config.doRequest = doRequest; + + // Seed intervals + index + await paginator.executeQuery({ + queryShape: { options: { limit: 1 }, sort: paginator.sort }, + }); + expect(doRequest).toHaveBeenCalledTimes(1); + + const executeSpy = vi.spyOn(paginator, 'executeQuery'); + const ok = await paginator.jumpToMessage('m1'); + expect(ok).toBe(true); + expect(executeSpy).not.toHaveBeenCalled(); + }); + + it('jumpToMessage queries id_around when message not present', async () => { + const channel = makeChannel(); + const paginator = new MessageReplyPaginator({ + channel, + parentMessageId: 'parent-1', + }); + + const doRequest = vi.fn(async () => { + return { + items: [makeLocalMessage('m2', 2)], + }; + }); + paginator.config.doRequest = doRequest; + + const ok = await paginator.jumpToMessage('m2', { pageSize: 10 }); + expect(ok).toBe(true); + + expect(doRequest).toHaveBeenCalledTimes(1); + expect(doRequest).toHaveBeenCalledWith({ + options: { id_around: 'm2', limit: 10 }, + sort: [{ created_at: 1 }], + }); + }); + + it('jumpToTheLatestMessage calls jumpToMessage with latest id from head interval', async () => { + const channel = makeChannel(); + const paginator = new MessageReplyPaginator({ + channel, + parentMessageId: 'parent-1', + }); + + const doRequest = vi.fn(async () => { + return { + items: [makeLocalMessage('m1', 1), makeLocalMessage('m2', 2)], + }; + }); + paginator.config.doRequest = doRequest; + + // Ensure intervals are populated + await paginator.executeQuery({ + queryShape: { options: { limit: 2 }, sort: paginator.sort }, + }); + + const jumpSpy = vi.spyOn(paginator, 'jumpToMessage'); + await paginator.jumpToTheLatestMessage(); + + // We don't hard assert the id here because interval "head" semantics are internal, + // but we ensure it uses jumpToMessage as the final step. 
+ expect(jumpSpy).toHaveBeenCalled(); + }); +}); diff --git a/test/unit/pagination/sortCompiler.test.ts b/test/unit/pagination/sortCompiler.test.ts index 500ab3eafd..ccc03a21bd 100644 --- a/test/unit/pagination/sortCompiler.test.ts +++ b/test/unit/pagination/sortCompiler.test.ts @@ -1,9 +1,6 @@ // sortCompiler.spec.ts import { describe, it, expect } from 'vitest'; -import { - binarySearchInsertIndex, - makeComparator, -} from '../../../src/pagination/sortCompiler'; +import { binarySearch, makeComparator } from '../../../src/pagination/sortCompiler'; import { resolveDotPathValue as defaultResolvePathValue } from '../../../src/pagination/utility.normalization'; import type { AscDesc } from '../../../src'; @@ -191,77 +188,387 @@ describe('makeComparator', () => { }); }); -describe('binarySearchInsertIndex', () => { - it('inserts at beginning, middle, and end as expected', () => { - const items: Item[] = [ - { cid: 'a', v: 10 }, - { cid: 'b', v: 20 }, - { cid: 'c', v: 30 }, - { cid: 'd', v: 40 }, - ]; - const cmp = toComparator({ v: 1 }); +const numberCompare = (a: number, b: number) => a - b; +const numberIdentityEquals = (a: number, b: number) => a === b; + +describe('binarySearch (generic cursor-based)', () => { + describe('empty array', () => { + it('returns not found and insertionIndex 0 for empty array', () => { + const result = binarySearch({ + needle: 42, + length: 0, + getItemAt: () => undefined, + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + }); + + expect(result).toEqual({ currentIndex: -1, insertionIndex: 0 }); + }); + }); + + describe('single-element array', () => { + it('finds the element with plateauScan enabled', () => { + const arr = [10]; + + const result = binarySearch({ + needle: arr[0], + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: true, + }); + + // insertionIndex is after the last <= needle (upper bound) + expect(result).toEqual({ 
currentIndex: 0, insertionIndex: 1 }); + }); + + it('does not find the element when plateauScan is disabled', () => { + const arr = [10]; - // Insert before all - let index = binarySearchInsertIndex({ - sortedArray: items, - needle: { cid: 'x', v: 5 }, - compare: cmp, + const result = binarySearch({ + needle: arr[0], + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: false, + }); + + // insertionIndex is upper bound; currentIndex is -1 when plateauScan is false + expect(result).toEqual({ currentIndex: -1, insertionIndex: 1 }); }); - expect(index).toBe(0); - // Insert in the middle - index = binarySearchInsertIndex({ - sortedArray: items, - needle: { cid: 'y', v: 25 }, - compare: cmp, + it('inserts before the element when needle is smaller', () => { + const arr = [10]; + + const result = binarySearch({ + needle: 5, + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: true, + }); + + expect(result).toEqual({ currentIndex: -1, insertionIndex: 0 }); }); - expect(index).toBe(2); // between 20 and 30 - // Insert after all - index = binarySearchInsertIndex({ - sortedArray: items, - needle: { cid: 'z', v: 50 }, - compare: cmp, + it('inserts after the element when needle is larger', () => { + const arr = [10]; + + const result = binarySearch({ + needle: 20, + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: true, + }); + + expect(result).toEqual({ currentIndex: -1, insertionIndex: 1 }); }); - expect(index).toBe(4); }); - it('inserts after equal values block (stable position after equals)', () => { - const items: Item[] = [ - { cid: 'a', v: 10 }, - { cid: 'b', v: 10 }, - { cid: 'c', v: 10 }, - ]; - const cmp = toComparator({ v: 1 }); + describe('unique ascending numbers', () => { + const arr = [1, 3, 5, 7, 9]; + + it('computes 
correct insertionIndex when item not present (various positions)', () => { + const baseArgs = { + length: arr.length, + getItemAt: (i: number) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + }; + + // before all elements + expect( + binarySearch({ + ...baseArgs, + needle: 0, + plateauScan: true, + }), + ).toEqual({ + currentIndex: -1, + insertionIndex: 0, + }); + + // between 1 and 3 + expect( + binarySearch({ + ...baseArgs, + needle: 2, + plateauScan: true, + }), + ).toEqual({ + currentIndex: -1, + insertionIndex: 1, + }); + + // between 3 and 5 + expect( + binarySearch({ + ...baseArgs, + needle: 4, + plateauScan: true, + }), + ).toEqual({ + currentIndex: -1, + insertionIndex: 2, + }); + + // after all elements + expect( + binarySearch({ + ...baseArgs, + needle: 10, + plateauScan: true, + }), + ).toEqual({ + currentIndex: -1, + insertionIndex: arr.length, + }); + }); - const index = binarySearchInsertIndex({ - sortedArray: items, - needle: { cid: 'x', v: 10 }, - compare: cmp, + it('finds existing elements only when plateauScan is enabled', () => { + const baseArgs = { + length: arr.length, + getItemAt: (i: number) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + }; + + for (let idx = 0; idx < arr.length; idx++) { + const needle = arr[idx]; + + const found = binarySearch({ + ...baseArgs, + needle, + plateauScan: true, + }); + + // insertionIndex is upper bound (after the element) + expect(found.currentIndex).toBe(idx); + expect(found.insertionIndex).toBe(idx + 1); + + const notFound = binarySearch({ + ...baseArgs, + needle, + plateauScan: false, + }); + + // Without plateauScan, currentIndex is always -1 even for existing element + expect(notFound.currentIndex).toBe(-1); + expect(notFound.insertionIndex).toBe(idx + 1); + } }); - // By design, our binary search returns the first position where existing > needle. - // For equals, it advances to the right of the equal block. 
- expect(index).toBe(3); + it('treats omitted plateauScan the same as plateauScan=false', () => { + const needleIndex = 2; + const needle = arr[needleIndex]; // 5 + + const result = binarySearch({ + needle, + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + }); + + // by default, plateauScan is falsy + expect(result.currentIndex).toBe(-1); + // insertionIndex is upper bound + expect(result.insertionIndex).toBe(needleIndex + 1); + }); }); - it('respects multi-field comparator (e.g., secondary key decides insertion point)', () => { - const items: Item[] = [ - { cid: '2', v: 1, nested: { x: 5 } }, - { cid: '1', v: 1, nested: { x: 10 } }, // comes earlier due to nested.x desc - { cid: '3', v: 2, nested: { x: 0 } }, - ]; - const cmp = toComparator([{ v: 1 }, { 'nested.x': -1 }]); + describe('duplicates (plateaus) with object identity', () => { + type Obj = { id: number; label: string }; + + it('returns end-of-plateau insertionIndex and correct currentIndex for identity', () => { + // Plateau of 3's in the middle + const arr: Obj[] = [ + { id: 1, label: 'a' }, // 0 + { id: 3, label: 'b' }, // 1 + { id: 3, label: 'c' }, // 2 + { id: 3, label: 'd' }, // 3 + { id: 5, label: 'e' }, // 4 + ]; + + const compare = (a: Obj, b: Obj) => a.id - b.id; + const identityEquals = (a: Obj, b: Obj) => a === b; + + const baseArgs = { + length: arr.length, + getItemAt: (i: number) => arr[i], + itemIdentityEquals: identityEquals, + compare, + plateauScan: true, + }; + + // insertionIndex for id=3 value is after all 3's → index 4 + const insertionIndexFor3 = 4; + + const needleMiddle = arr[2]; + const resMiddle = binarySearch({ + ...baseArgs, + needle: needleMiddle, + }); + expect(resMiddle).toEqual({ + currentIndex: 2, + insertionIndex: insertionIndexFor3, + }); + + const needleLeft = arr[1]; + const resLeft = binarySearch({ ...baseArgs, needle: needleLeft }); + expect(resLeft).toEqual({ + currentIndex: 1, + 
insertionIndex: insertionIndexFor3, + }); + + const needleRight = arr[3]; + const resRight = binarySearch({ ...baseArgs, needle: needleRight }); + expect(resRight).toEqual({ + currentIndex: 3, + insertionIndex: insertionIndexFor3, + }); + }); + + it('plateau at the start of the array', () => { + const arr: Obj[] = [ + { id: 3, label: 'a' }, // 0 + { id: 3, label: 'b' }, // 1 + { id: 3, label: 'c' }, // 2 + { id: 5, label: 'd' }, // 3 + { id: 8, label: 'e' }, // 4 + ]; + const compare = (a: Obj, b: Obj) => a.id - b.id; + const identityEquals = (a: Obj, b: Obj) => a === b; + + const insertionIndexFor3 = 3; // first element with id > 3 is index 3 + + const result = binarySearch({ + needle: arr[0], + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: identityEquals, + compare, + plateauScan: true, + }); + + expect(result).toEqual({ + currentIndex: 0, + insertionIndex: insertionIndexFor3, + }); + }); + + it('plateau at the end of the array', () => { + const arr: Obj[] = [ + { id: 1, label: 'a' }, // 0 + { id: 2, label: 'b' }, // 1 + { id: 5, label: 'c' }, // 2 + { id: 5, label: 'd' }, // 3 + { id: 5, label: 'e' }, // 4 + ]; + const compare = (a: Obj, b: Obj) => a.id - b.id; + const identityEquals = (a: Obj, b: Obj) => a === b; + + const insertionIndexFor5 = arr.length; // no element > 5 + + const result = binarySearch({ + needle: arr[4], + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: identityEquals, + compare, + plateauScan: true, + }); + + expect(result).toEqual({ + currentIndex: 4, + insertionIndex: insertionIndexFor5, + }); + }); + + it('does not match by value when identity differs', () => { + const arr: Obj[] = [ + { id: 1, label: 'a' }, + { id: 2, label: 'b' }, + { id: 3, label: 'c' }, + ]; + + const compare = (a: Obj, b: Obj) => a.id - b.id; + const identityEquals = (a: Obj, b: Obj) => a === b; + + // same id as arr[1] but different object => not identical + const needle: Obj = { id: 2, label: 'other' }; + + const 
result = binarySearch({ + needle, + length: arr.length, + getItemAt: (i) => arr[i], + itemIdentityEquals: identityEquals, + compare, + plateauScan: true, + }); + + // upper bound for id=2 is after index 1 → index 2 + expect(result).toEqual({ + currentIndex: -1, + insertionIndex: 2, + }); + }); + }); - // Needle with same v=1 but nested.x=7 should go between cid=1 (x=10) and cid=2 (x=5) - const index = binarySearchInsertIndex({ - sortedArray: orderByComparator(items, cmp).map( - (cid) => items.find((i) => i.cid === cid)!, - ) as Item[], - needle: { cid: 'x', v: 1, nested: { x: 7 } }, - compare: cmp, + describe('corruption handling (getItemAt returns undefined during binary search)', () => { + it('returns -1/-1 when mid item is undefined', () => { + // length = 4 → first mid = 2 + const length = 4; + + const getItemAt = (index: number): number | undefined => { + if (index === 2) return undefined; // corruption at mid + return index; // arbitrary non-undefined value for others + }; + + const result = binarySearch({ + needle: 10, + length, + getItemAt, + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: true, + }); + + expect(result).toEqual({ currentIndex: -1, insertionIndex: -1 }); }); + }); - expect(index).toBe(1); // after the 10, before the 5 + describe('plateauScan scanning behavior around insertionIndex', () => { + it('treats undefined during plateau scan as exhaustion of that side only', () => { + // We make one index undefined, but ensure binary search never hits it. + // length = 5 => first mid = 2. We'll set arr[2] so that compare(midItem, needle) > 0, + // forcing hi = 2 and thus never touching index 4 in binary search. 
+ const backing: Array = [10, 20, 30, 40, undefined]; + + const getItemAt = (i: number) => backing[i]; + + const needle = 5; // smaller than 30, so hi will move left on the first step + + const result = binarySearch({ + needle, + length: backing.length, + getItemAt, + itemIdentityEquals: numberIdentityEquals, + compare: numberCompare, + plateauScan: true, + }); + + // insertionIndex is correct for the sorted values [10,20,30,40] + // first > 5 is 10 at index 0 + expect(result).toEqual({ + currentIndex: -1, + insertionIndex: 0, + }); + }); }); }); From 98c4d1b3821a6b2b5a34df8f60aa6f8d9a766514 Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 27 Feb 2026 11:02:41 +0100 Subject: [PATCH 11/31] chore(Thread): add initial spec for Thread Constructor Minimal Init --- .../decisions.md | 39 +++ specs/thread-constructor-minimal-init/plan.md | 182 +++++++++++++ .../state.json | 21 ++ .../thread-constructor-minimal-init.spec.md | 246 ++++++++++++++++++ 4 files changed, 488 insertions(+) create mode 100644 specs/thread-constructor-minimal-init/decisions.md create mode 100644 specs/thread-constructor-minimal-init/plan.md create mode 100644 specs/thread-constructor-minimal-init/state.json create mode 100644 specs/thread-constructor-minimal-init/thread-constructor-minimal-init.spec.md diff --git a/specs/thread-constructor-minimal-init/decisions.md b/specs/thread-constructor-minimal-init/decisions.md new file mode 100644 index 0000000000..e49c42c37a --- /dev/null +++ b/specs/thread-constructor-minimal-init/decisions.md @@ -0,0 +1,39 @@ +## Decision: Thread instance as the single runtime source for Thread.tsx + +**Date:** 2026-02-27 +**Context:** +The ChatView layoutController flow requires rendering `Thread.tsx` as a sibling of `Channel.tsx` and removing Thread runtime coupling to `ChannelActionContext`-based thread behavior. 
+ +**Decision:** +`Thread.tsx` in the target flow will rely only on `Thread` instance API/state (`state`, `reload`, `loadPrevPage`, `loadNextPage`, lifecycle methods). `ThreadProvider` will provide thread context only and will not render `Channel`. + +**Reasoning:** +This makes sibling rendering possible without prefetching full thread payload and aligns thread lifecycle ownership with `stream-chat-js` `Thread` class. + +**Alternatives considered:** + +- Keep `ChannelActionContext` integration and preload full thread before mount — rejected because it blocks the target layout and increases coupling. +- Keep `ThreadProvider` rendering `Channel` while partially migrating internals — rejected because it preserves the same context coupling that the layoutController direction removes. + +**Tradeoffs / Consequences:** +`Thread` instance API/state must be complete enough for first render and post-mount hydration. React thread flow tests need to shift from channel-context assumptions to thread-instance assumptions. + +## Decision: Single constructor signature with optional threadData + +**Date:** 2026-02-27 +**Context:** +The implementation should support minimal initialization while keeping constructor ergonomics simple. + +**Decision:** +Use one constructor params object with optional `threadData`; when present initialize from `threadData`, otherwise initialize from `client + channel + parentMessage` (with optional `draft`). + +**Reasoning:** +This satisfies both minimal and payload-backed creation without constructor overload complexity and matches requested API direction. + +**Alternatives considered:** + +- Constructor overloads/discriminated unions — rejected because not required and adds typing complexity. +- Separate factory methods (`fromThreadData`, `fromParent`) — rejected to avoid API expansion at this stage. 
+ +**Tradeoffs / Consequences:** +Runtime validation must be explicit for missing minimal inputs (especially `parentMessage.id`) to avoid ambiguous failures. diff --git a/specs/thread-constructor-minimal-init/plan.md b/specs/thread-constructor-minimal-init/plan.md new file mode 100644 index 0000000000..a613b51782 --- /dev/null +++ b/specs/thread-constructor-minimal-init/plan.md @@ -0,0 +1,182 @@ +# Plan: Thread Constructor Minimal Init + +## Worktree + +- **Path:** `/Users/martincupela/Projects/stream/chat/stream-chat-js-worktrees/thread-constructor-minimal-init` +- **Branch:** `feat/init-empty-thread` +- **Base branch:** `master` + +## Task Overview + +Tasks are self-contained and parallelizable where possible; tasks touching the same file have explicit dependencies and must run sequentially. + +## Task 1: Add Optional `threadData` Constructor Branch in `Thread` + +**File(s) to create/modify:** `src/thread.ts` + +**Dependencies:** None + +**Status:** in-progress + +**Owner:** codex + +**Scope:** + +- Keep a single constructor params object and make `threadData` optional. +- Add minimal-init branch (`client + channel + parentMessage`, optional `draft`). +- Validate required minimal identity fields (especially `parentMessage.id`). +- Initialize complete minimal `ThreadState` defaults. + +**Acceptance Criteria:** + +- [ ] `Thread` can be constructed without `threadData`. +- [ ] Constructor still accepts `threadData` when provided. +- [ ] Minimal init produces a valid `ThreadState` shape with no undefined required fields. + +## Task 2: Complete Hydration + Pagination Bootstrap for Minimal Threads + +**File(s) to create/modify:** `src/thread.ts` + +**Dependencies:** Task 1 + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- Ensure `hydrateState(...)` copies pagination state needed for thread-instance pagination. +- Ensure minimal-init threads can become paginable after hydration/reload. 
+- Keep read-state and message composer behavior consistent with thread-instance flow. + +**Acceptance Criteria:** + +- [ ] `hydrateState(...)` updates pagination fields required by `loadPrevPage/loadNextPage`. +- [ ] Minimal thread does not get stuck with unusable pagination after reload. + +## Task 3: Decouple `ThreadProvider` from `Channel` Rendering + +**File(s) to create/modify:** `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/ThreadContext.tsx` + +**Dependencies:** None + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- Remove `Channel` wrapper from `ThreadProvider`. +- Keep provider focused on thread context only. +- Preserve type safety for thread context consumers. + +**Acceptance Criteria:** + +- [ ] `ThreadProvider` no longer renders `<Channel />`. +- [ ] Thread context remains available to downstream components. + +## Task 4: Make `Thread.tsx` Thread-Instance-Driven (No `ChannelActionContext` Thread Actions) + +**File(s) to create/modify:** `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/Thread.tsx` + +**Dependencies:** Task 3 + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- In thread-instance mode, use `Thread` instance API (`reload`, `loadPrevPage`, `loadNextPage`, state selectors). + +- Trigger self-hydration on mount when thread state is stale. +- Remove reliance on `ChannelActionContext` thread actions for this flow. + +**Acceptance Criteria:** + +- [ ] `Thread.tsx` renders with a minimal thread instance and self-hydrates. +- [ ] Pagination in thread-instance mode uses `threadInstance` methods. +- [ ] Thread-instance flow does not require `ChannelActionContext.loadMoreThread/closeThread`.
+ +## Task 5: Add `stream-chat-js` Unit Coverage for Minimal Constructor + Hydration + +**File(s) to create/modify:** `test/unit/threads.test.ts` + +**Dependencies:** Task 2 + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- Add tests for minimal constructor path and validation behavior. +- Add tests for hydration/pagination behavior after minimal initialization. +- Confirm thread identity/read defaults for minimal mode. + +**Acceptance Criteria:** + +- [ ] Tests cover minimal construction, missing id validation, and reload hydration path. +- [ ] Tests verify pagination becomes usable after hydration. + +## Task 6: Add `stream-chat-react` Coverage for Thread-Instance-Only Flow + +**File(s) to create/modify:** `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/__tests__/Thread.test.js`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/__tests__/ThreadContext.test.tsx` + +**Dependencies:** Task 4 + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- Add tests for minimal thread instance render before hydration completes. +- Verify mount-time reload in thread-instance mode. +- Verify `ThreadProvider` works without `Channel` wrapper. + +**Acceptance Criteria:** + +- [ ] Tests fail if thread-instance flow regresses to `ChannelActionContext` dependency. +- [ ] Tests validate self-hydration and thread-instance pagination hooks. 
+ +## Task 7: Integration Verification and Final Checks + +**File(s) to create/modify:** `src/thread.ts`, `test/unit/threads.test.ts`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/Thread.tsx`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/ThreadContext.tsx`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/__tests__/Thread.test.js`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/__tests__/ThreadContext.test.tsx` + +**Dependencies:** Task 5, Task 6 + +**Status:** pending + +**Owner:** unassigned + +**Scope:** + +- Run type checks and targeted tests across both repos. +- Fix any integration breakages caused by decoupling. +- Confirm spec acceptance criteria are met end-to-end. + +**Acceptance Criteria:** + +- [ ] Required type checks and tests pass for touched areas. +- [ ] No remaining `Thread.tsx` dependency on `ChannelActionContext` thread actions in target flow. 
+ +## Execution Order + +- **Phase 1 (parallel):** Task 1, Task 3 +- **Phase 2 (sequential branches):** +- `src/thread.ts` branch: Task 2 (after Task 1) +- `stream-chat-react` branch: Task 4 (after Task 3) +- **Phase 3 (parallel):** Task 5 (after Task 2), Task 6 (after Task 4) +- **Phase 4:** Task 7 (after Task 5 and Task 6) + +## File Ownership Summary + +| Task | Creates/Modifies | +| ------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Task 1 | `src/thread.ts` | +| Task 2 | `src/thread.ts` | +| Task 3 | `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/ThreadContext.tsx` | +| Task 4 | `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/Thread.tsx` | +| Task 5 | `test/unit/threads.test.ts` | +| Task 6 | `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Thread/__tests__/Thread.test.js`, `/Users/martincupela/Projects/stream/chat/stream-chat-react/src/components/Threads/__tests__/ThreadContext.test.tsx` | +| Task 7 | Integration verification across touched files | diff --git a/specs/thread-constructor-minimal-init/state.json b/specs/thread-constructor-minimal-init/state.json new file mode 100644 index 0000000000..927d96719a --- /dev/null +++ b/specs/thread-constructor-minimal-init/state.json @@ -0,0 +1,21 @@ +{ + "tasks": { + "thread-minimal-constructor": "pending", + "thread-hydration-pagination": "pending", + "react-thread-instance-flow": "pending", + "thread-provider-decoupling": "pending", + "js-tests": "pending", + "react-tests": "pending", + "verification": "pending" + }, + "flags": { + "blocked": false, + "needs-review": false, + "awaiting-human-input": false + }, + "meta": { + "last_updated": "2026-02-27", + "worktree": 
"/Users/martincupela/Projects/stream/chat/stream-chat-js-worktrees/thread-constructor-minimal-init", + "branch": "feat/init-empty-thread" + } +} diff --git a/specs/thread-constructor-minimal-init/thread-constructor-minimal-init.spec.md b/specs/thread-constructor-minimal-init/thread-constructor-minimal-init.spec.md new file mode 100644 index 0000000000..b130c0e74f --- /dev/null +++ b/specs/thread-constructor-minimal-init/thread-constructor-minimal-init.spec.md @@ -0,0 +1,246 @@ +# Thread Constructor Minimal Initialization Spec + +## Problem Statement + +Today, `Thread` in `stream-chat-js` can only be created from full `threadData` (`ThreadResponse`). + +This spec exists to support `stream-chat-react/src/components/ChatView/layoutController/spec.md`. + +That is a blocker for the target UI composition in `stream-chat-react`: + +- render `` and `` as siblings +- hand a `Thread` instance to `` immediately (from known parent message + channel) +- fetch/hydrate full thread data after `` mounts + +Without constructor support for this, `` in thread-instance mode expects an already hydrated instance, which defeats the sibling/lazy-hydration flow. + +## Desired UX/Data Flow + +1. User opens a thread from a parent message already available in channel state. +2. App creates a minimal `Thread` instance using known references (`client`, `channel`, `parentMessage`). +3. `` renders immediately using minimal state (at least parent message context). +4. `` (or the thread instance) triggers fetch (`reload`) to hydrate replies/read/participants/pagination. +5. UI updates seamlessly once data arrives. 
+ +Why this matters: + +- faster perceived response (no blocking on `getThread` before render) +- enables clean sibling layout architecture +- keeps SDK-level thread lifecycle encapsulated in `Thread` + +## Current Cross-Codebase Constraints + +### `stream-chat-js` assumptions that must remain valid + +- `ThreadManager` expects stable `thread.id`, `thread.channel.cid`, `hasStaleState`, `hydrateState`. +- `MessageComposer` uses `compositionContext instanceof Thread` and requires `thread.channel`. +- `MessageDeliveryReporter` reads thread `id`, `channel`, `state.replies`, and `state.read`. + +### `stream-chat-react` target architecture constraints (per ChatView layoutController) + +- `ThreadProvider` should provide thread context only; it should not render/wrap a `Channel` component. +- `Thread.tsx` in the new ChatView flow should not depend on `ChannelActionContext` thread functions (`loadMoreThread`, `closeThread`, etc.). +- `Thread.tsx` should operate from `Thread` instance state/methods in thread-instance mode (`state`, `reload`, `loadPrevPage`, `loadNextPage`, `activate`, `deactivate`). +- Legacy Channel-centric thread behavior is out of scope for this change. + +## Goals + +- Add a safe minimal constructor path for `Thread`. +- Support sibling `<Channel />` + `<Thread />` rendering with post-mount hydration. +- Make `Thread.tsx` rely only on `Thread` instance API in the ChatView layoutController flow. +- Keep thread identity and behavior predictable across SDK and React consumers that use `Thread` instances. + +## Non-Goals + +- No API contract changes for backend thread endpoints. +- No support for `Thread.tsx` behavior that depends on `ChannelActionContext` thread actions. +- No removals/renames of public `Thread` methods. + +## Proposed API + +Constructor should use a single params object where `threadData` is optional: + +- `{ client, channel, parentMessage, draft?, threadData?
}` + +Initialization behavior: + +- if `threadData` is provided, initialize from `threadData` +- if `threadData` is not provided, initialize from `channel + parentMessage` + +Rationale: + +- `channel` and `parentMessage` are exactly what the sibling-render flow already has. +- optional `draft` enables initializing thread message composition state in instance-only flow. +- optional `threadData` allows callers that already have server payload to initialize directly. + +## Required Changes in `src/thread.ts` + +### 1) Constructor typing and branching + +Change: + +- Keep a single constructor signature and make `threadData` optional. +- When `threadData` is present, initialize from it. +- When `threadData` is absent, require minimal input: `client + channel + parentMessage` (with optional `draft`). + +Why: + +- keeps API explicit and type-safe +- prevents ambiguous partially-hydrated constructor inputs + +### 2) Add minimal initialization path + +Change: + +- Build a valid `ThreadState` from minimal input (without server thread payload). + +Why: + +- `Thread.tsx` and other consumers can safely subscribe to `thread.state` immediately. + +### 3) Validate identity-critical fields + +Change: + +- In minimal mode, require `parentMessage.id`; throw early if missing. +- Set `this.id = parentMessage.id`. + +Why: + +- `id` is used everywhere (React keys, thread selection, mark-read targets, manager maps). +- silent `undefined` ids would create hard-to-debug downstream failures. + +### 4) Keep `channel` as provided in minimal mode + +Change: + +- Do not synthesize channel from thread payload in minimal mode; use provided instance. + +Why: + +- sibling rendering already operates in a concrete channel context. +- `ThreadProvider` no longer wrapping `Channel` means `Thread` instance must be the source of channel linkage for thread operations. + +### 5) Share read-state placeholder logic + +Change: + +- Extract current placeholder read behavior and reuse for both constructor modes. 
+ +Why: + +- unread/read logic currently depends on read-state shape being initialized. +- avoids drift between modes. + +### 6) Hydration completeness + +Change: + +- Update `hydrateState(...)` to also copy/hydrate pagination state, not only replies/read/etc. + +Why: + +- minimal thread starts without useful cursors. +- pagination must become operational after hydration. + +### 7) Pagination bootstrap behavior + +Change: + +- Ensure minimal thread does not get stuck with both cursors `null`. +- Either: + - guarantee `reload()` is run before paginating, and hydration sets pagination correctly, or + - allow first pagination query to bootstrap when stale/minimal. + +Why: + +- in thread-instance mode, `Thread.tsx` uses `thread.loadPrevPage/loadNextPage`. +- if cursors never initialize, load-more becomes inert. + +### 8) Composer initialization parity + +Change: + +- Initialize `messageComposer` in minimal mode with optional `draft`. + +Why: + +- support draft-first UIs in the instance-only flow. + +## Minimal `ThreadState` Defaults (with rationale) + +- `active: false` (not yet focused) +- `isLoading: false` (no request in flight initially) +- `isStateStale: true` (explicit signal that server hydration is needed) +- `channel: provided channel` (required by React and composer) +- `parentMessage: formatMessage(parentMessage)` (enables immediate header/parent render) +- `createdAt: parent message created time or now` (non-null contract) +- `deletedAt: null` +- `participants: []` (unknown until hydration) +- `read: placeholder per current user strategy` (stable unread logic) +- `replies: []` (unknown until hydration) +- `replyCount: 0` (unknown default) +- `pagination: { isLoadingNext: false, isLoadingPrev: false, nextCursor: null, prevCursor: null }` +- `updatedAt: null` +- `title: ''` +- `custom: {}` + +## Required `stream-chat-react` Integration Behavior + +For `Thread.tsx` thread-instance mode (from `ThreadProvider`) in ChatView layoutController flow: + +1. 
On mount, if `threadInstance.hasStaleState` is true, call `threadInstance.reload()`. +2. Keep immediate render using `parentMessage` from minimal state while loading. +3. Avoid duplicate fetches by relying on `Thread.reload()` in-flight guards. +4. Use `threadInstance.loadPrevPage/loadNextPage` for pagination; do not call `ChannelActionContext.loadMoreThread`. +5. Use thread-instance close/navigation callbacks provided by ChatView/layout controller wiring; do not require `ChannelActionContext.closeThread`. +6. `ThreadProvider` must not render `<Channel />`; it should provide only thread context. + +Why: + +- this is the core mechanism that makes sibling rendering practical without prefetching. +- it removes the old Channel-coupled dependency chain that blocks the new layout controller architecture. + +## Testing Plan + +### `stream-chat-js` tests + +Add to `test/unit/threads.test.ts`: + +- constructs thread in minimal mode with valid default shape +- throws when minimal `parentMessage.id` is missing +- `id` derives from parent message id in minimal mode +- `reload()` hydrates stale minimal thread +- `hydrateState()` updates pagination too (not only replies/read/parent) +- pagination methods become usable after hydration + +### `stream-chat-react` verification tests + +Add/extend thread-instance tests: + +- `<Thread />` renders with minimal thread instance (before hydration completes) +- mount triggers hydration path in thread-instance mode +- hydrated data appears in message list +- pagination uses thread-instance methods, not `ChannelActionContext` callbacks +- `ThreadProvider` can provide thread context without rendering `Channel` +- `activate/deactivate` lifecycle remains stable + +## Risks and Mitigations + +- Risk: minimal threads never hydrate in UI.
  Mitigation: explicit mount-time stale check + reload in `Thread.tsx`.

- Risk: pagination remains unusable after hydration.
  Mitigation: include pagination in `hydrateState` and add dedicated tests.
+ +- Risk: hidden couplings to old Channel-context assumptions. + Mitigation: remove ChannelActionContext dependencies from `Thread.tsx` path and enforce thread-instance tests. + +## Acceptance Criteria + +- Minimal constructor mode compiles with strict types. +- `Thread.tsx` can be mounted as a sibling of `Channel.tsx` with a minimal thread instance. +- The mounted `Thread.tsx` self-hydrates thread data and updates UI without manual prefetch. +- ChatView layoutController path works with `Thread.tsx` not relying on `ChannelActionContext` thread actions. +- `ThreadProvider` no longer needs to render `Channel` to make thread-instance rendering functional. +- All existing tests pass and new minimal-flow tests pass. From 1ff227174395d949c374171efd54ef20725e9cb4 Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 27 Feb 2026 11:33:10 +0100 Subject: [PATCH 12/31] feat(Thread): implement Add Optional `threadData` Constructor Branch in `Thread` --- .../decisions.md | 20 +++ specs/thread-constructor-minimal-init/plan.md | 14 +- .../state.json | 6 +- src/thread.ts | 142 ++++++++++++------ 4 files changed, 126 insertions(+), 56 deletions(-) diff --git a/specs/thread-constructor-minimal-init/decisions.md b/specs/thread-constructor-minimal-init/decisions.md index e49c42c37a..4949653016 100644 --- a/specs/thread-constructor-minimal-init/decisions.md +++ b/specs/thread-constructor-minimal-init/decisions.md @@ -37,3 +37,23 @@ This satisfies both minimal and payload-backed creation without constructor over **Tradeoffs / Consequences:** Runtime validation must be explicit for missing minimal inputs (especially `parentMessage.id`) to avoid ambiguous failures. + +## Decision: Minimal constructor branch requires explicit parent identity and initializes deterministic defaults + +**Date:** 2026-02-27 +**Context:** +Task 1 implementation needed to support creating `Thread` without API `threadData` while preserving runtime guarantees expected by existing thread methods. 
+ +**Decision:** +When `threadData` is absent, constructor requires `channel` and `parentMessage.id`; it initializes full `ThreadState` with deterministic defaults (`replies: []`, empty participants/custom/title, placeholder read state for current user when available, and pagination cursors set to `null`). + +**Reasoning:** +This keeps `Thread` usable immediately after construction with no undefined required fields and provides a stable baseline for later hydration/reload to populate server-backed state. + +**Alternatives considered:** + +- Allow missing `parentMessage.id` and derive later — rejected because thread identity and thread-scoped operations depend on a stable id at construction time. +- Leave read/pagination fields partially undefined in minimal mode — rejected because it introduces conditional handling across runtime selectors and pagination codepaths. + +**Tradeoffs / Consequences:** +Minimal instances start non-paginable until hydrated by server data; Task 2 is responsible for carrying hydrated pagination into existing instances. diff --git a/specs/thread-constructor-minimal-init/plan.md b/specs/thread-constructor-minimal-init/plan.md index a613b51782..4e937accc7 100644 --- a/specs/thread-constructor-minimal-init/plan.md +++ b/specs/thread-constructor-minimal-init/plan.md @@ -3,7 +3,7 @@ ## Worktree - **Path:** `/Users/martincupela/Projects/stream/chat/stream-chat-js-worktrees/thread-constructor-minimal-init` -- **Branch:** `feat/init-empty-thread` +- **Branch:** `agent/feat/init-empty-thread` - **Base branch:** `master` ## Task Overview @@ -16,7 +16,7 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** None -**Status:** in-progress +**Status:** done **Owner:** codex @@ -29,9 +29,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Acceptance Criteria:** -- [ ] `Thread` can be constructed without `threadData`. 
-- [ ] Constructor still accepts `threadData` when provided. -- [ ] Minimal init produces a valid `ThreadState` shape with no undefined required fields. +- [x] `Thread` can be constructed without `threadData`. +- [x] Constructor still accepts `threadData` when provided. +- [x] Minimal init produces a valid `ThreadState` shape with no undefined required fields. ## Task 2: Complete Hydration + Pagination Bootstrap for Minimal Threads @@ -39,9 +39,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** Task 1 -**Status:** pending +**Status:** in-progress -**Owner:** unassigned +**Owner:** codex **Scope:** diff --git a/specs/thread-constructor-minimal-init/state.json b/specs/thread-constructor-minimal-init/state.json index 927d96719a..494edf780e 100644 --- a/specs/thread-constructor-minimal-init/state.json +++ b/specs/thread-constructor-minimal-init/state.json @@ -1,7 +1,7 @@ { "tasks": { - "thread-minimal-constructor": "pending", - "thread-hydration-pagination": "pending", + "thread-minimal-constructor": "done", + "thread-hydration-pagination": "in-progress", "react-thread-instance-flow": "pending", "thread-provider-decoupling": "pending", "js-tests": "pending", @@ -16,6 +16,6 @@ "meta": { "last_updated": "2026-02-27", "worktree": "/Users/martincupela/Projects/stream/chat/stream-chat-js-worktrees/thread-constructor-minimal-init", - "branch": "feat/init-empty-thread" + "branch": "agent/feat/init-empty-thread" } } diff --git a/src/thread.ts b/src/thread.ts index bf6f778121..1f1c7715e2 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -7,6 +7,7 @@ import { } from './utils'; import type { AscDesc, + DraftResponse, EventTypes, LocalMessage, MessagePaginationOptions, @@ -120,64 +121,102 @@ export class Thread extends WithSubscriptions { constructor({ client, threadData, + channel, + parentMessage, + draft, }: { client: StreamChat; - threadData: ThreadResponse; + threadData?: ThreadResponse; + channel?: Channel; + 
parentMessage?: MessageResponse | LocalMessage; + draft?: DraftResponse; }) { super(); + if (threadData) { + const threadChannel = client.channel(threadData.channel.type, threadData.channel.id, { + // @ts-expect-error name is a "custom" property + name: threadData.channel.name, + }); + threadChannel._hydrateMembers({ + members: threadData.channel.members ?? [], + overrideCurrentState: false, + }); - const channel = client.channel(threadData.channel.type, threadData.channel.id, { - // @ts-expect-error name is a "custom" property - name: threadData.channel.name, - }); - channel._hydrateMembers({ - members: threadData.channel.members ?? [], - overrideCurrentState: false, - }); + this.state = new StateStore({ + // local only + active: false, + isLoading: false, + isStateStale: false, + // 99.9% should never change + channel: threadChannel, + createdAt: new Date(threadData.created_at), + // rest + deletedAt: threadData.deleted_at ? new Date(threadData.deleted_at) : null, + pagination: repliesPaginationFromInitialThread(threadData), + parentMessage: formatMessage(threadData.parent_message), + participants: threadData.thread_participants, + read: formatReadState( + !threadData.read || threadData.read.length === 0 + ? getPlaceholderReadResponse(client.userID) + : threadData.read, + ), + replies: threadData.latest_replies.map(formatMessage), + replyCount: threadData.reply_count ?? 0, + updatedAt: threadData.updated_at ? new Date(threadData.updated_at) : null, + title: threadData.title, + custom: constructCustomDataObject(threadData), + }); - // For when read object is undefined and due to that unreadMessageCount for - // the current user isn't being incremented on message.new - const placeholderReadResponse: ReadResponse[] = client.userID - ? 
[ - { - user: { id: client.userID }, - unread_messages: 0, - last_read: new Date().toISOString(), - }, - ] - : []; + this.id = threadData.parent_message_id; + } else { + if (!channel) { + throw new Error('Channel is required when threadData is not provided'); + } - this.state = new StateStore({ - // local only - active: false, - isLoading: false, - isStateStale: false, - // 99.9% should never change - channel, - createdAt: new Date(threadData.created_at), - // rest - deletedAt: threadData.deleted_at ? new Date(threadData.deleted_at) : null, - pagination: repliesPaginationFromInitialThread(threadData), - parentMessage: formatMessage(threadData.parent_message), - participants: threadData.thread_participants, - read: formatReadState( - !threadData.read || threadData.read.length === 0 - ? placeholderReadResponse - : threadData.read, - ), - replies: threadData.latest_replies.map(formatMessage), - replyCount: threadData.reply_count ?? 0, - updatedAt: threadData.updated_at ? new Date(threadData.updated_at) : null, - title: threadData.title, - custom: constructCustomDataObject(threadData), - }); + if (!parentMessage || !parentMessage.id) { + throw new Error( + 'Parent message with a valid id is required when threadData is not provided', + ); + } + + const formattedParentMessage = formatMessage(parentMessage); + const createdAt = parentMessage.created_at + ? new Date(parentMessage.created_at) + : new Date(); + + this.state = new StateStore({ + active: false, + channel, + createdAt, + custom: {}, + deletedAt: formattedParentMessage.deleted_at, + isLoading: false, + isStateStale: false, + pagination: { + isLoadingNext: false, + isLoadingPrev: false, + nextCursor: null, + prevCursor: null, + }, + parentMessage: formattedParentMessage, + participants: [], + read: formatReadState(getPlaceholderReadResponse(client.userID)), + replies: [], + replyCount: parentMessage.reply_count ?? 0, + title: '', + updatedAt: parentMessage.updated_at + ? 
new Date(parentMessage.updated_at) + : null, + }); + + this.id = parentMessage.id; + } - this.id = threadData.parent_message_id; this.client = client; this.messageComposer = new MessageComposer({ client, - composition: threadData.draft, + composition: threadData?.draft ?? draft, compositionContext: this, }); } @@ -618,6 +657,17 @@ const formatReadState = (read: ReadResponse[]): ThreadReadState => return state; }, {}); +const getPlaceholderReadResponse = (currentUserId?: string): ReadResponse[] => + currentUserId + ? [ + { + user: { id: currentUserId }, + unread_messages: 0, + last_read: new Date().toISOString(), + }, + ] + : []; + const repliesPaginationFromInitialThread = ( thread: ThreadResponse, ): ThreadRepliesPagination => { From 9650862c11802c10410644115315b88f61457e8e Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 27 Feb 2026 11:44:41 +0100 Subject: [PATCH 13/31] feat(Thread): implement Complete Hydration + Pagination Bootstrap for Minimal Threads --- .../decisions.md | 20 ++++ specs/thread-constructor-minimal-init/plan.md | 6 +- .../state.json | 2 +- src/thread.ts | 2 + test/unit/threads.test.ts | 112 ++++++++++++++++++ 5 files changed, 138 insertions(+), 4 deletions(-) diff --git a/specs/thread-constructor-minimal-init/decisions.md b/specs/thread-constructor-minimal-init/decisions.md index 4949653016..1525bec088 100644 --- a/specs/thread-constructor-minimal-init/decisions.md +++ b/specs/thread-constructor-minimal-init/decisions.md @@ -57,3 +57,23 @@ This keeps `Thread` usable immediately after construction with no undefined requ **Tradeoffs / Consequences:** Minimal instances start non-paginable until hydrated by server data; Task 2 is responsible for carrying hydrated pagination into existing instances. 
+ +## Decision: Hydration must overwrite pagination from the fetched thread instance + +**Date:** 2026-02-27 +**Context:** +Minimal constructor threads initialize with null pagination cursors, so pagination methods remain inert until server-backed thread state is applied. + +**Decision:** +`Thread.hydrateState(...)` now copies `pagination` from the hydrated source thread alongside replies/read/metadata. + +**Reasoning:** +`loadPrevPage/loadNextPage` depend on `prevCursor/nextCursor`; without hydration of pagination, minimal threads stay permanently non-paginable after `reload()`. + +**Alternatives considered:** + +- Recompute pagination from current local replies during hydration — rejected because local replies may include optimistic/pending items and may not reflect server window boundaries. +- Keep pagination untouched and rely on later events — rejected because pagination remains blocked with null cursors. + +**Tradeoffs / Consequences:** +Hydration treats fetched thread pagination as source-of-truth and replaces local pagination state at once. diff --git a/specs/thread-constructor-minimal-init/plan.md b/specs/thread-constructor-minimal-init/plan.md index 4e937accc7..c85d5247e1 100644 --- a/specs/thread-constructor-minimal-init/plan.md +++ b/specs/thread-constructor-minimal-init/plan.md @@ -39,7 +39,7 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** Task 1 -**Status:** in-progress +**Status:** done **Owner:** codex @@ -51,8 +51,8 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Acceptance Criteria:** -- [ ] `hydrateState(...)` updates pagination fields required by `loadPrevPage/loadNextPage`. -- [ ] Minimal thread does not get stuck with unusable pagination after reload. +- [x] `hydrateState(...)` updates pagination fields required by `loadPrevPage/loadNextPage`. +- [x] Minimal thread does not get stuck with unusable pagination after reload. 
## Task 3: Decouple `ThreadProvider` from `Channel` Rendering diff --git a/specs/thread-constructor-minimal-init/state.json b/specs/thread-constructor-minimal-init/state.json index 494edf780e..fecfcabe5f 100644 --- a/specs/thread-constructor-minimal-init/state.json +++ b/specs/thread-constructor-minimal-init/state.json @@ -1,7 +1,7 @@ { "tasks": { "thread-minimal-constructor": "done", - "thread-hydration-pagination": "in-progress", + "thread-hydration-pagination": "done", "react-thread-instance-flow": "pending", "thread-provider-decoupling": "pending", "js-tests": "pending", diff --git a/src/thread.ts b/src/thread.ts index 1f1c7715e2..146363e0af 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -273,6 +273,7 @@ export class Thread extends WithSubscriptions { custom, title, deletedAt, + pagination, parentMessage, participants, read, @@ -293,6 +294,7 @@ export class Thread extends WithSubscriptions { participants, read, replyCount, + pagination, replies: pendingReplies.length ? replies.concat(pendingReplies) : replies, updatedAt, isStateStale: false, diff --git a/test/unit/threads.test.ts b/test/unit/threads.test.ts index 121cfc40f5..482ec90ab9 100644 --- a/test/unit/threads.test.ts +++ b/test/unit/threads.test.ts @@ -48,6 +48,26 @@ describe('Threads 2.0', () => { }); } + function createMinimalThread({ + parentMessageOverrides = {}, + draft, + }: { + parentMessageOverrides?: Partial; + draft?: { + channel_cid: string; + created_at: string; + message: { id: string; text: string; parent_id?: string }; + parent_id?: string; + }; + } = {}) { + return new Thread({ + client, + channel, + parentMessage: { ...parentMessageResponse, ...parentMessageOverrides }, + draft, + }); + } + beforeEach(() => { client = new StreamChat('apiKey'); client._setUser({ id: TEST_USER_ID }); @@ -81,6 +101,43 @@ describe('Threads 2.0', () => { expect(thread.channel.data?.name).to.equal(channelResponse.name); }); + it('initializes properly without threadData', () => { + const thread = 
createMinimalThread(); + const state = thread.state.getLatestValue(); + + expect(thread.id).to.equal(parentMessageResponse.id); + expect(thread.channel.cid).to.equal(channel.cid); + expect(state.parentMessage.id).to.equal(parentMessageResponse.id); + expect(state.replies).to.deep.equal([]); + expect(state.participants).to.deep.equal([]); + expect(state.custom).to.deep.equal({}); + expect(state.pagination.prevCursor).to.be.null; + expect(state.pagination.nextCursor).to.be.null; + expect(state.read).to.have.keys([TEST_USER_ID]); + }); + + it('throws if minimal init parent message id is missing', () => { + expect(() => + createMinimalThread({ + parentMessageOverrides: { id: '' }, + }), + ).to.throw(); + }); + + it('accepts draft in minimal init path', () => { + const draftId = uuidv4(); + const thread = createMinimalThread({ + draft: { + channel_cid: channel.cid, + created_at: new Date().toISOString(), + message: { id: draftId, text: 'draft text', parent_id: parentMessageResponse.id }, + parent_id: parentMessageResponse.id, + }, + }); + + expect(thread.messageComposer.draftId).to.equal(draftId); + }); + describe('Methods', () => { describe('upsertReplyLocally', () => { it('prevents inserting a new message that does not belong to the associated thread', () => { @@ -265,6 +322,30 @@ describe('Threads 2.0', () => { expect(stateAfter.participants).to.equal(hydrationState.participants); }); + it('copies pagination state during hydration', () => { + const thread = createMinimalThread(); + const hydrationThread = createTestThread({ + latest_replies: [ + generateMsg({ parent_id: parentMessageResponse.id }) as MessageResponse, + ], + reply_count: 3, + }); + + hydrationThread.state.next((current) => ({ + ...current, + pagination: { + ...current.pagination, + nextCursor: 'next-cursor', + }, + })); + + thread.hydrateState(hydrationThread); + + const stateAfter = thread.state.getLatestValue(); + expect(stateAfter.pagination.prevCursor).to.not.be.null; + 
expect(stateAfter.pagination.nextCursor).to.equal('next-cursor'); + }); + it('retains failed replies after hydration', () => { const thread = createTestThread(); const hydrationThread = createTestThread({ @@ -287,6 +368,37 @@ describe('Threads 2.0', () => { }); }); + describe('reload', () => { + it('bootstraps pagination for minimally initialized threads', async () => { + const minimalThread = createMinimalThread(); + const hydratedThread = createTestThread({ + latest_replies: [ + generateMsg({ parent_id: parentMessageResponse.id }) as MessageResponse, + ], + reply_count: 3, + }); + hydratedThread.state.next((current) => ({ + ...current, + pagination: { + ...current.pagination, + nextCursor: 'next-cursor', + }, + })); + + sinon.stub(client, 'getThread').resolves(hydratedThread); + + const stateBefore = minimalThread.state.getLatestValue(); + expect(stateBefore.pagination.prevCursor).to.be.null; + expect(stateBefore.pagination.nextCursor).to.be.null; + + await minimalThread.reload(); + + const stateAfter = minimalThread.state.getLatestValue(); + expect(stateAfter.pagination.prevCursor).to.not.be.null; + expect(stateAfter.pagination.nextCursor).to.equal('next-cursor'); + }); + }); + describe('deleteReplyLocally', () => { it('deletes appropriate message', () => { const createdAt = new Date().getTime(); From 2ab25b96ede79f9143818b2ec5f15b8cbaa80bd1 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 3 Mar 2026 15:37:37 +0100 Subject: [PATCH 14/31] feat: add reactive states to ChannelState --- .../decisions.md | 38 ++ specs/thread-constructor-minimal-init/plan.md | 26 +- .../state.json | 6 +- src/channel.ts | 303 ++++++++++++---- src/channel_state.ts | 203 ++++++++++- src/client.ts | 40 ++- .../middleware/textComposer/types.ts | 7 +- src/messageComposer/textComposer.ts | 33 +- src/messageDelivery/MessageReceiptsTracker.ts | 273 ++++++++++++++- .../unit/MessageComposer/textComposer.test.ts | 9 +- test/unit/channel.test.js | 162 ++++++++- 
test/unit/channel_state.test.js | 320 +++++++++++++++++ test/unit/client.test.js | 80 +++++ .../MessageReceiptsTracker.test.ts | 329 ++++++++++++++++-- 14 files changed, 1672 insertions(+), 157 deletions(-) diff --git a/specs/thread-constructor-minimal-init/decisions.md b/specs/thread-constructor-minimal-init/decisions.md index 1525bec088..b7586edcc9 100644 --- a/specs/thread-constructor-minimal-init/decisions.md +++ b/specs/thread-constructor-minimal-init/decisions.md @@ -77,3 +77,41 @@ Minimal constructor threads initialize with null pagination cursors, so paginati **Tradeoffs / Consequences:** Hydration treats fetched thread pagination as source-of-truth and replaces local pagination state at once. + +## Decision: ThreadProvider should be thread-context-only and not mount Channel + +**Date:** 2026-02-27 +**Context:** +Task 3 requires enabling sibling rendering of `Channel` and `Thread` in layoutController flow, which is blocked when `ThreadProvider` internally mounts ``. + +**Decision:** +`ThreadProvider` now renders only `` and no longer wraps children with ``. + +**Reasoning:** +This removes hidden channel-context coupling from thread provider composition and makes thread context provisioning independent from channel rendering topology. + +**Alternatives considered:** + +- Keep `` wrapper and adapt Thread internals only — rejected because it preserves structural coupling and prevents true sibling layout control. + +**Tradeoffs / Consequences:** +`Thread.tsx` must no longer rely on channel action/state contexts in thread-instance mode; this is addressed in Task 4. + +## Decision: Thread.tsx runs in thread-instance-only mode without channel contexts + +**Date:** 2026-02-27 +**Context:** +After removing `` from `ThreadProvider`, `Thread.tsx` must render outside channel providers in the layoutController sibling setup. 
+ +**Decision:** +`Thread.tsx` now depends only on `Thread` instance data (`useThreadContext` + `useStateStore(thread.state, ...)`) and uses thread instance methods for close, hydration (`reload` when stale), and pagination. + +**Reasoning:** +This removes hard runtime coupling to `ChannelStateContext`/`ChannelActionContext`, which are not guaranteed to exist in the target composition. + +**Alternatives considered:** + +- Keep optional reads from `ChannelStateContext` as fallback — rejected because that still makes Thread behavior coupled to channel context presence. + +**Tradeoffs / Consequences:** +Legacy channel-thread-only usage of `Thread.tsx` without a thread instance is no longer handled by this flow and must be adapted through thread-instance provisioning. diff --git a/specs/thread-constructor-minimal-init/plan.md b/specs/thread-constructor-minimal-init/plan.md index c85d5247e1..21702c0609 100644 --- a/specs/thread-constructor-minimal-init/plan.md +++ b/specs/thread-constructor-minimal-init/plan.md @@ -60,9 +60,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** None -**Status:** pending +**Status:** done -**Owner:** unassigned +**Owner:** codex **Scope:** @@ -72,8 +72,8 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Acceptance Criteria:** -- [ ] `ThreadProvider` no longer renders ``. -- [ ] Thread context remains available to downstream components. +- [x] `ThreadProvider` no longer renders ``. +- [x] Thread context remains available to downstream components. 
## Task 4: Make `Thread.tsx` Thread-Instance-Driven (No `ChannelActionContext` Thread Actions) @@ -81,9 +81,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** Task 3 -**Status:** pending +**Status:** done -**Owner:** unassigned +**Owner:** codex **Scope:** @@ -93,9 +93,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Acceptance Criteria:** -- [ ] `Thread.tsx` renders with a minimal thread instance and self-hydrates. -- [ ] Pagination in thread-instance mode uses `threadInstance` methods. -- [ ] Thread-instance flow does not require `ChannelActionContext.loadMoreThread/closeThread`. +- [x] `Thread.tsx` renders with a minimal thread instance and self-hydrates. +- [x] Pagination in thread-instance mode uses `threadInstance` methods. +- [x] Thread-instance flow does not require `ChannelActionContext.loadMoreThread/closeThread`. ## Task 5: Add `stream-chat-js` Unit Coverage for Minimal Constructor + Hydration @@ -103,9 +103,9 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Dependencies:** Task 2 -**Status:** pending +**Status:** done -**Owner:** unassigned +**Owner:** codex **Scope:** @@ -115,8 +115,8 @@ Tasks are self-contained and parallelizable where possible; tasks touching the s **Acceptance Criteria:** -- [ ] Tests cover minimal construction, missing id validation, and reload hydration path. -- [ ] Tests verify pagination becomes usable after hydration. +- [x] Tests cover minimal construction, missing id validation, and reload hydration path. +- [x] Tests verify pagination becomes usable after hydration. 
## Task 6: Add `stream-chat-react` Coverage for Thread-Instance-Only Flow diff --git a/specs/thread-constructor-minimal-init/state.json b/specs/thread-constructor-minimal-init/state.json index fecfcabe5f..a8ec53b195 100644 --- a/specs/thread-constructor-minimal-init/state.json +++ b/specs/thread-constructor-minimal-init/state.json @@ -2,9 +2,9 @@ "tasks": { "thread-minimal-constructor": "done", "thread-hydration-pagination": "done", - "react-thread-instance-flow": "pending", - "thread-provider-decoupling": "pending", - "js-tests": "pending", + "react-thread-instance-flow": "done", + "thread-provider-decoupling": "done", + "js-tests": "done", "react-tests": "pending", "verification": "pending" }, diff --git a/src/channel.ts b/src/channel.ts index bd94876f20..c2f5c0f313 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -2,6 +2,7 @@ import { ChannelState } from './channel_state'; import { CooldownTimer } from './CooldownTimer'; import { MessageComposer } from './messageComposer'; import { MessageReceiptsTracker } from './messageDelivery'; +import type { ReadStoreReconcileMeta } from './messageDelivery'; import { generateChannelTempCid, logChatPromiseExecution, @@ -164,12 +165,8 @@ export class Channel { compositionContext: this, }); - this.messageReceiptsTracker = new MessageReceiptsTracker({ - locateMessage: (timestampMs) => { - const msg = this.state.findMessageByTimestamp(timestampMs); - return msg && { timestampMs, msgId: msg.id }; - }, - }); + this.messageReceiptsTracker = new MessageReceiptsTracker({ channel: this }); + this.messageReceiptsTracker.registerSubscriptions(); this.cooldownTimer = new CooldownTimer({ channel: this }); } @@ -641,7 +638,9 @@ export class Channel { ] .sort() .join(); + const previousData = this.data; this.data = data.channel; + this._syncStateFromChannelData(this.data, previousData); // If the capabiltities are changed, we trigger the `capabilities.changed` event. 
if (areCapabilitiesChanged) { this.getClient().dispatchEvent({ @@ -666,7 +665,9 @@ export class Channel { cooldown: coolDownInterval, }, ); + const previousData = this.data; this.data = data.channel; + this._syncStateFromChannelData(this.data, previousData); return data; } @@ -682,7 +683,9 @@ export class Channel { cooldown: 0, }, ); + const previousData = this.data; this.data = data.channel; + this._syncStateFromChannelData(this.data, previousData); return data; } @@ -903,7 +906,9 @@ export class Channel { this._channelURL(), payload, ); + const previousData = this.data; this.data = data.channel; + this._syncStateFromChannelData(this.data, previousData); return data; } @@ -1261,7 +1266,9 @@ export class Channel { const combined = { ...defaultOptions, ...options }; const state = await this.query(combined, 'latest'); this.initialized = true; + const previousData = this.data; this.data = state.channel; + this._syncStateFromChannelData(this.data, previousData); this._client.logger( 'info', @@ -1583,7 +1590,9 @@ export class Channel { ] .sort() .join(); + const previousData = this.data; this.data = state.channel; + this._syncStateFromChannelData(this.data, previousData); this.offlineMode = false; this.cooldownTimer.refresh(); @@ -1908,6 +1917,56 @@ export class Channel { this.listeners[key] = this.listeners[key].filter((value) => value !== callback); } + private _patchReadState( + patch: (currentReadState: ChannelState['read']) => ChannelState['read'], + reconcileMeta?: ReadStoreReconcileMeta, + ) { + let hasStateChanged = false; + this.messageReceiptsTracker.setPendingReadStoreReconcileMeta(reconcileMeta); + + this.state.readStore.next((currentReadStoreState) => { + const nextReadState = patch(currentReadStoreState.read); + + if (nextReadState === currentReadStoreState.read) { + return currentReadStoreState; + } + hasStateChanged = true; + + return { + ...currentReadStoreState, + read: nextReadState, + }; + }); + + if (!hasStateChanged) { + 
this.messageReceiptsTracker.setPendingReadStoreReconcileMeta(undefined); + } + } + + private _upsertReadState( + userId: string, + update: ( + currentUserReadState: ChannelState['read'][string] | undefined, + ) => ChannelState['read'][string], + reconcileMeta?: ReadStoreReconcileMeta, + ) { + let nextUserReadState: ChannelState['read'][string] | undefined; + + this._patchReadState((currentReadState) => { + const currentUserReadState = currentReadState[userId]; + const updatedUserReadState = update(currentUserReadState); + + nextUserReadState = updatedUserReadState; + + return { + ...currentReadState, + [userId]: updatedUserReadState, + }; + }, reconcileMeta); + + return nextUserReadState; + } + _handleChannelEvent(event: Event) { // eslint-disable-next-line @typescript-eslint/no-this-alias const channel = this; @@ -1924,32 +1983,54 @@ export class Channel { switch (event.type) { case 'typing.start': if (event.user?.id) { - channelState.typing[event.user.id] = event; + channelState.setTypingEvent(event.user.id, event); } break; case 'typing.stop': if (event.user?.id) { - delete channelState.typing[event.user.id]; + channelState.removeTypingEvent(event.user.id); } break; case 'message.read': if (event.user?.id && event.created_at) { - const previousReadState = channelState.read[event.user.id]; - channelState.read[event.user.id] = { - // in case we already have delivery information - ...previousReadState, - last_read: new Date(event.created_at), - last_read_message_id: event.last_read_message_id, - user: event.user, - unread_messages: 0, - }; - this.messageReceiptsTracker.onMessageRead({ - user: event.user, - readAt: event.created_at, - lastReadMessageId: event.last_read_message_id, - }); - const client = this.getClient(); + const eventUser = event.user; + const readAtDate = new Date(event.created_at); + const toDate = (value?: string | Date) => + value ? (value instanceof Date ? 
value : new Date(value)) : undefined; + const userReadState = this._upsertReadState( + eventUser.id, + (currentUserReadState) => { + const currentDeliveredAt = toDate(currentUserReadState?.last_delivered_at); + + return { + // preserve delivery information already known for user + ...currentUserReadState, + ...(currentUserReadState?.last_read + ? { last_read: toDate(currentUserReadState.last_read) } + : null), + ...(currentDeliveredAt + ? { last_delivered_at: currentDeliveredAt } + : null), + last_read: readAtDate, + last_read_message_id: event.last_read_message_id, + last_delivered_at: + !currentDeliveredAt || currentDeliveredAt < readAtDate + ? readAtDate + : currentDeliveredAt, + last_delivered_message_id: + !currentDeliveredAt || currentDeliveredAt < readAtDate + ? event.last_read_message_id ?? + currentUserReadState?.last_delivered_message_id + : currentUserReadState?.last_delivered_message_id, + user: eventUser, + unread_messages: 0, + }; + }, + { changedUserIds: [eventUser.id] }, + ); + void userReadState; + const client = this.getClient(); const isOwnEvent = event.user?.id === client.user?.id; if (isOwnEvent) { @@ -1961,21 +2042,40 @@ export class Channel { case 'message.delivered': // todo: update also on thread if (event.user?.id && event.created_at) { - const previousReadState = channelState.read[event.user.id]; - channelState.read[event.user.id] = { - ...previousReadState, - last_delivered_at: event.last_delivered_at - ? new Date(event.last_delivered_at) - : undefined, - last_delivered_message_id: event.last_delivered_message_id, - user: event.user, - }; - - this.messageReceiptsTracker.onMessageDelivered({ - user: event.user, - deliveredAt: event.created_at, - lastDeliveredMessageId: event.last_delivered_message_id, - }); + const eventUser = event.user; + const createdAt = event.created_at; + const toDate = (value?: string | Date) => + value ? (value instanceof Date ? 
value : new Date(value)) : undefined; + const resolvedDeliveredAt = new Date(event.last_delivered_at ?? createdAt); + const userReadState = this._upsertReadState( + eventUser.id, + (currentUserReadState) => { + const currentDeliveredAt = toDate(currentUserReadState?.last_delivered_at); + const currentReadAt = toDate(currentUserReadState?.last_read); + + return { + ...currentUserReadState, + ...(currentReadAt ? { last_read: currentReadAt } : null), + ...(currentDeliveredAt + ? { last_delivered_at: currentDeliveredAt } + : null), + last_delivered_at: + currentDeliveredAt && currentDeliveredAt > resolvedDeliveredAt + ? currentDeliveredAt + : resolvedDeliveredAt, + last_delivered_message_id: + currentDeliveredAt && currentDeliveredAt > resolvedDeliveredAt + ? currentUserReadState?.last_delivered_message_id + : event.last_delivered_message_id, + user: eventUser, + // delivery events can be received before read events + last_read: currentReadAt ?? new Date(createdAt), + unread_messages: currentUserReadState?.unread_messages ?? 0, + }; + }, + { changedUserIds: [eventUser.id] }, + ); + void userReadState; const client = this.getClient(); const isOwnEvent = event.user?.id === client.user?.id; @@ -2046,19 +2146,36 @@ export class Channel { if (preventUnreadCountUpdate) break; if (event.user?.id) { - for (const userId in channelState.read) { - if (userId === event.user.id) { - channelState.read[event.user.id] = { - last_read: new Date(event.created_at as string), - user: event.user, - unread_messages: 0, - last_delivered_at: new Date(event.created_at as string), - last_delivered_message_id: event.message.id, - }; - } else { - channelState.read[userId].unread_messages += 1; + const eventUser = event.user; + const eventUserId = eventUser.id; + const createdAt = new Date(event.created_at ?? 
Date.now()); + const eventMessageId = event.message.id; + this._patchReadState((currentReadState) => { + const userIds = Object.keys(currentReadState); + if (!userIds.length) return currentReadState; + + const nextReadState = { ...currentReadState }; + + for (const userId of userIds) { + if (userId === eventUserId) { + nextReadState[eventUserId] = { + last_read: createdAt, + user: eventUser, + unread_messages: 0, + last_delivered_at: createdAt, + last_delivered_message_id: eventMessageId, + }; + } else { + nextReadState[userId] = { + ...currentReadState[userId], + unread_messages: + (currentReadState[userId]?.unread_messages ?? 0) + 1, + }; + } } - } + + return nextReadState; + }, { changedUserIds: Object.keys(channelState.read) }); } if (this._countMessageAsUnread(event.message)) { @@ -2132,7 +2249,10 @@ export class Channel { ...channelState.members, [memberCopy.user.id]: memberCopy, }; - if (channel.data?.member_count && event.type === 'member.added') { + if ( + event.type === 'member.added' && + typeof channel.data?.member_count === 'number' + ) { channel.data.member_count += 1; } } @@ -2157,7 +2277,7 @@ export class Channel { channelState.members = newMembers; - if (channel.data?.member_count) { + if (typeof channel.data?.member_count === 'number') { channel.data.member_count = Math.max(channel.data.member_count - 1, 0); } @@ -2166,26 +2286,26 @@ export class Channel { break; case 'notification.mark_unread': { const ownMessage = event.user?.id === this.getClient().user?.id; - if (!ownMessage || !event.user) break; - + if (!ownMessage || !event.user || !event.last_read_at) break; + const eventUser = event.user; + const lastReadAt = event.last_read_at; const unreadCount = event.unread_messages ?? 
0; - const currentState = channelState.read[event.user.id]; - channelState.read[event.user.id] = { - // keep the message delivery info - ...currentState, - first_unread_message_id: event.first_unread_message_id, - last_read: new Date(event.last_read_at as string), - last_read_message_id: event.last_read_message_id, - user: event.user, - unread_messages: unreadCount, - }; + const userReadState = this._upsertReadState( + eventUser.id, + (currentUserReadState) => ({ + // keep the message delivery info + ...currentUserReadState, + first_unread_message_id: event.first_unread_message_id, + last_read: new Date(lastReadAt), + last_read_message_id: event.last_read_message_id, + user: eventUser, + unread_messages: unreadCount, + }), + { changedUserIds: [eventUser.id] }, + ); + void userReadState; channelState.unreadCount = unreadCount; - this.messageReceiptsTracker.onNotificationMarkUnread({ - user: event.user, - lastReadAt: event.last_read_at, - lastReadMessageId: event.last_read_message_id, - }); break; } case 'channel.updated': @@ -2196,13 +2316,16 @@ export class Channel { if (isFrozenChanged) { this.query({ state: false, messages: { limit: 0 }, watchers: { limit: 0 } }); } + const previousChannelData = channel.data; const newChannelData = { ...event.channel, hidden: event.channel?.hidden ?? channel.data?.hidden, + member_count: event.channel?.member_count ?? channel.data?.member_count, own_capabilities: event.channel?.own_capabilities ?? 
channel.data?.own_capabilities, }; channel.data = newChannelData; + channel._syncStateFromChannelData(channel.data, previousChannelData); this.cooldownTimer.refresh(); } break; @@ -2229,24 +2352,30 @@ export class Channel { ) as MessageResponse; } break; - case 'channel.hidden': + case 'channel.hidden': { + const previousChannelData = channel.data; channel.data = { ...channel.data, blocked: !!event.channel?.blocked, hidden: true, }; + channel._syncStateFromChannelData(channel.data, previousChannelData); if (event.clear_history) { channelState.clearMessages(); } break; - case 'channel.visible': + } + case 'channel.visible': { + const previousChannelData = channel.data; channel.data = { ...channel.data, blocked: !!event.channel?.blocked, hidden: false, }; + channel._syncStateFromChannelData(channel.data, previousChannelData); this.getClient().offlineDb?.handleChannelVisibilityEvent({ event }); break; + } case 'user.banned': if (!event.user?.id) break; channelState.members[event.user.id] = { @@ -2320,6 +2449,14 @@ export class Channel { } } + _syncStateFromChannelData( + data: Channel['data'], + fallbackData: Channel['data'] = this.data, + ) { + this.state.syncOwnCapabilitiesFromChannelData(data, fallbackData); + this.state.syncMemberCountFromChannelData(data, fallbackData); + } + _initializeState( state: ChannelAPIResponse, messageSetToAddToIfDoesNotExist: MessageSetType = 'latest', @@ -2374,10 +2511,11 @@ export class Channel { // initialize read state to last message or current time if the channel is empty // if the user is a member, this value will be overwritten later on otherwise this ensures // that everything up to this point is not marked as unread + const readUpdates: ChannelState['read'] = {}; if (userID != null) { const last_read = this.state.last_message_at || new Date(); if (user) { - this.state.read[user.id] = { + readUpdates[user.id] = { user, last_read, unread_messages: 0, @@ -2388,7 +2526,7 @@ export class Channel { // apply read state if part of the 
state if (state.read) { for (const read of state.read) { - this.state.read[read.user.id] = { + readUpdates[read.user.id] = { last_delivered_at: read.last_delivered_at ? new Date(read.last_delivered_at) : undefined, @@ -2400,11 +2538,25 @@ export class Channel { }; if (read.user.id === user?.id) { - this.state.unreadCount = this.state.read[read.user.id].unread_messages; + this.state.unreadCount = readUpdates[read.user.id].unread_messages; } } + } + + const entries = Object.entries(readUpdates); + if (entries.length) { + this._patchReadState((currentReadState) => { + let hasChanges = false; + const nextReadState = { ...currentReadState }; + + for (const [userId, readState] of entries) { + if (nextReadState[userId] === readState) continue; + nextReadState[userId] = readState; + hasChanges = true; + } - this.messageReceiptsTracker.ingestInitial(state.read); + return hasChanges ? nextReadState : currentReadState; + }, { changedUserIds: entries.map(([userId]) => userId) }); } return { @@ -2466,6 +2618,7 @@ export class Channel { ); this.disconnected = true; + this.messageReceiptsTracker.unregisterSubscriptions(); this.cooldownTimer.clearTimeout(); this.state.setIsUpToDate(false); } diff --git a/src/channel_state.ts b/src/channel_state.ts index aa8b600140..44e62e2f35 100644 --- a/src/channel_state.ts +++ b/src/channel_state.ts @@ -18,6 +18,7 @@ import { isBlockedMessage, } from './utils'; import { DEFAULT_MESSAGE_SET_PAGINATION } from './constants'; +import { StateStore } from './store'; type ChannelReadStatus = Record< string, @@ -32,6 +33,32 @@ type ChannelReadStatus = Record< } >; +export type WatcherState = { + watcherCount: number; + watchers: Record; +}; + +export type TypingUsersState = { + typing: Record; +}; + +export type ReadState = { + read: ChannelReadStatus; +}; + +export type MutedUsersState = { + mutedUsers: Array; +}; + +export type MembersState = { + members: Record; + memberCount: number; +}; + +export type OwnCapabilitiesState = { + ownCapabilities: 
string[]; +}; + const messageSetBounds = ( a: LocalMessage[] | MessageResponse[], b: LocalMessage[] | MessageResponse[], @@ -69,15 +96,16 @@ const messageSetsOverlapByTimestamp = (a: LocalMessage[], b: LocalMessage[]) => */ export class ChannelState { _channel: Channel; - watcher_count: number; - typing: Record; - read: ChannelReadStatus; + readonly watcherStore: StateStore; + readonly typingStore: StateStore; + readonly readStore: StateStore; + readonly membersStore: StateStore; + readonly ownCapabilitiesStore: StateStore; + // todo: is this actually used somewhere? + readonly mutedUsersStore: StateStore; pinnedMessages: Array>; pending_messages: Array; threads: Record>>; - mutedUsers: Array; - watchers: Record; - members: Record; unreadCount: number; membership: ChannelMemberResponse; last_message_at: Date | null; @@ -98,17 +126,26 @@ export class ChannelState { constructor(channel: Channel) { this._channel = channel; - this.watcher_count = 0; - this.typing = {}; - this.read = {}; + this.watcherStore = new StateStore({ + watcherCount: 0, + watchers: {}, + }); + this.typingStore = new StateStore({ + typing: {}, + }); + this.readStore = new StateStore({ read: {} }); + // a list of users to hide messages from + this.mutedUsersStore = new StateStore({ mutedUsers: [] }); + this.membersStore = new StateStore({ members: {}, memberCount: 0 }); + this.ownCapabilitiesStore = new StateStore({ + ownCapabilities: [], + }); + this.syncMemberCountFromChannelData(channel?.data); + this.syncOwnCapabilitiesFromChannelData(channel?.data); this.initMessages(); this.pinnedMessages = []; this.pending_messages = []; this.threads = {}; - // a list of users to hide messages from - this.mutedUsers = []; - this.watchers = {}; - this.members = {}; this.membership = {}; this.unreadCount = 0; /** @@ -146,6 +183,144 @@ export class ChannelState { this.messageSets[index].messages = messages; } + get members() { + return this.membersStore.getLatestValue().members; + } + + set members(members: 
Record) { + this.membersStore.partialNext({ members }); + } + + get member_count() { + return this.membersStore.getLatestValue().memberCount; + } + + set member_count(memberCount: number) { + this.membersStore.partialNext({ memberCount }); + } + + get read() { + return this.readStore.getLatestValue().read; + } + + set read(read: ChannelReadStatus) { + this.readStore.next({ read }); + } + + get typing() { + return this._channel?.messageComposer?.textComposer.typing ?? + this.typingStore.getLatestValue().typing; + } + + set typing(typing: Record) { + this.typingStore.next({ typing }); + + if (this._channel?.messageComposer) { + this._channel.messageComposer.textComposer.setTyping(typing); + } + } + + syncMemberCountFromChannelData( + data: Channel['data'], + fallbackData: Channel['data'] = this._channel?.data, + ) { + const fallbackMemberCount = + typeof fallbackData?.member_count === 'number' + ? fallbackData.member_count + : this.membersStore.getLatestValue().memberCount; + + if (!data || typeof data !== 'object') { + this.membersStore.partialNext({ memberCount: fallbackMemberCount ?? 0 }); + return; + } + + const dataDescriptor = Object.getOwnPropertyDescriptor(data, 'member_count'); + let memberCount = + typeof data.member_count === 'number' + ? data.member_count + : typeof fallbackMemberCount === 'number' + ? fallbackMemberCount + : undefined; + + this.membersStore.partialNext({ memberCount: memberCount ?? 0 }); + + Object.defineProperty(data, 'member_count', { + configurable: true, + enumerable: dataDescriptor?.enumerable ?? false, + get: () => memberCount, + set: (nextMemberCount: number | undefined) => { + memberCount = typeof nextMemberCount === 'number' ? nextMemberCount : undefined; + this.membersStore.partialNext({ memberCount: memberCount ?? 
0 }); + }, + }); + } + + syncOwnCapabilitiesFromChannelData( + data: Channel['data'], + fallbackData: Channel['data'] = this._channel?.data, + ) { + if (!data || typeof data !== 'object') { + this.ownCapabilitiesStore.next({ ownCapabilities: [] }); + return; + } + + let ownCapabilities = Array.isArray(data.own_capabilities) + ? [...data.own_capabilities] + : Array.isArray(fallbackData?.own_capabilities) + ? [...fallbackData.own_capabilities] + : []; + + this.ownCapabilitiesStore.next({ ownCapabilities: ownCapabilities }); + + Object.defineProperty(data, 'own_capabilities', { + configurable: true, + enumerable: true, + get: () => ownCapabilities, + set: (nextOwnCapabilities: string[] | undefined) => { + ownCapabilities = Array.isArray(nextOwnCapabilities) + ? [...nextOwnCapabilities] + : []; + this.ownCapabilitiesStore.next({ ownCapabilities: ownCapabilities }); + }, + }); + } + + setTypingEvent(userID: string, event: Event) { + this.typing = { ...this.typing, [userID]: event }; + } + + removeTypingEvent(userID: string) { + if (!this.typing[userID]) return; + + const typing = { ...this.typing }; + delete typing[userID]; + this.typing = typing; + } + + get mutedUsers() { + return this.mutedUsersStore.getLatestValue().mutedUsers; + } + + set mutedUsers(mutedUsers: Array) { + this.mutedUsersStore.next({ mutedUsers }); + } + + get watchers() { + return this.watcherStore.getLatestValue().watchers; + } + + set watchers(watchers: Record) { + this.watcherStore.partialNext({ watchers }); + } + + get watcher_count() { + return this.watcherStore.getLatestValue().watcherCount; + } + + set watcher_count(watcherCount: number) { + this.watcherStore.partialNext({ watcherCount }); + } + get messagePagination() { return ( this.messageSets.find((s) => s.isCurrent)?.pagination || @@ -820,7 +995,7 @@ export class ChannelState { ? 
new Date(lastEvent.received_at) : lastEvent.received_at || new Date(); if (now.getTime() - receivedAt.getTime() > 7000) { - delete this.typing[userID]; + this.removeTypingEvent(userID); this._channel.getClient().dispatchEvent({ cid: this._channel.cid, type: 'typing.stop', diff --git a/src/client.ts b/src/client.ts index 0bdf1dcbf7..204439831f 100644 --- a/src/client.ts +++ b/src/client.ts @@ -275,6 +275,10 @@ type MessageComposerSetupFunction = ({ export type BlockedUsersState = { userIds: string[] }; +export type ChannelConfigsState = { + configs: Configs; +}; + export type MessageComposerSetupState = { /** * Each `MessageComposer` runs this function each time its signature changes or @@ -307,7 +311,6 @@ export class StreamChat { browser: boolean; cleaningIntervalRef?: NodeJS.Timeout; clientID?: string; - configs: Configs; key: string; listeners: Record void>>; logger: Logger; @@ -323,7 +326,8 @@ export class StreamChat { recoverStateOnReconnect?: boolean; moderation: Moderation; mutedChannels: ChannelMute[]; - mutedUsers: Mute[]; + readonly mutedUsersStore: StateStore<{ mutedUsers: Mute[] }>; + readonly configsStore: StateStore; blockedUsers: StateStore; node: boolean; options: StreamChatOptions; @@ -384,7 +388,12 @@ export class StreamChat { this.state = new ClientState({ client: this }); // a list of channels to hide ws events from this.mutedChannels = []; - this.mutedUsers = []; + this.mutedUsersStore = new StateStore<{ mutedUsers: Mute[] }>({ + mutedUsers: [], + }); + this.configsStore = new StateStore<{ configs: Configs }>({ + configs: {}, + }); this.blockedUsers = new StateStore({ userIds: [] }); this.moderation = new Moderation(this); @@ -525,6 +534,22 @@ export class StreamChat { this.messageDeliveryReporter = new MessageDeliveryReporter({ client: this }); } + get mutedUsers() { + return this.mutedUsersStore.getLatestValue().mutedUsers; + } + + set mutedUsers(mutedUsers: Mute[]) { + this.mutedUsersStore.next({ mutedUsers }); + } + + get configs() { + 
return this.configsStore.getLatestValue().configs; + } + + set configs(configs: Configs) { + this.configsStore.next({ configs }); + } + /** * Get a client instance * @@ -2021,7 +2046,9 @@ export class StreamChat { for (const channelState of channelsFromApi) { this._addChannelConfig(channelState.channel); const c = this.channel(channelState.channel.type, channelState.channel.id); + const previousData = c.data; c.data = channelState.channel; + c._syncStateFromChannelData(c.data, previousData); c.offlineMode = offlineMode; c.initialized = !offlineMode; c.push_preferences = channelState.push_preferences; @@ -2248,7 +2275,10 @@ export class StreamChat { _addChannelConfig({ cid, config }: ChannelResponse) { if (this._cacheEnabled()) { - this.configs[cid] = config; + this.configs = { + ...this.configs, + [cid]: config, + }; } } @@ -2398,7 +2428,9 @@ export class StreamChat { ) { const channel = this.activeChannels[cid]; if (Object.keys(custom).length > 0) { + const previousData = channel.data; channel.data = { ...channel.data, ...custom }; + channel._syncStateFromChannelData(channel.data, previousData); channel._data = { ...channel._data, ...custom }; } return channel; diff --git a/src/messageComposer/middleware/textComposer/types.ts b/src/messageComposer/middleware/textComposer/types.ts index f7aaf70d2c..5179253f48 100644 --- a/src/messageComposer/middleware/textComposer/types.ts +++ b/src/messageComposer/middleware/textComposer/types.ts @@ -1,5 +1,5 @@ import type { MessageComposer } from '../../messageComposer'; -import type { CommandResponse, UserResponse } from '../../../types'; +import type { CommandResponse, Event, UserResponse } from '../../../types'; import type { TokenizationPayload } from './textMiddlewareUtils'; import type { SearchSource, SearchSourceSync } from '../../../search'; import type { CustomTextComposerSuggestion } from '../../types.custom'; @@ -38,6 +38,11 @@ export type TextComposerState = { mentionedUsers: UserResponse[]; selection: 
TextSelection; text: string; + /** + * Live typing events keyed by user id. + * Maps `user.id` -> latest typing event (`typing.start`/`typing.stop`) for that user. + */ + typing: Record; command?: CommandResponse | null; suggestions?: Suggestions; }; diff --git a/src/messageComposer/textComposer.ts b/src/messageComposer/textComposer.ts index 6e1514958e..39c5d5bf96 100644 --- a/src/messageComposer/textComposer.ts +++ b/src/messageComposer/textComposer.ts @@ -6,7 +6,13 @@ import type { TextSelection } from './middleware/textComposer/types'; import type { TextComposerState } from './middleware/textComposer/types'; import type { Suggestions } from './middleware/textComposer/types'; import type { MessageComposer } from './messageComposer'; -import type { CommandResponse, DraftMessage, LocalMessage, UserResponse } from '../types'; +import type { + CommandResponse, + DraftMessage, + Event, + LocalMessage, + UserResponse, +} from '../types'; export type TextComposerOptions = { composer: MessageComposer; @@ -40,6 +46,7 @@ const initState = ({ command: null, mentionedUsers: [], text, + typing: {}, selection: { start: text.length, end: text.length }, }; } @@ -49,6 +56,7 @@ const initState = ({ typeof item === 'string' ? 
({ id: item } as UserResponse) : item, ), text, + typing: {}, selection: { start: text.length, end: text.length }, }; }; @@ -139,6 +147,29 @@ export class TextComposer { return this.state.getLatestValue().text; } + get typing() { + return this.state.getLatestValue().typing; + } + + set typing(typing: Record) { + this.state.partialNext({ typing }); + } + + setTyping = (typing: Record) => { + this.typing = typing; + }; + + setTypingEvent = (userId: string, event: Event) => { + this.typing = { ...this.typing, [userId]: event }; + }; + + removeTypingEvent = (userId: string) => { + if (!this.typing[userId]) return; + const typing = { ...this.typing }; + delete typing[userId]; + this.typing = typing; + }; + get textIsEmpty() { return textIsEmpty(this.text); } diff --git a/src/messageDelivery/MessageReceiptsTracker.ts b/src/messageDelivery/MessageReceiptsTracker.ts index 06860314b7..6a42f06b6c 100644 --- a/src/messageDelivery/MessageReceiptsTracker.ts +++ b/src/messageDelivery/MessageReceiptsTracker.ts @@ -1,4 +1,7 @@ import type { ReadResponse, UserResponse } from '../types'; +import { StateStore } from '../store'; +import type { Channel } from '../channel'; +import { WithSubscriptions } from '../utils/WithSubscriptions'; type UserId = string; type MessageId = string; @@ -11,11 +14,38 @@ export type UserProgress = { lastReadRef: MsgRef; // MIN_REF if none lastDeliveredRef: MsgRef; // MIN_REF if none; always >= readRef }; +export type MessageReceiptsSnapshot = { + revision: number; + readersByMessageId: Record; + deliveredByMessageId: Record; +}; +export type ReadStoreReconcileMeta = { + changedUserIds?: string[]; + removedUserIds?: string[]; +}; +type ReadStoreUserState = { + last_read?: Date | string; + unread_messages?: number; + user?: UserResponse; + first_unread_message_id?: string; + last_read_message_id?: string; + last_delivered_at?: Date | string; + last_delivered_message_id?: string; +}; // ---------- ordering utilities ---------- const MIN_REF: MsgRef = { 
timestampMs: Number.NEGATIVE_INFINITY, msgId: '' } as const; +const toTimestampMs = (value: Date | string) => value instanceof Date ? value.getTime() : new Date(value).getTime(); + +const isValidReadState = ( + readState: ReadStoreUserState | undefined, +): readState is ReadStoreUserState & { + last_read: Date | string; + user: UserResponse; +} => !!readState?.user && !!readState.last_read; + const compareRefsAsc = (a: MsgRef, b: MsgRef) => a.timestampMs !== b.timestampMs ? a.timestampMs - b.timestampMs : 0; @@ -71,7 +101,8 @@ const removeByOldKey = ( }; export type OwnMessageReceiptsTrackerOptions = { - locateMessage: OwnMessageReceiptsTrackerMessageLocator; + channel: Channel; + locateMessage?: OwnMessageReceiptsTrackerMessageLocator; }; /** @@ -92,9 +123,10 @@ export type OwnMessageReceiptsTrackerOptions = { * * Construction * ------------ - * `new MessageReceiptsTracker({locateMessage})` - * - `locateMessage(timestamp) => MsgRef | null` must resolve a message ref representation - `{ timestamp, msgId }`. - * - If `locateMessage` returns `null`, the event is ignored (message unknown locally). + * `new MessageReceiptsTracker({ channel, locateMessage? })` + * - By default, message references are read through `channel.state.findMessageByTimestamp`. + * - `locateMessage` can override this lookup strategy. + * If a message cannot be resolved locally, the event is ignored. * * Event ingestion * --------------- @@ -131,14 +163,98 @@ export type OwnMessageReceiptsTrackerOptions = { * equal-timestamp plateau (upper-bound insertion), preserving intuitive arrival order. 
* - This tracker models **others’ progress toward own messages**; */ -export class MessageReceiptsTracker { +export class MessageReceiptsTracker extends WithSubscriptions { private byUser = new Map(); private readSorted: UserProgress[] = []; // asc by lastReadRef private deliveredSorted: UserProgress[] = []; // asc by lastDeliveredRef + private channel: Channel; private locateMessage: OwnMessageReceiptsTrackerMessageLocator; + private pendingReadStoreReconcileMeta?: ReadStoreReconcileMeta; + readonly snapshotStore = new StateStore({ + revision: 0, + readersByMessageId: {}, + deliveredByMessageId: {}, + }); + + constructor({ channel, locateMessage }: OwnMessageReceiptsTrackerOptions) { + super(); + this.channel = channel; + this.locateMessage = locateMessage ?? ((timestampMs: number) => { + const message = this.channel.state.findMessageByTimestamp(timestampMs); + return message ? { timestampMs, msgId: message.id } : null; + }); + } + + public registerSubscriptions = () => { + this.incrementRefCount(); + if (this.hasSubscriptions) return; + + this.addUnsubscribeFunction( + this.channel.state.readStore.subscribe((next, prev) => { + this.reconcileFromReadStore({ + previousReadState: prev?.read, + nextReadState: next.read, + meta: this.pendingReadStoreReconcileMeta, + }); + this.pendingReadStoreReconcileMeta = undefined; + }), + ); + }; - constructor({ locateMessage }: OwnMessageReceiptsTrackerOptions) { - this.locateMessage = locateMessage; + public unregisterSubscriptions = () => { + this.pendingReadStoreReconcileMeta = undefined; + return super.unregisterSubscriptions(); + }; + + public setPendingReadStoreReconcileMeta(meta?: ReadStoreReconcileMeta) { + this.pendingReadStoreReconcileMeta = meta; + } + + reconcileFromReadStore({ + previousReadState, + nextReadState, + meta, + }: { + previousReadState?: Record; + nextReadState: Record; + meta?: ReadStoreReconcileMeta; + }) { + if (!previousReadState) { + 
this.ingestInitial(this.readStoreStateToResponses(nextReadState)); + return; + } + + // For non-bootstrap updates, we require patch metadata from channel read-store mutations. + if (!meta) return; + + const removedUserIds = new Set(meta?.removedUserIds ?? []); + const changedUserIds = new Set(meta?.changedUserIds ?? []); + + const changedOrRemovedUserIds = new Set([ + ...changedUserIds, + ...removedUserIds, + ]); + + if (!changedOrRemovedUserIds.size) return; + + let hasEffectiveChange = false; + + for (const userId of changedOrRemovedUserIds) { + if (removedUserIds.has(userId) || !nextReadState[userId]) { + hasEffectiveChange = this.removeUserProgress(userId) || hasEffectiveChange; + continue; + } + + const nextUserReadState = nextReadState[userId]; + if (!isValidReadState(nextUserReadState)) continue; + const resolvedProgress = this.readStateToUserProgress(nextUserReadState); + hasEffectiveChange = + this.upsertUserProgress(resolvedProgress) || hasEffectiveChange; + } + + if (hasEffectiveChange) { + this.emitSnapshot(); + } } /** Build initial state from server snapshots (single pass + sort). */ @@ -173,6 +289,8 @@ export class MessageReceiptsTracker { userProgress, ); } + + this.emitSnapshot(); } /** message.delivered — user device confirmed delivery up to and including messageId. */ @@ -207,6 +325,7 @@ export class MessageReceiptsTracker { ); userProgress.lastDeliveredRef = newDelivered; insertByKey(this.deliveredSorted, userProgress, (x) => x.lastDeliveredRef); + this.emitSnapshot(); } /** message.read — user read up to and including messageId. */ @@ -249,6 +368,8 @@ export class MessageReceiptsTracker { userProgress.lastDeliveredRef = userProgress.lastReadRef; insertByKey(this.deliveredSorted, userProgress, (x) => x.lastDeliveredRef); } + + this.emitSnapshot(); } /** notification.mark_unread — user marked messages unread starting at `first_unread_message_id`. 
@@ -300,6 +421,8 @@ export class MessageReceiptsTracker { userProgress.lastDeliveredRef = userProgress.lastReadRef; insertByKey(this.deliveredSorted, userProgress, (x) => x.lastDeliveredRef); } + + this.emitSnapshot(); } /** All users who READ this message. */ @@ -414,4 +537,140 @@ export class MessageReceiptsTracker { } return up; } + + private removeUserProgress(userId: string) { + const userProgress = this.byUser.get(userId); + if (!userProgress) return false; + + removeByOldKey(this.readSorted, userProgress, userProgress.lastReadRef, (x) => x.lastReadRef); + removeByOldKey( + this.deliveredSorted, + userProgress, + userProgress.lastDeliveredRef, + (x) => x.lastDeliveredRef, + ); + this.byUser.delete(userId); + + return true; + } + + private upsertUserProgress(nextUserProgress: UserProgress) { + const existingUserProgress = this.byUser.get(nextUserProgress.user.id); + if (!existingUserProgress) { + this.byUser.set(nextUserProgress.user.id, nextUserProgress); + insertByKey(this.readSorted, nextUserProgress, (x) => x.lastReadRef); + insertByKey(this.deliveredSorted, nextUserProgress, (x) => x.lastDeliveredRef); + return true; + } + + const hasSameReadRef = + compareRefsAsc(existingUserProgress.lastReadRef, nextUserProgress.lastReadRef) === 0 && + existingUserProgress.lastReadRef.msgId === nextUserProgress.lastReadRef.msgId; + const hasSameDeliveredRef = + compareRefsAsc( + existingUserProgress.lastDeliveredRef, + nextUserProgress.lastDeliveredRef, + ) === 0 && + existingUserProgress.lastDeliveredRef.msgId === + nextUserProgress.lastDeliveredRef.msgId; + const hasSameUser = existingUserProgress.user.id === nextUserProgress.user.id; + + if (hasSameReadRef && hasSameDeliveredRef && hasSameUser) { + return false; + } + + removeByOldKey( + this.readSorted, + existingUserProgress, + existingUserProgress.lastReadRef, + (x) => x.lastReadRef, + ); + removeByOldKey( + this.deliveredSorted, + existingUserProgress, + existingUserProgress.lastDeliveredRef, + (x) => 
x.lastDeliveredRef, + ); + + existingUserProgress.user = nextUserProgress.user; + existingUserProgress.lastReadRef = nextUserProgress.lastReadRef; + existingUserProgress.lastDeliveredRef = nextUserProgress.lastDeliveredRef; + + insertByKey(this.readSorted, existingUserProgress, (x) => x.lastReadRef); + insertByKey(this.deliveredSorted, existingUserProgress, (x) => x.lastDeliveredRef); + + return true; + } + + private readStateToUserProgress(readState: { + last_read: Date | string; + unread_messages?: number; + user: UserResponse; + first_unread_message_id?: string; + last_read_message_id?: string; + last_delivered_at?: Date | string; + last_delivered_message_id?: string; + }): UserProgress { + const lastReadTimestamp = toTimestampMs(readState.last_read); + const lastDeliveredTimestamp = readState.last_delivered_at + ? toTimestampMs(readState.last_delivered_at) + : null; + const lastReadRef = readState.last_read_message_id + ? { timestampMs: lastReadTimestamp, msgId: readState.last_read_message_id } + : this.locateMessage(lastReadTimestamp) ?? MIN_REF; + let lastDeliveredRef = readState.last_delivered_message_id + ? { + timestampMs: lastDeliveredTimestamp ?? lastReadTimestamp, + msgId: readState.last_delivered_message_id, + } + : lastDeliveredTimestamp + ? (this.locateMessage(lastDeliveredTimestamp) ?? 
MIN_REF) + : MIN_REF; + + if (compareRefsAsc(lastDeliveredRef, lastReadRef) < 0) { + lastDeliveredRef = lastReadRef; + } + + return { + user: readState.user, + lastReadRef, + lastDeliveredRef, + }; + } + + private readStoreStateToResponses( + readState: Record, + ): ReadResponse[] { + return Object.values(readState).reduce((responses, userReadState) => { + if (!isValidReadState(userReadState)) return responses; + const lastReadDate = new Date(userReadState.last_read); + if (Number.isNaN(lastReadDate.getTime())) return responses; + const lastReadIso = lastReadDate.toISOString(); + + responses.push({ + last_read: lastReadIso, + user: userReadState.user, + last_read_message_id: userReadState.last_read_message_id, + unread_messages: userReadState.unread_messages ?? 0, + last_delivered_at: userReadState.last_delivered_at + ? new Date(userReadState.last_delivered_at).toISOString() + : undefined, + last_delivered_message_id: userReadState.last_delivered_message_id, + }); + + return responses; + }, []); + } + + private emitSnapshot() { + const readersByMessageId = this.groupUsersByLastReadMessage(); + const deliveredByMessageId = this.groupUsersByLastDeliveredMessage(); + const currentSnapshot = this.snapshotStore.getLatestValue(); + + this.snapshotStore.next({ + revision: currentSnapshot.revision + 1, + readersByMessageId, + deliveredByMessageId, + }); + } } diff --git a/test/unit/MessageComposer/textComposer.test.ts b/test/unit/MessageComposer/textComposer.test.ts index cf81aa0f5e..ab6453f80b 100644 --- a/test/unit/MessageComposer/textComposer.test.ts +++ b/test/unit/MessageComposer/textComposer.test.ts @@ -105,8 +105,9 @@ describe('TextComposer', () => { expect(messageComposer.textComposer.state.getLatestValue()).toEqual({ command: null, mentionedUsers: [], - text: '', selection: { start: 0, end: 0 }, + text: '', + typing: {}, }); }); @@ -116,8 +117,9 @@ describe('TextComposer', () => { expect(messageComposer.textComposer.state.getLatestValue()).toEqual({ command: null, 
mentionedUsers: [], - text: defaultValue, selection: { start: defaultValue.length, end: defaultValue.length }, + text: defaultValue, + typing: {}, }); }); @@ -228,8 +230,9 @@ describe('TextComposer', () => { const initialState = { command: null, mentionedUsers: [], - text: '', selection: { start: 0, end: 0 }, + text: '', + typing: {}, }; const { messageComposer: { textComposer }, diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index 36d4d853be..2bc18bd595 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -32,6 +32,7 @@ describe('Channel count unread', function () { channel = client.channel(channelResponse.channel.type, channelResponse.channel.id); channel.initialized = true; channel.lastRead = () => lastRead; + channel.data.own_capabilities = ['read-events']; const ignoredMessages = [ generateMsg({ date: '2018-01-01T00:00:00', mentioned_users: [user] }), @@ -223,6 +224,7 @@ describe('Channel _handleChannelEvent', function () { client.userID = user.id; client.userMuteStatus = (targetId) => targetId.startsWith('mute'); channel = client.channel('messaging', 'id'); + channel.data.own_capabilities = ['read-events']; channel.initialized = true; }); @@ -277,6 +279,21 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.membership).to.equal(channel.state.members[user.id]); }); + it('increments member_count from zero on member.added and syncs state member_count', () => { + channel.data.member_count = 0; + + channel._handleChannelEvent({ + type: 'member.added', + user, + member: generateMember({ + user: { id: 'new-user' }, + }), + }); + + expect(channel.data.member_count).to.equal(1); + expect(channel.state.member_count).to.equal(1); + }); + it('message.new does not reset the unreadCount for current user messages', function () { channel.state.unreadCount = 100; channel._handleChannelEvent({ @@ -701,6 +718,24 @@ describe('Channel _handleChannelEvent', function () { 
expect(channel.state.read[user.id].last_delivered_message_id).toBe( initialReadState.last_delivered_message_id, ); + expect( + channel.messageReceiptsTracker.getUserProgress(user.id)?.lastReadRef.msgId, + ).toBe(event.last_read_message_id); + }); + + it('should reconcile tracker with metadata patch for notification.mark_unread', () => { + channel.state.read[user.id] = initialReadState; + const reconcileSpy = vi.spyOn( + channel.messageReceiptsTracker, + 'reconcileFromReadStore', + ); + + channel._handleChannelEvent(notificationMarkUnreadEvent); + + expect(reconcileSpy).toHaveBeenCalledTimes(1); + expect(reconcileSpy.mock.calls[0][0].meta).toEqual({ + changedUserIds: [user.id], + }); }); it('should not update channel read state produced for another user or user is missing', () => { @@ -771,12 +806,15 @@ describe('Channel _handleChannelEvent', function () { event.last_read_message_id, ); expect(channel.state.read[user.id].unread_messages).toBe(0); - expect(channel.state.read[user.id].last_delivered_at).toBe( - initialReadState.last_delivered_at, + expect(new Date(channel.state.read[user.id].last_delivered_at).getTime()).toBe( + new Date(messageReadEvent.created_at).getTime(), ); expect(channel.state.read[user.id].last_delivered_message_id).toBe( - initialReadState.last_delivered_message_id, + event.last_read_message_id, ); + expect( + channel.messageReceiptsTracker.getUserProgress(user.id)?.lastReadRef.msgId, + ).toBe(event.last_read_message_id); }); it('should update channel read state produced for another user', () => { @@ -795,11 +833,32 @@ describe('Channel _handleChannelEvent', function () { event.last_read_message_id, ); expect(channel.state.read[anotherUser.id].unread_messages).toBe(0); - expect(channel.state.read[anotherUser.id].last_delivered_at).toBe( - initialReadState.last_delivered_at, + expect(new Date(channel.state.read[anotherUser.id].last_delivered_at).getTime()).toBe( + new Date(messageReadEvent.created_at).getTime(), ); 
expect(channel.state.read[anotherUser.id].last_delivered_message_id).toBe( - initialReadState.last_delivered_message_id, + event.last_read_message_id, + ); + }); + + it('should emit readStore subscription updates for single-user message.read events', () => { + channel.state.read[user.id] = initialReadState; + const changes = []; + const unsubscribe = channel.state.readStore.subscribe((next, prev) => { + if (!prev) return; + changes.push({ + next: next.read[user.id], + prev: prev.read[user.id], + }); + }); + + channel._handleChannelEvent(messageReadEvent); + unsubscribe(); + + expect(changes).to.have.length(1); + expect(changes[0].next).to.not.equal(changes[0].prev); + expect(new Date(changes[0].next.last_read).getTime()).toBe( + new Date(messageReadEvent.created_at).getTime(), ); }); }); @@ -856,6 +915,29 @@ describe('Channel _handleChannelEvent', function () { ); }); + it('should not move canonical delivered state backwards on out-of-order events', () => { + channel.state.read[user.id] = { + ...initialReadState, + last_delivered_at: new Date(3000).toISOString(), + last_delivered_message_id: 'newer-message-id', + }; + const olderDeliveryEvent = { + ...messageDeliveredEvent, + created_at: new Date(2000).toISOString(), + last_delivered_at: new Date(2000).toISOString(), + last_delivered_message_id: 'older-message-id', + }; + + channel._handleChannelEvent(olderDeliveryEvent); + + expect(new Date(channel.state.read[user.id].last_delivered_at).getTime()).toBe( + new Date(3000).getTime(), + ); + expect(channel.state.read[user.id].last_delivered_message_id).toBe( + 'newer-message-id', + ); + }); + it('should update channel read state produced for another user', () => { const anotherUser = { id: 'another-user' }; channel.state.unreadCount = initialCountUnread; @@ -1202,7 +1284,7 @@ describe('Channel _handleChannelEvent', function () { expect(channel.data.blocked).eq(false); }); - it('should update the frozen flag and reload channel state to update `own_capabilities`', () => 
{ + it('should update the frozen flag and reload channel state when frozen changes', () => { const event = { channel: { frozen: true }, type: 'channel.updated', @@ -1220,6 +1302,18 @@ describe('Channel _handleChannelEvent', function () { // Make sure that we don't wipe out any data }); + it('preserves member_count on channel.updated when event payload omits member_count', () => { + channel.data.member_count = 3; + channel.data.frozen = false; + channel._handleChannelEvent({ + channel: { frozen: false }, + type: 'channel.updated', + }); + + expect(channel.data.member_count).to.equal(3); + expect(channel.state.member_count).to.equal(3); + }); + it(`should make sure that state reload doesn't wipe out existing data`, async () => { const mock = sinon.mock(client); mock.expects('post').returns(Promise.resolve(mockChannelQueryResponse)); @@ -1381,16 +1475,17 @@ describe('Channels - Constructor', function () { const channel = client.channel('messaging', '123', { cool: true }); expect(channel.cid).to.eql('messaging:123'); expect(channel.id).to.eql('123'); - expect(channel.data).to.eql({ cool: true }); + expect(channel.data.cool).to.eql(true); }); it('custom data merges to the right with current data', function () { let channel = client.channel('messaging', 'brand_new_123', { cool: true }); expect(channel.cid).to.eql('messaging:brand_new_123'); expect(channel.id).to.eql('brand_new_123'); - expect(channel.data).to.eql({ cool: true }); + expect(channel.data.cool).to.eql(true); channel = client.channel('messaging', 'brand_new_123', { custom_cool: true }); - expect(channel.data).to.eql({ cool: true, custom_cool: true }); + expect(channel.data.cool).to.eql(true); + expect(channel.data.custom_cool).to.eql(true); }); it('default options', function () { @@ -1407,12 +1502,13 @@ describe('Channels - Constructor', function () { it('undefined ID no options', function () { const channel = client.channel('messaging', undefined); expect(channel.id).to.eql(undefined); - 
expect(channel.data).to.eql({}); + expect(channel.data.own_capabilities).to.eql([]); + expect(Object.keys(channel.data)).to.eql(['own_capabilities']); }); it('short version with options', function () { const channel = client.channel('messaging', { members: ['tommaso', 'thierry'] }); - expect(channel.data).to.eql({ members: ['tommaso', 'thierry'] }); + expect(channel.data.members).to.eql(['tommaso', 'thierry']); expect(channel.id).to.eql(undefined); }); @@ -1420,7 +1516,7 @@ describe('Channels - Constructor', function () { const channel = client.channel('messaging', null, { members: ['tommaso', 'thierry'], }); - expect(channel.data).to.eql({ members: ['tommaso', 'thierry'] }); + expect(channel.data.members).to.eql(['tommaso', 'thierry']); expect(channel.id).to.eql(undefined); }); @@ -1428,7 +1524,7 @@ describe('Channels - Constructor', function () { const channel = client.channel('messaging', '', { members: ['tommaso', 'thierry'], }); - expect(channel.data).to.eql({ members: ['tommaso', 'thierry'] }); + expect(channel.data.members).to.eql(['tommaso', 'thierry']); expect(channel.id).to.eql(undefined); }); @@ -1436,7 +1532,7 @@ describe('Channels - Constructor', function () { const channel = client.channel('messaging', undefined, { members: ['tommaso', 'thierry'], }); - expect(channel.data).to.eql({ members: ['tommaso', 'thierry'] }); + expect(channel.data.members).to.eql(['tommaso', 'thierry']); expect(channel.id).to.eql(undefined); }); }); @@ -1891,6 +1987,42 @@ describe('Channel _initializeState', () => { expect(Object.keys(channel.state.members)).deep.to.be.equal(['alice']); }); + + it('should merge read state without overwriting existing users', async () => { + const client = await getClientWithUser(); + const channel = client.channel('messaging', uuidv4()); + const existingUser = { id: 'existing-user' }; + const newUser = { id: 'new-user' }; + channel.messageReceiptsTracker.setPendingReadStoreReconcileMeta({ + changedUserIds: [existingUser.id], + }); + 
channel.state.read = { + [existingUser.id]: { + last_read: new Date('2026-01-01T00:00:00.000Z'), + unread_messages: 1, + user: existingUser, + }, + }; + + channel._initializeState({ + read: [ + { + last_delivered_at: new Date('2026-01-02T00:00:00.000Z').toISOString(), + last_delivered_message_id: 'delivered-message-id', + last_read: new Date('2026-01-02T00:00:00.000Z').toISOString(), + last_read_message_id: 'read-message-id', + unread_messages: 0, + user: newUser, + }, + ], + }); + + expect(channel.state.read[existingUser.id]).toBeDefined(); + expect(channel.state.read[newUser.id]).toBeDefined(); + expect(channel.state.read[newUser.id].last_read_message_id).toBe('read-message-id'); + expect(channel.messageReceiptsTracker.getUserProgress(existingUser.id)).toBeTruthy(); + expect(channel.messageReceiptsTracker.getUserProgress(newUser.id)).toBeTruthy(); + }); }); describe('Channel.query', async () => { diff --git a/test/unit/channel_state.test.js b/test/unit/channel_state.test.js index 8ac7345b85..99748a5135 100644 --- a/test/unit/channel_state.test.js +++ b/test/unit/channel_state.test.js @@ -1654,6 +1654,326 @@ describe('messagePagination', () => { }); }); +describe('ChannelState members store', () => { + it('initializes members store with an empty members map', () => { + const state = new ChannelState(); + + expect(state.members).to.eql({}); + expect(state.member_count).to.equal(0); + expect(state.membersStore.getLatestValue()).to.eql({ members: {}, memberCount: 0 }); + }); + + it('keeps members getter/setter backward compatible while syncing the store', () => { + const state = new ChannelState(); + const members = { + alice: { user: { id: 'alice' }, user_id: 'alice' }, + }; + + state.members = members; + + expect(state.members).to.equal(members); + expect(state.membersStore.getLatestValue()).to.eql({ + memberCount: 0, + members, + }); + }); + + it('keeps member_count getter/setter backward compatible while syncing the store', () => { + const state = new 
ChannelState(); + + state.member_count = 42; + + expect(state.member_count).to.equal(42); + expect(state.membersStore.getLatestValue()).to.eql({ + memberCount: 42, + members: {}, + }); + }); +}); + +describe('ChannelState member count bridge', () => { + it('initializes membersStore memberCount from channel.data.member_count', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { member_count: 3 }); + const state = channel.state; + + expect(state.member_count).to.equal(3); + expect(state.membersStore.getLatestValue()).to.eql({ + memberCount: 3, + members: {}, + }); + expect(channel.data?.member_count).to.equal(3); + }); + + it('syncs memberCount when channel.data is replaced', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { member_count: 1 }); + const state = channel.state; + + channel.data = { ...channel.data, member_count: 7 }; + state.syncMemberCountFromChannelData(channel.data); + + expect(state.member_count).to.equal(7); + expect(state.membersStore.getLatestValue()).to.eql({ + memberCount: 7, + members: {}, + }); + expect(channel.data?.member_count).to.equal(7); + }); + + it('keeps backward-compatible channel.data.member_count assignments in sync', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', {}); + const state = channel.state; + + channel.data.member_count = 5; + + expect(state.member_count).to.equal(5); + expect(state.membersStore.getLatestValue()).to.eql({ + memberCount: 5, + members: {}, + }); + expect(channel.data.member_count).to.equal(5); + }); +}); + +describe('ChannelState read store', () => { + it('initializes read store with an empty read map', () => { + const state = new ChannelState(); + + expect(state.read).to.eql({}); + expect(state.readStore.getLatestValue()).to.eql({ read: {} }); + }); + + it('keeps read getter/setter backward compatible while syncing the store', () => { + const state = new 
ChannelState(); + const read = { + alice: { + last_read: new Date('2026-02-28T00:00:00.000Z'), + unread_messages: 3, + user: { id: 'alice' }, + }, + }; + + state.read = read; + + expect(state.read).to.equal(read); + expect(state.readStore.getLatestValue()).to.eql({ read }); + }); +}); + +describe('ChannelState watcher count store', () => { + it('initializes watcher count store with zero', () => { + const state = new ChannelState(); + + expect(state.watcher_count).to.equal(0); + expect(state.watcherStore.getLatestValue()).to.eql({ + watcherCount: 0, + watchers: {}, + }); + }); + + it('keeps watcher_count getter/setter backward compatible while syncing the store', () => { + const state = new ChannelState(); + + state.watcher_count = 42; + + expect(state.watcher_count).to.equal(42); + expect(state.watcherStore.getLatestValue()).to.eql({ + watcherCount: 42, + watchers: {}, + }); + }); +}); + +describe('ChannelState watchers store', () => { + it('initializes watchers store with an empty watchers map', () => { + const state = new ChannelState(); + + expect(state.watchers).to.eql({}); + expect(state.watcherStore.getLatestValue()).to.eql({ + watcherCount: 0, + watchers: {}, + }); + }); + + it('keeps watchers getter/setter backward compatible while syncing the store', () => { + const state = new ChannelState(); + const watchers = { + alice: { id: 'alice' }, + }; + + state.watchers = watchers; + + expect(state.watchers).to.equal(watchers); + expect(state.watcherStore.getLatestValue()).to.eql({ + watcherCount: 0, + watchers, + }); + }); +}); + +describe('ChannelState muted users store', () => { + it('initializes muted users store with an empty list', () => { + const state = new ChannelState(); + + expect(state.mutedUsers).to.eql([]); + expect(state.mutedUsersStore.getLatestValue()).to.eql({ mutedUsers: [] }); + }); + + it('keeps mutedUsers getter/setter backward compatible while syncing the store', () => { + const state = new ChannelState(); + const mutedUsers = [{ id: 
'alice' }]; + + state.mutedUsers = mutedUsers; + + expect(state.mutedUsers).to.equal(mutedUsers); + expect(state.mutedUsersStore.getLatestValue()).to.eql({ mutedUsers }); + }); +}); + +describe('ChannelState typing store', () => { + it('initializes typing store with an empty typing map', () => { + const state = new ChannelState(); + + expect(state.typing).to.eql({}); + expect(state.typingStore.getLatestValue()).to.eql({ typing: {} }); + }); + + it('keeps typing store and textComposer typing in sync via setTypingEvent/removeTypingEvent', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', {}); + const state = channel.state; + const typingStartEvent = { + type: 'typing.start', + user: { id: 'alice' }, + }; + + state.setTypingEvent('alice', typingStartEvent); + + expect(state.typing).to.have.property('alice'); + expect(state.typingStore.getLatestValue().typing).to.have.property('alice'); + expect(channel.messageComposer.textComposer.typing).to.have.property('alice'); + + state.removeTypingEvent('alice'); + + expect(state.typing).to.not.have.property('alice'); + expect(state.typingStore.getLatestValue().typing).to.not.have.property('alice'); + expect(channel.messageComposer.textComposer.typing).to.not.have.property('alice'); + }); +}); + +describe('ChannelState own capabilities store', () => { + it('does not redefine channel.data as an accessor property', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { + own_capabilities: ['send-message'], + }); + const descriptor = Object.getOwnPropertyDescriptor(channel, 'data'); + + expect(descriptor).toBeDefined(); + expect('value' in descriptor).toBe(true); + expect('get' in descriptor).toBe(false); + expect('set' in descriptor).toBe(false); + }); + + it('initializes ownCapabilitiesStore from channel.data.own_capabilities', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { + own_capabilities: 
['send-message', 'upload-file'], + }); + const state = channel.state; + + expect(state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['send-message', 'upload-file'], + }); + expect(channel.data?.own_capabilities).to.eql(['send-message', 'upload-file']); + }); + + it('syncs ownCapabilitiesStore when channel.data is replaced', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { + own_capabilities: ['send-message'], + }); + const state = channel.state; + + channel.data = { + ...channel.data, + own_capabilities: ['pin-message'], + }; + state.syncOwnCapabilitiesFromChannelData(channel.data); + + expect(state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['pin-message'], + }); + expect(channel.data?.own_capabilities).to.eql(['pin-message']); + }); + + it('keeps backward-compatible channel.data.own_capabilities assignments in sync', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', {}); + const state = channel.state; + + channel.data.own_capabilities = ['delete-message']; + + expect(state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['delete-message'], + }); + expect(channel.data.own_capabilities).to.eql(['delete-message']); + }); + + it('only wraps own_capabilities and keeps other channel.data fields as value properties', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { + hidden: false, + member_count: 3, + own_capabilities: ['send-message'], + }); + + const ownCapabilitiesDescriptor = Object.getOwnPropertyDescriptor( + channel.data, + 'own_capabilities', + ); + const hiddenDescriptor = Object.getOwnPropertyDescriptor(channel.data, 'hidden'); + const memberCountDescriptor = Object.getOwnPropertyDescriptor( + channel.data, + 'member_count', + ); + + expect(ownCapabilitiesDescriptor).toBeDefined(); + expect('get' in ownCapabilitiesDescriptor).toBe(true); + expect('set' in 
ownCapabilitiesDescriptor).toBe(true); + expect(hiddenDescriptor).toBeDefined(); + expect('value' in hiddenDescriptor).toBe(true); + expect('get' in hiddenDescriptor).toBe(false); + expect('set' in hiddenDescriptor).toBe(false); + expect(memberCountDescriptor).toBeDefined(); + expect('get' in memberCountDescriptor).toBe(true); + expect('set' in memberCountDescriptor).toBe(true); + }); + + it('does not overwrite non-capability fields when own_capabilities is updated', () => { + const client = new StreamChat(); + const channel = new Channel(client, 'type', 'id', { + hidden: false, + member_count: 3, + own_capabilities: ['send-message'], + }); + const state = channel.state; + + channel.data.hidden = true; + channel.data.member_count = 5; + channel.data.own_capabilities = ['pin-message']; + + expect(channel.data.hidden).to.equal(true); + expect(channel.data.member_count).to.equal(5); + expect(state.member_count).to.equal(5); + expect(state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['pin-message'], + }); + }); +}); + describe('loadMessageIntoState', () => { let state; diff --git a/test/unit/client.test.js b/test/unit/client.test.js index 35e26adea4..f97ebec14a 100644 --- a/test/unit/client.test.js +++ b/test/unit/client.test.js @@ -131,6 +131,48 @@ describe('StreamChat getInstance', () => { }); }); +describe('StreamChat config(s) store', () => { + it('initializes configsStore and keeps configs access backward compatible', () => { + const client = new StreamChat('key', 'secret'); + + expect(client.configs).to.eql({}); + expect(client.configsStore.getLatestValue()).to.eql({ configs: {} }); + + const nextConfigs = { 'messaging:next': { typing_events: true } }; + client.configs = nextConfigs; + + expect(client.configs).to.equal(nextConfigs); + expect(client.configsStore.getLatestValue()).to.eql({ configs: nextConfigs }); + }); + + it('updates configsStore through _addChannelConfig when cache is enabled', () => { + const client = new 
StreamChat('key', 'secret'); + + client._addChannelConfig({ + cid: 'messaging:channel-1', + config: { replies: true }, + }); + + expect(client.configsStore.getLatestValue()).to.eql({ + configs: { + 'messaging:channel-1': { replies: true }, + }, + }); + }); + + it('does not update configsStore through _addChannelConfig when cache is disabled', () => { + const client = new StreamChat('key', 'secret'); + client._cacheEnabled = () => false; + + client._addChannelConfig({ + cid: 'messaging:channel-1', + config: { replies: true }, + }); + + expect(client.configsStore.getLatestValue()).to.eql({ configs: {} }); + }); +}); + describe('Client userMuteStatus', function () { const client = new StreamChat('', ''); const user = { id: 'user' }; @@ -724,6 +766,44 @@ describe('StreamChat.queryChannels', async () => { postStub.restore(); }); + it('should sync channel data-backed stores when hydrating channels from queryChannels', async () => { + const client = await getClientWithUser(); + const mockedChannelsQueryResponse = [ + { + ...mockChannelQueryResponse, + channel: { + ...mockChannelQueryResponse.channel, + member_count: 7, + own_capabilities: ['send-message', 'read-events'], + }, + messages: Array.from( + { length: DEFAULT_QUERY_CHANNEL_MESSAGE_LIST_PAGE_SIZE }, + generateMsg, + ), + }, + ]; + const postStub = sinon + .stub(client, 'post') + .returns(Promise.resolve({ channels: mockedChannelsQueryResponse })); + + const [channel] = await client.queryChannels(); + + expect(channel.state.member_count).to.equal(7); + expect(channel.state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['send-message', 'read-events'], + }); + + channel.data.member_count = 8; + channel.data.own_capabilities = ['send-message']; + + expect(channel.state.member_count).to.equal(8); + expect(channel.state.ownCapabilitiesStore.getLatestValue()).to.eql({ + ownCapabilities: ['send-message'], + }); + + postStub.restore(); + }); + it('should return the raw channels response from 
queryChannelsRequest', async () => { const client = await getClientWithUser(); const mockedChannelsQueryResponse = Array.from({ length: 10 }, () => ({ diff --git a/test/unit/messageDelivery/MessageReceiptsTracker.test.ts b/test/unit/messageDelivery/MessageReceiptsTracker.test.ts index 380aad2d21..a0f3348cc2 100644 --- a/test/unit/messageDelivery/MessageReceiptsTracker.test.ts +++ b/test/unit/messageDelivery/MessageReceiptsTracker.test.ts @@ -5,6 +5,8 @@ import { ReadResponse, UserResponse, } from '../../../src'; +import { StateStore } from '../../../src/store'; +import type { Channel } from '../../../src/channel'; const ownUserId = 'author'; const U = (id: string): UserResponse => ({ id, name: id }); // matches UserResponse shape for the service @@ -20,11 +22,30 @@ const msgs = [ const byTs = new Map(msgs.map((m) => [m.ts, m])); const ref = (ts: number): MsgRef => ({ timestampMs: ts, msgId: byTs.get(ts)!.id }); -// Message locator used by the service (O(1) lookup by exact timestamp) -const makeLocator = () => (timestampMs?: number) => { - if (!timestampMs) return null; +const defaultFindMessageByTimestamp = (timestampMs?: number) => { + if (!timestampMs) return undefined; const m = byTs.get(timestampMs); - return m ? { timestampMs: m.ts, msgId: m.id } : null; + return m ? 
{ id: m.id } : undefined; +}; + +const createChannelMock = ({ + findMessageByTimestamp = defaultFindMessageByTimestamp, +}: { + findMessageByTimestamp?: (timestampMs?: number) => { id: string } | undefined; +} = {}) => { + const readStore = new StateStore({ + read: {}, + }); + + return { + channel: { + state: { + findMessageByTimestamp, + readStore, + }, + } as unknown as Channel, + readStore, + }; }; // ISO builders (service parses Date strings) @@ -37,9 +58,34 @@ const ids = (users: any[]) => users.map((u) => u.id); describe('MessageDeliveryReadTracker', () => { let tracker: MessageReceiptsTracker; + let channelMock: ReturnType; beforeEach(() => { - tracker = new MessageReceiptsTracker({ locateMessage: makeLocator() }); + channelMock = createChannelMock(); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); + }); + + describe('constructor', () => { + it('allows locateMessage constructor override while requiring channel', () => { + const customLocateMessage = vi.fn((timestampMs: number) => ({ + timestampMs, + msgId: 'custom', + })); + const trackerWithCustomLocator = new MessageReceiptsTracker({ + channel: channelMock.channel, + locateMessage: customLocateMessage, + }); + + trackerWithCustomLocator.onMessageRead({ + user: U('compat-user'), + readAt: iso(2000), + }); + + expect(customLocateMessage).toHaveBeenCalledWith(2000); + expect( + trackerWithCustomLocator.getUserProgress('compat-user')?.lastReadRef.msgId, + ).toBe('custom'); + }); }); describe('ingestInitial', () => { @@ -126,10 +172,12 @@ describe('MessageDeliveryReadTracker', () => { }); it('ignores read events with unknown timestamps (locator returns null)', () => { - // re-init with a locator that knows only m1..m3 (m4 is unknown) - const locator = (ts?: number) => - ts && ts <= 3000 ? 
{ timestampMs: ts, msgId: byTs.get(ts)!.id } : null; - tracker = new MessageReceiptsTracker({ locateMessage: locator }); + // re-init with channel state that knows only m1..m3 (m4 is unknown) + channelMock = createChannelMock({ + findMessageByTimestamp: (ts?: number) => + ts && ts <= 3000 ? { id: byTs.get(ts)!.id } : undefined, + }); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); const dave = U('dave'); tracker.onMessageRead({ user: dave, readAt: iso(4000) }); // unknown -> ignored @@ -143,11 +191,12 @@ describe('MessageDeliveryReadTracker', () => { }); it('prevents search for message if last read message id is provided', () => { - const locator = vi.fn().mockImplementation(() => {}); - tracker = new MessageReceiptsTracker({ locateMessage: locator }); + const findMessageByTimestamp = vi.fn().mockImplementation(() => {}); + channelMock = createChannelMock({ findMessageByTimestamp }); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); const user = U('frank'); tracker.onMessageRead({ user, readAt: iso(3000), lastReadMessageId: 'X' }); // unknown -> ignored - expect(locator).not.toHaveBeenCalled(); + expect(findMessageByTimestamp).not.toHaveBeenCalled(); expect(tracker.getUserProgress('frank')).toStrictEqual({ lastDeliveredRef: { msgId: 'X', @@ -201,9 +250,11 @@ describe('MessageDeliveryReadTracker', () => { }); it('ignores delivered events with unknown timestamps (locator returns null)', () => { - const locator = (t?: number) => - t && t <= 2000 ? { timestampMs: t, msgId: byTs.get(t)!.id } : null; - tracker = new MessageReceiptsTracker({ locateMessage: locator }); + channelMock = createChannelMock({ + findMessageByTimestamp: (t?: number) => + t && t <= 2000 ? 
{ id: byTs.get(t)!.id } : undefined, + }); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); const frank = U('frank'); tracker.onMessageDelivered({ user: frank, deliveredAt: iso(3000) }); // unknown -> ignored @@ -215,15 +266,16 @@ describe('MessageDeliveryReadTracker', () => { }); it('prevents search for message if last read message id is provided', () => { - const locator = vi.fn().mockImplementation(() => {}); - tracker = new MessageReceiptsTracker({ locateMessage: locator }); + const findMessageByTimestamp = vi.fn().mockImplementation(() => {}); + channelMock = createChannelMock({ findMessageByTimestamp }); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); const user = U('frank'); tracker.onMessageDelivered({ user, deliveredAt: iso(3000), lastDeliveredMessageId: 'X', }); // unknown -> ignored - expect(locator).not.toHaveBeenCalled(); + expect(findMessageByTimestamp).not.toHaveBeenCalled(); expect(tracker.getUserProgress('frank')).toStrictEqual({ lastDeliveredRef: { msgId: 'X', @@ -311,8 +363,9 @@ describe('MessageDeliveryReadTracker', () => { }); it('does not call locateMessage when lastReadMessageId is provided', () => { - const locator = vi.fn().mockImplementation(makeLocator()); - tracker = new MessageReceiptsTracker({ locateMessage: locator }); + const findMessageByTimestamp = vi.fn().mockImplementation(defaultFindMessageByTimestamp); + channelMock = createChannelMock({ findMessageByTimestamp }); + tracker = new MessageReceiptsTracker({ channel: channelMock.channel }); tracker.onNotificationMarkUnread({ user, @@ -325,7 +378,42 @@ describe('MessageDeliveryReadTracker', () => { expect(userProgress.lastReadRef).toEqual(ref(2000)); // ensure locator wasn’t used to derive the read ref - expect(locator).not.toHaveBeenCalled(); + expect(findMessageByTimestamp).not.toHaveBeenCalled(); + }); + }); + + describe('subscriptions', () => { + it('reconciles from readStore emissions when subscribed and stops after 
unsubscribe', () => { + const user = U('subscribed-user'); + tracker.registerSubscriptions(); + tracker.setPendingReadStoreReconcileMeta({ changedUserIds: [user.id] }); + + channelMock.readStore.next({ + read: { + [user.id]: { + last_read: new Date(2000), + user, + unread_messages: 0, + last_read_message_id: 'm2', + }, + }, + }); + expect(tracker.getUserProgress(user.id)?.lastReadRef).toEqual(ref(2000)); + + tracker.unregisterSubscriptions(); + channelMock.readStore.next({ + read: { + [user.id]: { + last_read: new Date(3000), + user, + unread_messages: 0, + last_read_message_id: 'm3', + }, + }, + }); + + // no longer subscribed -> unchanged + expect(tracker.getUserProgress(user.id)?.lastReadRef).toEqual(ref(2000)); }); }); @@ -461,4 +549,203 @@ describe('MessageDeliveryReadTracker', () => { expect(ids(tracker.readersForMessage(ref(4000)))).toEqual(['x']); }); }); + + describe('snapshotStore', () => { + it('updates revision on every ingestInitial call', () => { + const snapshot = [ + { user: U('alice'), last_read: iso(2000), last_delivered_at: iso(2000) }, + ]; + + tracker.ingestInitial(snapshot); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + expect(tracker.snapshotStore.getLatestValue().readersByMessageId).toEqual({ + m2: [U('alice')], + }); + + // same state still emits for full ingest calls + tracker.ingestInitial(snapshot); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(2); + + // changed state -> new revision + tracker.ingestInitial([ + { user: U('alice'), last_read: iso(3000), last_delivered_at: iso(3000) }, + ]); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(3); + }); + + it('updates revision for effective message.read changes only', () => { + const user = U('reader'); + + tracker.onMessageRead({ user, readAt: iso(2000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + // same/older read should be a no-op + tracker.onMessageRead({ user, readAt: iso(2000) }); + 
tracker.onMessageRead({ user, readAt: iso(1000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + tracker.onMessageRead({ user, readAt: iso(3000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(2); + }); + + it('updates revision for effective message.delivered changes only', () => { + const user = U('delivered-user'); + + tracker.onMessageDelivered({ user, deliveredAt: iso(2000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + // same/older delivery should be a no-op + tracker.onMessageDelivered({ user, deliveredAt: iso(2000) }); + tracker.onMessageDelivered({ user, deliveredAt: iso(1000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + tracker.onMessageDelivered({ user, deliveredAt: iso(3000) }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(2); + }); + + it('updates revision for effective notification.mark_unread changes only', () => { + const user = U('mark-unread-user'); + + tracker.onMessageRead({ user, readAt: iso(3000), lastReadMessageId: 'm3' }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + tracker.onNotificationMarkUnread({ + user, + lastReadAt: iso(2000), + lastReadMessageId: 'm2', + }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(2); + + // same boundary -> no-op + tracker.onNotificationMarkUnread({ + user, + lastReadAt: iso(2000), + lastReadMessageId: 'm2', + }); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(2); + }); + }); + + describe('reconcileFromReadStore', () => { + it('reconciles changed/removed users from metadata deltas', () => { + const alice = U('alice'); + const bob = U('bob'); + const carol = U('carol'); + const previousReadState = { + [alice.id]: { + last_read: new Date(2000), + unread_messages: 0, + user: alice, + last_read_message_id: 'm2', + }, + [bob.id]: { + last_read: new Date(3000), + unread_messages: 0, + user: bob, + last_read_message_id: 'm3', + 
last_delivered_at: new Date(3000), + last_delivered_message_id: 'm3', + }, + }; + const nextReadState = { + [bob.id]: { + last_read: new Date(4000), + unread_messages: 0, + user: bob, + last_read_message_id: 'm4', + last_delivered_at: new Date(4000), + last_delivered_message_id: 'm4', + }, + [carol.id]: { + last_read: new Date(2000), + unread_messages: 0, + user: carol, + last_read_message_id: 'm2', + last_delivered_at: new Date(2000), + last_delivered_message_id: 'm2', + }, + }; + + tracker.ingestInitial([ + { user: alice, last_read: iso(2000), last_delivered_at: iso(2000) }, + { user: bob, last_read: iso(3000), last_delivered_at: iso(3000) }, + ]); + + tracker.reconcileFromReadStore({ + previousReadState, + nextReadState, + meta: { + changedUserIds: [bob.id, carol.id], + removedUserIds: [alice.id], + }, + }); + + expect(tracker.getUserProgress(alice.id)).toBeNull(); + expect(tracker.getUserProgress(bob.id)?.lastReadRef).toEqual(ref(4000)); + expect(tracker.getUserProgress(carol.id)?.lastReadRef).toEqual(ref(2000)); + }); + + it('ignores non-bootstrap reconcile when metadata is absent', () => { + const user = U('missing-meta-user'); + + tracker.reconcileFromReadStore({ + previousReadState: {}, + nextReadState: { + [user.id]: { + last_read: new Date(3000), + unread_messages: 0, + user, + last_read_message_id: 'm3', + last_delivered_at: new Date(3000), + last_delivered_message_id: 'm3', + }, + }, + }); + + expect(tracker.getUserProgress(user.id)).toBeNull(); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(0); + }); + + it('applies only metadata-declared user deltas', () => { + const user = U('meta-user'); + tracker.ingestInitial([ + { + user, + last_read: iso(2000), + last_delivered_at: iso(2000), + last_read_message_id: 'm2', + last_delivered_message_id: 'm2', + }, + ]); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + + tracker.reconcileFromReadStore({ + previousReadState: { + [user.id]: { + last_read: new Date(2000), + 
unread_messages: 0, + user, + last_read_message_id: 'm2', + last_delivered_at: new Date(2000), + last_delivered_message_id: 'm2', + }, + }, + nextReadState: { + [user.id]: { + last_read: new Date(4000), + unread_messages: 0, + user, + last_read_message_id: 'm4', + last_delivered_at: new Date(4000), + last_delivered_message_id: 'm4', + }, + }, + meta: { changedUserIds: [] }, + }); + + // Metadata drives reconciliation; undeclared users are ignored. + expect(tracker.getUserProgress(user.id)?.lastReadRef).toEqual(ref(2000)); + expect(tracker.snapshotStore.getLatestValue().revision).toBe(1); + }); + }); }); From 892f6e6e17b115192566c51b0d6e2194b98338e6 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 3 Mar 2026 17:19:34 +0100 Subject: [PATCH 15/31] test: fix message composer tests --- .../MessageOperationStatePolicy.ts | 12 +- .../MessageComposer/messageComposer.test.ts | 215 ++++++++++++++++-- .../unit/MessageComposer/textComposer.test.ts | 1 + 3 files changed, 198 insertions(+), 30 deletions(-) diff --git a/src/messageOperations/MessageOperationStatePolicy.ts b/src/messageOperations/MessageOperationStatePolicy.ts index 82cc253356..329690d835 100644 --- a/src/messageOperations/MessageOperationStatePolicy.ts +++ b/src/messageOperations/MessageOperationStatePolicy.ts @@ -52,11 +52,13 @@ export class MessageOperationStatePolicy { const formatted = formatMessage({ ...messageFromResponse, status: 'received' }); const existing = this.ctx.get(messageId); - if ( - !existing || - existing.updated_at.getTime() < formatted.updated_at.getTime() || - existing.status === 'sending' - ) { + const serverNewer = + !existing || formatted.updated_at.getTime() > existing.updated_at.getTime(); + const serverSameOrNewer = + !existing || formatted.updated_at.getTime() >= existing.updated_at.getTime(); + const existingIsOurOptimisticSend = existing?.status === 'sending'; + + if (serverNewer || (existingIsOurOptimisticSend && serverSameOrNewer)) { this.ctx.ingest(formatted); } } diff 
--git a/test/unit/MessageComposer/messageComposer.test.ts b/test/unit/MessageComposer/messageComposer.test.ts index c72eab5d26..b2dfe626cb 100644 --- a/test/unit/MessageComposer/messageComposer.test.ts +++ b/test/unit/MessageComposer/messageComposer.test.ts @@ -976,6 +976,7 @@ describe('MessageComposer', () => { expect(result).toEqual({ localMessage: { attachments: [], + cid: 'messaging:test-channel-id', created_at: expect.any(Date), deleted_at: null, error: null, @@ -1043,6 +1044,7 @@ describe('MessageComposer', () => { expect(result).toEqual({ localMessage: { attachments: [{ type: 'file' }], + cid: 'messaging:test-channel-id', created_at: date, deleted_at: null, error: null, @@ -1120,25 +1122,175 @@ describe('MessageComposer', () => { }); describe('sendMessage', () => { - it.fails('performs optimistic update before sending the message'); - it.fails( - 'updates the message in state after successful response if message has not arrived over WS', - ); - it.fails( - 'does not update the message in state after successful response if message has arrived over WS and the update timestamp is <= existing message timestamp', - ); - it.fails( - 'does not update the message in state if it already exists on the server and in the local state as not delivered', - ); - it.fails( - 'does not update the message in state if it already exists on the server and in the local state as not failed', - ); - it.fails( - 'updates the message in state if it already exists on the server and in the local state with status sending', - ); - it.fails( - 'updates the message in state if it does not exist on the server and the send request failed', - ); + it('performs optimistic update before sending the message', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + expect(composed).toBeDefined(); + let resolveSend: (v: { message: MessageResponse }) => void = () => {}; + const 
sendPromise = mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: () => + new Promise((resolve) => { + resolveSend = resolve; + }), + }); + await Promise.resolve(); + const optimistic = mockChannel.messagePaginator.getItem( + composed!.localMessage.id, + ); + expect(optimistic?.status).toBe('sending'); + resolveSend({ message: generateMsg({ id: composed!.localMessage.id }) }); + await sendPromise; + }); + + it('updates the message in state after successful response if message has not arrived over WS', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const serverMessage = generateMsg({ + id: composed!.localMessage.id, + updated_at: new Date( + composed!.localMessage.updated_at.getTime() + 100, + ).toISOString(), + }); + await mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: async () => ({ message: serverMessage }), + }); + const after = mockChannel.messagePaginator.getItem(composed!.localMessage.id); + expect(after?.status).toBe('received'); + }); + + it('does not update the message in state after successful response if message has arrived over WS and the update timestamp is <= existing message timestamp', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const messageId = composed!.localMessage.id; + const composedUpdatedAt = composed!.localMessage.updated_at.getTime(); + const olderServerTime = new Date(composedUpdatedAt - 5000); + const serverMessage = generateMsg({ + id: messageId, + updated_at: olderServerTime.toISOString(), + }); + await mockChannel.sendMessageWithLocalUpdate({ + localMessage: 
composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: async () => ({ message: serverMessage }), + }); + const after = mockChannel.messagePaginator.getItem(messageId); + expect(after?.status).toBe('sending'); + expect(after?.updated_at.getTime()).toBeGreaterThanOrEqual( + composedUpdatedAt - 100, + ); + }); + + it('does not update the message in state if it already exists on the server and in the local state as not delivered', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const messageId = composed!.localMessage.id; + const composedUpdatedAt = composed!.localMessage.updated_at.getTime(); + const olderServerTime = new Date(composedUpdatedAt - 2000); + await mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: async () => ({ + message: generateMsg({ + id: messageId, + updated_at: olderServerTime.toISOString(), + }), + }), + }); + const after = mockChannel.messagePaginator.getItem(messageId); + expect(after?.status).toBe('sending'); + expect(after?.updated_at.getTime()).toBeGreaterThanOrEqual( + composedUpdatedAt - 100, + ); + }); + + it('does not update the message in state if it already exists on the server and in the local state as not failed', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const messageId = composed!.localMessage.id; + const composedUpdatedAt = composed!.localMessage.updated_at.getTime(); + const olderServerTime = new Date(composedUpdatedAt - 1000); + await mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: async () => ({ 
+ message: generateMsg({ + id: messageId, + updated_at: olderServerTime.toISOString(), + }), + }), + }); + const after = mockChannel.messagePaginator.getItem(messageId); + expect(after?.status).toBe('sending'); + expect(after?.updated_at.getTime()).toBe(composedUpdatedAt); + }); + + it('updates the message in state if it already exists on the server and in the local state with status sending', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const messageId = composed!.localMessage.id; + const existingSending = { + ...composed!.localMessage, + status: 'sending' as const, + updated_at: new Date(Date.now() - 5000), + }; + mockChannel.messagePaginator.ingestItem(existingSending); + const serverUpdatedAt = new Date( + composed!.localMessage.updated_at.getTime() + 100, + ); + await mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: async () => ({ + message: generateMsg({ + id: messageId, + updated_at: serverUpdatedAt.toISOString(), + }), + }), + }); + const after = mockChannel.messagePaginator.getItem(messageId); + expect(after?.status).toBe('received'); + expect(after?.updated_at.getTime()).toBe(serverUpdatedAt.getTime()); + }); + + it('updates the message in state if it does not exist on the server and the send request failed', async () => { + const { messageComposer, mockChannel } = setup(); + messageComposer.textComposer.setText('Hello'); + const composed = await messageComposer.compose(); + const messageId = composed!.localMessage.id; + const apiError = Object.assign(new Error('Network error'), { + code: 16, + response: { statusCode: 500 }, + }); + await expect( + mockChannel.sendMessageWithLocalUpdate({ + localMessage: composed!.localMessage, + message: composed!.message, + options: composed!.sendOptions, + sendMessageRequestFn: 
async () => { + throw apiError; + }, + }), + ).rejects.toThrow('Network error'); + const after = mockChannel.messagePaginator.getItem(messageId); + expect(after?.status).toBe('failed'); + expect(after?.error).toBeDefined(); + }); }); it('should compose draft', async () => { @@ -1818,16 +1970,29 @@ describe('MessageComposer', () => { }); describe('subscribeMessageComposerSetupStateChange', () => { - it('should apply modifications when setup state changes', () => { + it('calls setupFunction with { composer } when client setMessageComposerSetupFunction is invoked', () => { const { messageComposer, mockClient } = setup(); - const mockModifications = vi.fn(); + const setupFn = vi.fn(); messageComposer.registerSubscriptions(); - mockClient._messageComposerSetupState.next({ - setupFunction: mockModifications, - }); + mockClient.setMessageComposerSetupFunction(setupFn); + + expect(setupFn).toHaveBeenCalledWith({ composer: messageComposer }); + }); + + it('invokes previous tearDown before applying new setup when setup function changes', () => { + const { messageComposer, mockClient } = setup(); + const tearDown1 = vi.fn(); + const setup1 = vi.fn().mockReturnValue(tearDown1); + const setup2 = vi.fn(); + + messageComposer.registerSubscriptions(); + mockClient.setMessageComposerSetupFunction(setup1); + expect(tearDown1).not.toHaveBeenCalled(); - expect(mockModifications).toHaveBeenCalledWith({ composer: messageComposer }); + mockClient.setMessageComposerSetupFunction(setup2); + expect(tearDown1).toHaveBeenCalledOnce(); + expect(setup2).toHaveBeenCalledWith({ composer: messageComposer }); }); }); diff --git a/test/unit/MessageComposer/textComposer.test.ts b/test/unit/MessageComposer/textComposer.test.ts index ab6453f80b..88dd5ad007 100644 --- a/test/unit/MessageComposer/textComposer.test.ts +++ b/test/unit/MessageComposer/textComposer.test.ts @@ -40,6 +40,7 @@ vi.mock('../../../src/utils', () => ({ isLocalMessage: vi.fn().mockReturnValue(true), formatMessage: 
vi.fn().mockImplementation((msg) => msg), throttle: vi.fn().mockImplementation((fn) => fn), + normalizeQuerySort: vi.fn().mockReturnValue([{ field: 'created_at', direction: -1 }]), })); const setup = ({ From 3a7c93894491349af8b5e6231d4863de7bf75a33 Mon Sep 17 00:00:00 2001 From: martincupela Date: Tue, 3 Mar 2026 20:54:06 +0100 Subject: [PATCH 16/31] feat: add message send retry cache --- src/messageOperations/MessageOperations.ts | 122 +++++++++++++-- .../MessageOperations.test.ts | 140 +++++++++++++++++- 2 files changed, 245 insertions(+), 17 deletions(-) diff --git a/src/messageOperations/MessageOperations.ts b/src/messageOperations/MessageOperations.ts index b2833cb2b9..e5fee1cc01 100644 --- a/src/messageOperations/MessageOperations.ts +++ b/src/messageOperations/MessageOperations.ts @@ -9,9 +9,19 @@ import type { OperationRequestFn, } from './types'; +const FAILED_SEND_CACHE_MAX_SIZE = 100; +const FAILED_SEND_CACHE_TTL_MS = 5 * 60 * 1000; + +type FailedSendCacheEntry = { + message: Message; + options?: OperationParams<'send'>['options']; + cachedAt: number; +}; + export class MessageOperations { private ctx: MessageOperationsContext; private policy: MessageOperationStatePolicy; + private failedSendCache = new Map(); constructor(ctx: MessageOperationsContext) { this.ctx = ctx; @@ -24,6 +34,56 @@ export class MessageOperations { : message; } + private pruneExpiredFailedSendCache() { + const now = Date.now(); + + for (const [messageId, entry] of this.failedSendCache) { + if (now - entry.cachedAt > FAILED_SEND_CACHE_TTL_MS) { + this.clearCachedFailedSend(messageId); + } + } + } + + private cacheFailedSend(params: { + messageId: string; + message: Message; + options?: OperationParams<'send'>['options']; + }) { + this.pruneExpiredFailedSendCache(); + + if ( + !this.failedSendCache.has(params.messageId) && + this.failedSendCache.size >= FAILED_SEND_CACHE_MAX_SIZE + ) { + const oldestMessageId = this.failedSendCache.keys().next().value; + if (oldestMessageId) { + 
this.clearCachedFailedSend(oldestMessageId); + } + } + + this.failedSendCache.set(params.messageId, { + cachedAt: Date.now(), + message: params.message, + options: params.options, + }); + } + + private getCachedFailedSend(messageId: string) { + const cached = this.failedSendCache.get(messageId); + if (!cached) return; + + if (Date.now() - cached.cachedAt > FAILED_SEND_CACHE_TTL_MS) { + this.clearCachedFailedSend(messageId); + return; + } + + return cached; + } + + private clearCachedFailedSend(messageId: string) { + this.failedSendCache.delete(messageId); + } + private async run( params: OperationParams, doRequest: OperationRequestFn, @@ -50,13 +110,24 @@ export class MessageOperations { params.message ?? localMessageToNewMessagePayload(params.localMessage), ); - return await this.run<'send'>( - { ...params, message: messageToSend }, - requestFn ?? - handlers.send ?? - (async (p) => - await this.ctx.defaults.send(p.message ?? messageToSend, p.options)), - ); + try { + await this.run<'send'>( + { ...params, message: messageToSend }, + requestFn ?? + handlers.send ?? + (async (p) => + await this.ctx.defaults.send(p.message ?? messageToSend, p.options)), + ); + + this.clearCachedFailedSend(params.localMessage.id); + } catch (error) { + this.cacheFailedSend({ + messageId: params.localMessage.id, + message: messageToSend, + options: params.options, + }); + throw error; + } } async retry( @@ -64,23 +135,42 @@ export class MessageOperations { requestFn?: OperationRequestFn<'retry'>, ): Promise { const handlers = this.ctx.handlers(); + const cachedPayload = this.getCachedFailedSend(params.localMessage.id); const messageToSend = this.normalizeMessage( - params.message ?? localMessageToNewMessagePayload(params.localMessage), + params.message ?? + cachedPayload?.message ?? + localMessageToNewMessagePayload(params.localMessage), ); + const optionsToSend = params.options ?? 
cachedPayload?.options; const send = handlers.send; const sendAsRetry: OperationRequestFn<'retry'> | undefined = send ? (p) => send({ ...p } as OperationParams<'send'>) : undefined; - return await this.run<'retry'>( - { ...params, message: messageToSend }, - requestFn ?? - handlers.retry ?? - sendAsRetry ?? - (async (p) => - await this.ctx.defaults.send(p.message ?? messageToSend, p.options)), - ); + try { + await this.run<'retry'>( + { + ...params, + message: messageToSend, + options: optionsToSend, + }, + requestFn ?? + handlers.retry ?? + sendAsRetry ?? + (async (p) => + await this.ctx.defaults.send(p.message ?? messageToSend, p.options)), + ); + + this.clearCachedFailedSend(params.localMessage.id); + } catch (error) { + this.cacheFailedSend({ + messageId: params.localMessage.id, + message: messageToSend, + options: optionsToSend, + }); + throw error; + } } async update( diff --git a/test/unit/messageOperations/MessageOperations.test.ts b/test/unit/messageOperations/MessageOperations.test.ts index 2f588bcd7c..378651038c 100644 --- a/test/unit/messageOperations/MessageOperations.test.ts +++ b/test/unit/messageOperations/MessageOperations.test.ts @@ -1,4 +1,4 @@ -import { describe, expect, it } from 'vitest'; +import { describe, expect, it, vi } from 'vitest'; import { MessageOperations } from '../../../src/messageOperations/MessageOperations'; import type { LocalMessage, Message, MessageResponse } from '../../../src/types'; @@ -114,6 +114,144 @@ describe('MessageOperations', () => { expect(store.get('m1')?.status).toBe('failed'); }); + it('reuses cached payload and options when retry is called without explicit params', async () => { + const store: Store = new Map(); + const sendCalls: Array<{ message: Message; options: unknown }> = []; + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async (message, options) => { + sendCalls.push({ message, options }); + if 
(sendCalls.length === 1) { + throw new Error('send failed'); + } + return { message: makeMessageResponse({ id: 'm1', text: 'retried' }) }; + }, + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', text: 'local text' }); + const cachedMessage = { + id: 'm1', + text: 'cached text', + type: 'regular', + } as Message; + const cachedOptions = { skip_push: true }; + + await expect( + ops.send({ + localMessage, + message: cachedMessage, + options: cachedOptions, + }), + ).rejects.toThrow('send failed'); + + await ops.retry({ localMessage }); + + expect(sendCalls[1].message).toEqual(cachedMessage); + expect(sendCalls[1].options).toEqual(cachedOptions); + }); + + it('does not reuse expired cached payload and options', async () => { + vi.useFakeTimers(); + try { + const store: Store = new Map(); + const sendCalls: Array<{ message: Message; options: unknown }> = []; + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async (message, options) => { + sendCalls.push({ message, options }); + if (sendCalls.length === 1) { + throw new Error('send failed'); + } + return { message: makeMessageResponse({ id: 'm1', text: 'retried' }) }; + }, + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', text: 'local text' }); + const cachedMessage = { + id: 'm1', + text: 'cached text', + type: 'regular', + } as Message; + const cachedOptions = { skip_push: true }; + + await expect( + ops.send({ + localMessage, + message: cachedMessage, + options: cachedOptions, + }), + ).rejects.toThrow('send failed'); + + vi.advanceTimersByTime(5 * 60 * 1000 + 1); + + await ops.retry({ localMessage }); + + expect(sendCalls[1].message.text).toBe('local text'); + expect(sendCalls[1].options).toBeUndefined(); + } finally { + vi.useRealTimers(); + } + 
}); + + it('clears cached payload after successful retry', async () => { + const store: Store = new Map(); + const sendCalls: Array<{ message: Message; options: unknown }> = []; + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + send: async (message, options) => { + sendCalls.push({ message, options }); + if (sendCalls.length === 1) { + throw new Error('send failed'); + } + return { + message: makeMessageResponse({ id: 'm1', text: `ok-${sendCalls.length}` }), + }; + }, + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', text: 'local text' }); + const cachedMessage = { + id: 'm1', + text: 'cached text', + type: 'regular', + } as Message; + const cachedOptions = { skip_push: true }; + + await expect( + ops.send({ + localMessage, + message: cachedMessage, + options: cachedOptions, + }), + ).rejects.toThrow('send failed'); + + await ops.retry({ localMessage }); + await ops.retry({ localMessage }); + + expect(sendCalls[1].message).toEqual(cachedMessage); + expect(sendCalls[1].options).toEqual(cachedOptions); + expect(sendCalls[2].message.text).toBe('local text'); + expect(sendCalls[2].options).toBeUndefined(); + }); + it('normalizes outgoing message for send', async () => { const store: Store = new Map(); From 9172f11e9e8af09b1526594cae1484b090198cb4 Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 4 Mar 2026 11:28:00 +0100 Subject: [PATCH 17/31] feat: query replies with MessagePaginator --- src/pagination/paginators/MessagePaginator.ts | 22 +++++++-- src/thread.ts | 5 +- .../paginators/MessagePaginator.test.ts | 48 ++++++++++++++++++- 3 files changed, 68 insertions(+), 7 deletions(-) diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index ea46d3f1f7..e27e0e2f9c 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ 
b/src/pagination/paginators/MessagePaginator.ts @@ -41,6 +41,7 @@ export type MessagePaginatorSort = { created_at: AscDesc } | { created_at: AscDe export type MessagePaginatorFilter = { cid: string; + parent_id?: string; }; const DEFAULT_BACKEND_SORT: MessagePaginatorSort = { @@ -72,6 +73,7 @@ export type MessagePaginatorOptions = { channel: Channel; id?: string; itemIndex?: ItemIndex; + parentMessageId?: string; paginatorOptions?: PaginatorOptions; /** * Controls whether `jumpToTheFirstUnreadMessage()` should prefer the `unreadStateSnapshot` @@ -107,6 +109,7 @@ export type UnreadSnapshotState = { export class MessagePaginator extends BasePaginator { private readonly _id: string; private channel: Channel; + private parentMessageId?: string; private unreadReferencePolicy: 'snapshot' | 'read-state-only'; /** * Independent unread reference state (not tied to `channel.state.read`). @@ -140,6 +143,7 @@ export class MessagePaginator extends BasePaginator item.id }), + parentMessageId, paginatorOptions, unreadReferencePolicy = 'snapshot', }: MessagePaginatorOptions) { @@ -152,6 +156,7 @@ export class MessagePaginator extends BasePaginator ({ cid: this.channel.cid, + ...(this.parentMessageId ? 
{ parent_id: this.parentMessageId } : {}), }); // invoked inside BasePaginator.executeQuery() to keep it as a query descriptor; @@ -258,11 +264,17 @@ export class MessagePaginator extends BasePaginator { let itemIndex: ItemIndex; beforeEach(() => { - channel = { cid: 'channel-id', query: vi.fn() } as unknown as Channel; + channel = { + cid: 'channel-id', + getReplies: vi.fn(), + query: vi.fn(), + } as unknown as Channel; itemIndex = new ItemIndex({ getId: (message) => message.id }); }); @@ -101,6 +105,18 @@ describe('MessagePaginator', () => { expect(paginator.buildFilters()).toEqual({ cid: 'channel-id' }); }); + it('builds thread-scoped filters when parentMessageId is provided', () => { + const paginator = new MessagePaginator({ + channel, + itemIndex, + parentMessageId: 'parent-1', + }); + expect(paginator.buildFilters()).toEqual({ + cid: 'channel-id', + parent_id: 'parent-1', + }); + }); + it('computes next query shape from cursor and direction', () => { const paginator = new MessagePaginator({ channel, itemIndex }); const currentState = paginator.state.getLatestValue(); @@ -172,6 +188,36 @@ describe('MessagePaginator', () => { expect(result.items[0].created_at).toBeInstanceOf(Date); expect(result.items[1].created_at).toBeInstanceOf(Date); }); + + it('queries replies endpoint when parentMessageId is provided', async () => { + const messages = [ + { id: 'first-reply', created_at: '2022-01-01T00:00:00.000Z' }, + { id: 'last-reply', created_at: '2022-01-02T00:00:00.000Z' }, + ]; + (channel.getReplies as unknown as ReturnType).mockResolvedValue({ + messages, + }); + const paginator = new MessagePaginator({ + channel, + itemIndex, + parentMessageId: 'parent-1', + }); + // @ts-expect-error setting protected field for test coverage + paginator._nextQueryShape = { id_gt: 'from-cursor', limit: 30 }; + + const result = await paginator.query({}); + + expect(channel.getReplies).toHaveBeenCalledWith( + 'parent-1', + { id_gt: 'from-cursor', limit: 30 }, + [{ created_at: 1 }], 
+ ); + expect(channel.query).not.toHaveBeenCalled(); + expect(result.tailward).toBe('first-reply'); + expect(result.headward).toBe('last-reply'); + expect(result.items[0].created_at).toBeInstanceOf(Date); + expect(result.items[1].created_at).toBeInstanceOf(Date); + }); }); describe('jumpToMessage()', () => { From a703f7c4e620a2cb973e9526b6ea8331b5a62195 Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 4 Mar 2026 14:30:25 +0100 Subject: [PATCH 18/31] feat: support delete operation on MessageOperations --- src/channel.ts | 41 ++++++++++++ src/messageOperations/MessageOperations.ts | 17 ++++- src/messageOperations/types.ts | 11 +++- src/thread.ts | 26 ++++++++ .../MessageOperations.test.ts | 64 +++++++++++++++++++ 5 files changed, 156 insertions(+), 3 deletions(-) diff --git a/src/channel.ts b/src/channel.ts index f31169f106..102a317655 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -28,6 +28,7 @@ import type { ChannelUpdateOptions, CreateDraftResponse, DeleteChannelAPIResponse, + DeleteMessageOptions, DraftMessagePayload, Event, EventAPIResponse, @@ -111,6 +112,16 @@ export type UpdateMessageWithStateUpdateParams = { updateMessageRequestFn?: CustomUpdateMessageRequestFn; }; +export type DeleteMessageWithStateUpdateParams = { + localMessage: LocalMessage; + options?: DeleteMessageOptions; + /** + * Per-call override for the delete request (advanced). + * If set, it takes precedence over channel instance configuration handlers. 
+ */ + deleteMessageRequestFn?: CustomDeleteMessageRequestFn; +}; + // Custom request function types for configuration export type CustomSendMessageRequestFn = ( params: Omit, @@ -120,8 +131,13 @@ export type CustomUpdateMessageRequestFn = ( params: Omit, ) => Promise<{ message: MessageResponse }>; +export type CustomDeleteMessageRequestFn = ( + params: Omit, +) => Promise<{ message: MessageResponse }>; + export type ChannelInstanceConfig = { requestHandlers?: { + deleteMessageRequest?: CustomDeleteMessageRequestFn; sendMessageRequest?: CustomSendMessageRequestFn; retrySendMessageRequest?: CustomSendMessageRequestFn; updateMessageRequest?: CustomUpdateMessageRequestFn; @@ -228,10 +244,18 @@ export class Channel { get: (id) => this.messagePaginator.getItem(id), handlers: () => { const { requestHandlers } = this.configState.getLatestValue(); + const deleteMessageRequest = requestHandlers?.deleteMessageRequest; const sendMessageRequest = requestHandlers?.sendMessageRequest; const retrySendMessageRequest = requestHandlers?.retrySendMessageRequest; const updateMessageRequest = requestHandlers?.updateMessageRequest; return { + delete: deleteMessageRequest + ? (p) => + deleteMessageRequest({ + localMessage: p.localMessage, + options: p.options, + }) + : undefined, send: sendMessageRequest ? (p) => sendMessageRequest({ @@ -258,6 +282,10 @@ export class Channel { }; }, defaults: { + delete: async (id, o) => { + const result = await this.getClient().deleteMessage(id, o); + return { message: result.message }; + }, send: async (m, o) => { const result = await this.sendMessage(m, o); return { message: result.message }; @@ -386,6 +414,19 @@ export class Channel { ); } + /** + * Deletes a message with local state update. 
+ */ + async deleteMessageWithLocalUpdate(params: DeleteMessageWithStateUpdateParams) { + await this.messageOperations.delete( + { + localMessage: params.localMessage, + options: params.options, + }, + params.deleteMessageRequestFn, + ); + } + sendFile( uri: string | NodeJS.ReadableStream | Buffer | File, name?: string, diff --git a/src/messageOperations/MessageOperations.ts b/src/messageOperations/MessageOperations.ts index e5fee1cc01..8fb2314016 100644 --- a/src/messageOperations/MessageOperations.ts +++ b/src/messageOperations/MessageOperations.ts @@ -1,6 +1,6 @@ // todo: add tests import type { Message, UpdateMessageOptions } from '../types'; -import { localMessageToNewMessagePayload } from '../utils'; +import { formatMessage, localMessageToNewMessagePayload } from '../utils'; import { MessageOperationStatePolicy } from './MessageOperationStatePolicy'; import type { MessageOperationsContext, @@ -194,4 +194,19 @@ export class MessageOperations { (async (p) => await this.ctx.defaults.update(p.localMessage, updateOptions)), ); } + + async delete( + params: OperationParams<'delete'>, + requestFn?: OperationRequestFn<'delete'>, + ): Promise { + const handlers = this.ctx.handlers(); + const doRequest = + requestFn ?? + handlers.delete ?? 
+ (async (p: OperationParams<'delete'>) => + await this.ctx.defaults.delete(p.localMessage.id, p.options)); + + const { message: messageFromResponse } = await doRequest(params); + this.ctx.ingest(formatMessage(messageFromResponse)); + } } diff --git a/src/messageOperations/types.ts b/src/messageOperations/types.ts index 1403646f40..4d7ce185e6 100644 --- a/src/messageOperations/types.ts +++ b/src/messageOperations/types.ts @@ -1,4 +1,5 @@ import type { + DeleteMessageOptions, LocalMessage, Message, MessageResponse, @@ -8,7 +9,7 @@ import type { UpdateMessageOptions, } from '../types'; -export type OperationKind = 'send' | 'retry' | 'update'; +export type OperationKind = 'send' | 'retry' | 'update' | 'delete'; export type MessageOperationSpec = { send: { @@ -23,12 +24,16 @@ export type MessageOperationSpec = { options: UpdateMessageOptions; requestResult: UpdateMessageAPIResponse; }; + delete: { + options: DeleteMessageOptions; + requestResult: { message: MessageResponse }; + }; }; export type OperationParams = { localMessage: LocalMessage; options?: MessageOperationSpec[K]['options']; -} & (K extends 'update' ? {} : { message?: Message }); +} & (K extends 'send' | 'retry' ? 
{ message?: Message } : {}); export type OperationResponse = { message: MessageResponse }; @@ -37,6 +42,7 @@ export type OperationRequestFn = ( ) => Promise; export type MessageOperationsHandlers = { + delete?: OperationRequestFn<'delete'>; send?: OperationRequestFn<'send'>; retry?: OperationRequestFn<'retry'>; update?: OperationRequestFn<'update'>; @@ -49,6 +55,7 @@ export type MessageOperationsContext = { normalizeOutgoingMessage?: (m: Message) => Message; defaults: { + delete: (id: string, o?: DeleteMessageOptions) => Promise; send: (m: Message, o?: SendMessageOptions) => Promise; update: (m: LocalMessage, o?: UpdateMessageOptions) => Promise; }; diff --git a/src/thread.ts b/src/thread.ts index 5595ee4cd7..79e3bb7251 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -18,6 +18,7 @@ import type { } from './types'; import type { Channel, + DeleteMessageWithStateUpdateParams, SendMessageWithStateUpdateParams, UpdateMessageWithStateUpdateParams, } from './channel'; @@ -243,10 +244,18 @@ export class Thread extends WithSubscriptions { }), handlers: () => { const { requestHandlers } = this.channel.configState.getLatestValue(); + const deleteMessageRequest = requestHandlers?.deleteMessageRequest; const sendMessageRequest = requestHandlers?.sendMessageRequest; const retrySendMessageRequest = requestHandlers?.retrySendMessageRequest; const updateMessageRequest = requestHandlers?.updateMessageRequest; return { + delete: deleteMessageRequest + ? (p) => + deleteMessageRequest({ + localMessage: p.localMessage, + options: p.options, + }) + : undefined, send: sendMessageRequest ? 
(p) => sendMessageRequest({ @@ -273,6 +282,10 @@ export class Thread extends WithSubscriptions { }; }, defaults: { + delete: async (id, o) => { + const result = await this.channel.getClient().deleteMessage(id, o); + return { message: result.message }; + }, send: async (m, o) => { const result = await this.channel.sendMessage(m, o); return { message: result.message }; @@ -686,6 +699,19 @@ export class Thread extends WithSubscriptions { ); } + /** + * Deletes a message with local state update. + */ + async deleteMessageWithLocalUpdate(params: DeleteMessageWithStateUpdateParams) { + await this.messageOperations.delete( + { + localMessage: params.localMessage, + options: params.options, + }, + params.deleteMessageRequestFn, + ); + } + public markAsRead = async ({ force = false }: { force?: boolean } = {}) => { if (this.ownUnreadCount === 0 && !force) { return null; diff --git a/test/unit/messageOperations/MessageOperations.test.ts b/test/unit/messageOperations/MessageOperations.test.ts index 378651038c..90f358868d 100644 --- a/test/unit/messageOperations/MessageOperations.test.ts +++ b/test/unit/messageOperations/MessageOperations.test.ts @@ -30,6 +30,8 @@ const makeMessageResponse = (overrides?: Partial): MessageRespo ...overrides, }) as MessageResponse; +const defaultDelete = async () => ({ message: makeMessageResponse({ id: 'm1' }) }); + describe('MessageOperations', () => { it('marks optimistic message as sending, then ingests received response', async () => { const store: Store = new Map(); @@ -39,6 +41,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), }, @@ -58,6 +61,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => ({ message: makeMessageResponse({ id: 
'm1' }) }), update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), }, @@ -80,6 +84,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => { throw Object.assign(new Error('message already exists'), { code: 4 }); }, @@ -101,6 +106,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => { throw new Error('nope'); }, @@ -123,6 +129,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async (message, options) => { sendCalls.push({ message, options }); if (sendCalls.length === 1) { @@ -167,6 +174,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async (message, options) => { sendCalls.push({ message, options }); if (sendCalls.length === 1) { @@ -214,6 +222,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async (message, options) => { sendCalls.push({ message, options }); if (sendCalls.length === 1) { @@ -266,6 +275,7 @@ describe('MessageOperations', () => { }, }), defaults: { + delete: defaultDelete, send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), }, @@ -288,6 +298,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), update: async (_m, options) => { seenOptions = options; @@ -325,6 +336,7 @@ describe('MessageOperations', () => { get: (id) => store.get(id), handlers: () => ({}), defaults: { + delete: defaultDelete, send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), update: 
async (_m, options) => { seenOptions = options; @@ -338,4 +350,56 @@ describe('MessageOperations', () => { await ops.update({ localMessage }); expect(seenOptions).toBeUndefined(); }); + + it('delete uses defaults.delete and ingests deleted message', async () => { + const store: Store = new Map(); + const defaultsDelete = vi.fn(async () => ({ + message: makeMessageResponse({ id: 'm1', deleted_at: new Date().toISOString() }), + })); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + delete: defaultsDelete, + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'received' }); + await ops.delete({ localMessage }); + + expect(defaultsDelete).toHaveBeenCalledWith('m1', undefined); + expect(store.get('m1')?.deleted_at).toBeInstanceOf(Date); + }); + + it('delete uses per-call requestFn override', async () => { + const store: Store = new Map(); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({}), + defaults: { + delete: defaultDelete, + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'received' }); + + await ops.delete({ localMessage }, async () => ({ + message: makeMessageResponse({ + id: 'm1', + deleted_at: new Date().toISOString(), + text: 'deleted via override', + }), + })); + + expect(store.get('m1')?.text).toBe('deleted via override'); + expect(store.get('m1')?.deleted_at).toBeInstanceOf(Date); + }); }); From 314ea2cb41107b23e26319730535f70397106d6b Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 4 Mar 2026 23:05:07 +0100 Subject: [PATCH 19/31] feat: allow to define 
custom mark-read request function for Thread and Channel --- src/channel.ts | 10 ++++- .../MessageDeliveryReporter.ts | 40 ++++++++++++++---- src/thread.ts | 42 ++++++++++++------- 3 files changed, 69 insertions(+), 23 deletions(-) diff --git a/src/channel.ts b/src/channel.ts index 102a317655..43dc7fd9bc 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -12,6 +12,7 @@ import { normalizeQuerySort, } from './utils'; import type { StreamChat } from './client'; +import type { Thread } from './thread'; import { DEFAULT_QUERY_CHANNEL_MESSAGE_LIST_PAGE_SIZE } from './constants'; import type { AIState, @@ -135,9 +136,16 @@ export type CustomDeleteMessageRequestFn = ( params: Omit, ) => Promise<{ message: MessageResponse }>; +export type CustomMarkReadRequestFn = (params: { + channel: Channel; + thread?: Thread; + options?: MarkReadOptions; +}) => Promise; + export type ChannelInstanceConfig = { requestHandlers?: { deleteMessageRequest?: CustomDeleteMessageRequestFn; + markReadRequest?: CustomMarkReadRequestFn; sendMessageRequest?: CustomSendMessageRequestFn; retrySendMessageRequest?: CustomSendMessageRequestFn; updateMessageRequest?: CustomUpdateMessageRequestFn; @@ -1377,7 +1385,7 @@ export class Channel { } /** - * markReadRequest - Send the mark read event for this user, only works if the `read_events` setting is enabled + * markAsReadRequest - Send the mark read event for this user, only works if the `read_events` setting is enabled * * @param {MarkReadOptions} data * @return {Promise} Description diff --git a/src/messageDelivery/MessageDeliveryReporter.ts b/src/messageDelivery/MessageDeliveryReporter.ts index 682cc50542..2919815307 100644 --- a/src/messageDelivery/MessageDeliveryReporter.ts +++ b/src/messageDelivery/MessageDeliveryReporter.ts @@ -279,14 +279,40 @@ export class MessageDeliveryReporter { * @param options */ public markRead = async (collection: Channel | Thread, options?: MarkReadOptions) => { + const isThreadCollection = isThread(collection); + const 
channel = isThreadCollection ? collection.channel : collection; + const requestOptions = isThreadCollection + ? { ...options, thread_id: collection.id } + : options; + let result: EventAPIResponse | null = null; - if (isChannel(collection)) { - result = await collection.markAsReadRequest(options); - } else if (isThread(collection)) { - result = await collection.channel.markAsReadRequest({ - ...options, - thread_id: collection.id, - }); + + if (isThreadCollection) { + const markReadRequestHandler = collection.configState.getLatestValue() + .requestHandlers?.markReadRequest as + | ((params: { + thread: Thread; + options?: MarkReadOptions; + }) => Promise | void) + | undefined; + result = markReadRequestHandler + ? ((await markReadRequestHandler({ + options: requestOptions, + thread: collection, + })) ?? null) + : await channel.markAsReadRequest(requestOptions); + } else { + const markReadRequestHandler = channel.configState.getLatestValue().requestHandlers + ?.markReadRequest as + | ((params: { + channel: Channel; + thread?: Thread; + options?: MarkReadOptions; + }) => Promise | void) + | undefined; + result = markReadRequestHandler + ? ((await markReadRequestHandler({ channel, options: requestOptions })) ?? 
null) + : await channel.markAsReadRequest(requestOptions); } this.removeCandidateFor(collection); diff --git a/src/thread.ts b/src/thread.ts index 79e3bb7251..d25328cbe4 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -1,15 +1,12 @@ import { StateStore } from './store'; -import { - addToMessageList, - findIndexInSortedArray, - formatMessage, - throttle, -} from './utils'; +import { addToMessageList, findIndexInSortedArray, formatMessage } from './utils'; import type { AscDesc, DraftResponse, + EventAPIResponse, EventTypes, LocalMessage, + MarkReadOptions, MessagePaginationOptions, MessageResponse, ReadResponse, @@ -77,7 +74,6 @@ export type ThreadReadState = Record; const DEFAULT_PAGE_LIMIT = 50; const DEFAULT_SORT: { created_at: AscDesc }[] = [{ created_at: -1 }]; -const MARK_AS_READ_THROTTLE_TIMEOUT = 1000; // TODO: remove this once we move to API v2 export const THREAD_RESPONSE_RESERVED_KEYS: Record = { active_participant_count: true, @@ -117,7 +113,19 @@ const constructCustomDataObject = (threadData: T) => { return custom; }; +export type CustomThreadMarkReadRequestFn = (params: { + thread: Thread; + options?: MarkReadOptions; +}) => Promise | void; + +export type ThreadInstanceConfig = { + requestHandlers?: { + markReadRequest?: CustomThreadMarkReadRequestFn; + }; +}; + export class Thread extends WithSubscriptions { + public readonly configState = new StateStore({}); public readonly state: StateStore; public readonly id: string; public readonly messageComposer: MessageComposer; @@ -419,7 +427,7 @@ export class Thread extends WithSubscriptions { }), ({ active, unreadMessageCount }) => { if (!active || !unreadMessageCount) return; - this.throttledMarkAsRead(); + this.throttledMarkRead(); }, ); @@ -465,7 +473,7 @@ export class Thread extends WithSubscriptions { }); if (active) { - this.throttledMarkAsRead(); + this.throttledMarkRead(); } const nextRead: ThreadReadState = {}; @@ -712,7 +720,7 @@ export class Thread extends WithSubscriptions { ); } - public 
markAsRead = async ({ force = false }: { force?: boolean } = {}) => { + public markRead = async ({ force = false }: { force?: boolean } = {}) => { if (this.ownUnreadCount === 0 && !force) { return null; } @@ -720,11 +728,15 @@ export class Thread extends WithSubscriptions { return await this.client.messageDeliveryReporter.markRead(this); }; - private throttledMarkAsRead = throttle( - () => this.markAsRead(), - MARK_AS_READ_THROTTLE_TIMEOUT, - { trailing: true }, - ); + private throttledMarkRead = () => { + this.client.messageDeliveryReporter.throttledMarkRead(this); + }; + + /** + * @deprecated Use `thread.markRead` instead. + */ + public markAsRead = ({ force = false }: { force?: boolean } = {}) => + this.markRead({ force }); public queryReplies = ({ limit = DEFAULT_PAGE_LIMIT, From fe7a68de3188ffddef4e4f104dc329a2e556dc39 Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 4 Mar 2026 23:05:30 +0100 Subject: [PATCH 20/31] feat: add messageFocusSignal state to MessagePaginator --- src/pagination/paginators/MessagePaginator.ts | 115 +++++++++++++++++- .../paginators/MessagePaginator.test.ts | 32 ++++- 2 files changed, 141 insertions(+), 6 deletions(-) diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index e27e0e2f9c..ad39d49264 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -35,7 +35,40 @@ import { deriveCreatedAtAroundPaginationFlags } from '../cursorDerivation'; import { deriveIdAroundPaginationFlags } from '../cursorDerivation/idAroundPaginationFlags'; import { deriveLinearPaginationFlags } from '../cursorDerivation/linearPaginationFlags'; -export type JumpToMessageOptions = { pageSize?: number }; +export type MessageFocusReason = + | 'jump-to-message' + | 'jump-to-first-unread' + | 'jump-to-latest'; + +export type MessageFocusSignal = { + messageId: string; + reason: MessageFocusReason; + token: number; + createdAt: number; + ttlMs: 
number; +}; + +export type MessageFocusSignalState = { + signal: MessageFocusSignal | null; +}; + +export type JumpToMessageOptions = { + pageSize?: number; + /** + * Optional reason attached to emitted focus signal. + * Defaults to `jump-to-message`. + */ + focusReason?: MessageFocusReason; + /** + * TTL for the emitted focus signal in milliseconds. + * Defaults to `3000`. + */ + focusSignalTtlMs?: number; + /** + * If true, suppresses focus signal emission after a successful jump. + */ + suppressFocusSignal?: boolean; +}; export type MessagePaginatorSort = { created_at: AscDesc } | { created_at: AscDesc }[]; @@ -116,6 +149,9 @@ export class MessagePaginator extends BasePaginator; + readonly messageFocusSignal: StateStore; + private clearMessageFocusSignalTimeoutId: ReturnType | null = null; + private messageFocusSignalToken = 0; protected _sort = DEFAULT_BACKEND_SORT; protected _nextQueryShape: MessageQueryShape | undefined; sortComparator: (a: LocalMessage, b: LocalMessage) => number; @@ -166,6 +202,9 @@ export class MessagePaginator extends BasePaginator({ + signal: null, + }); this.sortComparator = makeComparator({ sort: this._sort, resolvePathValue: resolveDotPathValue, @@ -317,7 +356,12 @@ export class MessagePaginator extends BasePaginator => { let localMessage = this.getItem(messageId); let interval: AnyInterval | undefined; @@ -366,6 +410,13 @@ export class MessagePaginator extends BasePaginator { + this.messageFocusSignalToken += 1; + const signal: MessageFocusSignal = { + messageId, + reason, + token: this.messageFocusSignalToken, + createdAt: Date.now(), + ttlMs, + }; + + if (this.clearMessageFocusSignalTimeoutId) { + clearTimeout(this.clearMessageFocusSignalTimeoutId); + this.clearMessageFocusSignalTimeoutId = null; + } + + this.messageFocusSignal.next({ signal }); + + this.clearMessageFocusSignalTimeoutId = setTimeout(() => { + this.clearMessageFocusSignal({ token: signal.token }); + }, ttlMs); + + return signal; + }; + + clearMessageFocusSignal = ({ 
token }: { token?: number } = {}) => { + const current = this.messageFocusSignal.getLatestValue().signal; + if (!current) return; + if (typeof token !== 'undefined' && current.token !== token) return; + + if (this.clearMessageFocusSignalTimeoutId) { + clearTimeout(this.clearMessageFocusSignalTimeoutId); + this.clearMessageFocusSignalTimeoutId = null; + } + + this.messageFocusSignal.next({ signal: null }); }; setUnreadSnapshot = (next: Partial): UnreadSnapshotState => { diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts index a1beb1e8e3..94f6c4cab1 100644 --- a/test/unit/pagination/paginators/MessagePaginator.test.ts +++ b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -318,7 +318,10 @@ describe('MessagePaginator', () => { const ok = await paginator.jumpToTheFirstUnreadMessage(); expect(ok).toBe(true); - expect(jumpSpy).toHaveBeenCalledWith('m-unread', undefined); + expect(jumpSpy).toHaveBeenCalledWith( + 'm-unread', + expect.objectContaining({ focusReason: 'jump-to-first-unread' }), + ); }); it('can ignore snapshot and rely on channel read state only', async () => { @@ -368,6 +371,33 @@ describe('MessagePaginator', () => { }); }); + describe('messageFocusSignal', () => { + it('emits focus signal with unique token and clears stale timer safely', async () => { + vi.useFakeTimers(); + const paginator = new MessagePaginator({ channel, itemIndex }); + + const first = paginator.emitMessageFocusSignal({ + messageId: 'm1', + reason: 'jump-to-message', + ttlMs: 3000, + }); + const second = paginator.emitMessageFocusSignal({ + messageId: 'm1', + reason: 'jump-to-message', + ttlMs: 3000, + }); + + expect(second.token).toBeGreaterThan(first.token); + expect(paginator.messageFocusSignal.getLatestValue().signal?.token).toBe( + second.token, + ); + + vi.advanceTimersByTime(3000); + expect(paginator.messageFocusSignal.getLatestValue().signal).toBe(null); + vi.useRealTimers(); + }); + }); + 
describe.todo('postQueryReconcile and deriveCursor for', () => {}); describe('linear pagination', () => { describe('updates the hasMoreTail flag only if the first message on page is the first message in interval', () => { From cf4252ed7c45e6aa697988bbcc3c02f442707f7a Mon Sep 17 00:00:00 2001 From: martincupela Date: Wed, 4 Mar 2026 23:10:19 +0100 Subject: [PATCH 21/31] refactor: remove thread from Channel's CustomMarkReadRequestFn --- src/channel.ts | 2 -- src/messageDelivery/MessageDeliveryReporter.ts | 1 - 2 files changed, 3 deletions(-) diff --git a/src/channel.ts b/src/channel.ts index 43dc7fd9bc..8173abce78 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -12,7 +12,6 @@ import { normalizeQuerySort, } from './utils'; import type { StreamChat } from './client'; -import type { Thread } from './thread'; import { DEFAULT_QUERY_CHANNEL_MESSAGE_LIST_PAGE_SIZE } from './constants'; import type { AIState, @@ -138,7 +137,6 @@ export type CustomDeleteMessageRequestFn = ( export type CustomMarkReadRequestFn = (params: { channel: Channel; - thread?: Thread; options?: MarkReadOptions; }) => Promise; diff --git a/src/messageDelivery/MessageDeliveryReporter.ts b/src/messageDelivery/MessageDeliveryReporter.ts index 2919815307..59a0a743d8 100644 --- a/src/messageDelivery/MessageDeliveryReporter.ts +++ b/src/messageDelivery/MessageDeliveryReporter.ts @@ -306,7 +306,6 @@ export class MessageDeliveryReporter { ?.markReadRequest as | ((params: { channel: Channel; - thread?: Thread; options?: MarkReadOptions; }) => Promise | void) | undefined; From aa706ded26a60b7de9cbaa54fe34dbcdf4d94f1c Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 07:40:33 +0100 Subject: [PATCH 22/31] test: fix failing tests --- .../MessageOperations.test.ts | 31 +++++++++++++++++++ test/unit/threads.test.ts | 10 +++--- 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/test/unit/messageOperations/MessageOperations.test.ts b/test/unit/messageOperations/MessageOperations.test.ts 
index 90f358868d..ef83969a9b 100644 --- a/test/unit/messageOperations/MessageOperations.test.ts +++ b/test/unit/messageOperations/MessageOperations.test.ts @@ -402,4 +402,35 @@ describe('MessageOperations', () => { expect(store.get('m1')?.text).toBe('deleted via override'); expect(store.get('m1')?.deleted_at).toBeInstanceOf(Date); }); + + it('delete uses configured handlers.delete when provided', async () => { + const store: Store = new Map(); + const configuredDelete = vi.fn(async () => ({ + message: makeMessageResponse({ + id: 'm1', + deleted_at: new Date().toISOString(), + text: 'deleted via configured handler', + }), + })); + + const ops = new MessageOperations({ + ingest: (m) => store.set(m.id, m), + get: (id) => store.get(id), + handlers: () => ({ delete: configuredDelete }), + defaults: { + delete: defaultDelete, + send: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + update: async () => ({ message: makeMessageResponse({ id: 'm1' }) }), + }, + }); + + const localMessage = makeLocalMessage({ id: 'm1', status: 'received' }); + await ops.delete({ localMessage, options: { hard: true } }); + + expect(configuredDelete).toHaveBeenCalledWith({ + localMessage, + options: { hard: true }, + }); + expect(store.get('m1')?.text).toBe('deleted via configured handler'); + }); }); diff --git a/test/unit/threads.test.ts b/test/unit/threads.test.ts index 2adbf057ee..26d9721c5b 100644 --- a/test/unit/threads.test.ts +++ b/test/unit/threads.test.ts @@ -692,17 +692,19 @@ describe('Threads 2.0', () => { thread.registerSubscriptions(); const stateBefore = thread.state.getLatestValue(); - const stubbedMarkAsRead = sinon.stub(thread, 'markAsRead').resolves(); + const stubbedMarkRead = sinon + .stub(client.messageDeliveryReporter, 'throttledMarkRead') + .returns(undefined); expect(stateBefore.active).to.be.false; expect(thread.ownUnreadCount).to.equal(42); - expect(stubbedMarkAsRead.called).to.be.false; + expect(stubbedMarkRead.called).to.be.false; thread.activate(); 
clock.runAll(); const stateAfter = thread.state.getLatestValue(); expect(stateAfter.active).to.be.true; - expect(stubbedMarkAsRead.calledOnce).to.be.true; + expect(stubbedMarkRead.calledOnce).to.be.true; client.dispatchEvent({ type: 'message.new', @@ -714,7 +716,7 @@ describe('Threads 2.0', () => { }); clock.runAll(); - expect(stubbedMarkAsRead.calledTwice).to.be.true; + expect(stubbedMarkRead.calledTwice).to.be.true; thread.unregisterSubscriptions(); clock.restore(); From 1fbf4206a23ecae907b65dc1a6401a4026930bde Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 11:51:52 +0100 Subject: [PATCH 23/31] feat: export configuration service --- src/index.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/index.ts b/src/index.ts index e75c01948b..336bd4eaed 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,6 +5,7 @@ export * from './client'; export * from './client_state'; export * from './channel'; export * from './channel_state'; +export * from './configuration'; export * from './connection'; export { type CooldownTimerState } from './CooldownTimer'; export * from './events'; From a3f4bd7a353f94da6c711fe9a89236e18016b69e Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 11:53:52 +0100 Subject: [PATCH 24/31] feat: add backwards compatible APIs --- .../breaking-change-summary.md | 36 ++++ .../compatibility-report.release-v13.md | 96 +++++++++++ .../decisions.md | 64 +++++++ .../message-paginator-release-compat/plan.md | 161 ++++++++++++++++++ .../message-paginator-release-compat/spec.md | 74 ++++++++ .../state.json | 20 +++ src/pagination/paginators/BasePaginator.ts | 71 +++++++- src/pagination/paginators/MessagePaginator.ts | 127 +++++++++++++- test/unit/channel.test.js | 30 ++++ .../paginators/BasePaginator.test.ts | 100 +++++++++++ .../paginators/MessagePaginator.test.ts | 108 ++++++++++++ 11 files changed, 874 insertions(+), 13 deletions(-) create mode 100644 specs/message-paginator-release-compat/breaking-change-summary.md create 
mode 100644 specs/message-paginator-release-compat/compatibility-report.release-v13.md create mode 100644 specs/message-paginator-release-compat/decisions.md create mode 100644 specs/message-paginator-release-compat/plan.md create mode 100644 specs/message-paginator-release-compat/spec.md create mode 100644 specs/message-paginator-release-compat/state.json diff --git a/specs/message-paginator-release-compat/breaking-change-summary.md b/specs/message-paginator-release-compat/breaking-change-summary.md new file mode 100644 index 0000000000..d408d1b5f2 --- /dev/null +++ b/specs/message-paginator-release-compat/breaking-change-summary.md @@ -0,0 +1,36 @@ +# Breaking-Change Summary (`master`...`feat/message-paginator`) + +## Highest-Risk Changes + +1. **`BasePaginator` API contract changed behind same public export name** + +- Old API shape (`next/prev`, `hasNext/hasPrev`, `cursor.next/prev`, direction `next|prev`) was replaced by (`toHead/toTail`, `hasMoreHead/hasMoreTail`, `cursor.headward/tailward`, direction `headward|tailward`). +- This can break downstream subclasses and direct usages. + +2. **Moved paginator source files can break unsupported deep-import paths (out of scope)** + +- Files moved from `src/pagination/*` to `src/pagination/paginators/*`. +- Root exports remain available via `src/index.ts -> export * from './pagination'`. +- Risk exists only for consumers importing internal paths such as `stream-chat/src/pagination/BasePaginator`. +- This is out of scope for this compatibility pass because the supported interface is root exports. + +3. **`MessageReceiptsTracker` constructor options changed (mostly internal/pseudo-break)** + +- Old usage expected `{ locateMessage }`; new usage requires `{ channel, locateMessage? }`. +- Existing direct instantiation can break, but expected impact is low because this class is primarily used internally by `Channel`. + +## Medium-Risk Changes + +4. 
**Setup-related type exports moved out of `client.ts`** + +- `MessageComposerSetupState` and related setup types were exported previously from `client.ts` and now live in `configuration/types.ts`. +- This is resolved if root `index.ts` re-exports `./configuration`. + +5. **`StreamChat._messageComposerSetupState` removed (internal-only)** + +- This is internal API and not part of supported semver surface. + +## Suggested Release Classification + +- If compatibility shims/re-exports are **not** added: treat as **major**. +- If shims/re-exports are added and `BasePaginator` compatibility is preserved/aliased: could remain **minor**. diff --git a/specs/message-paginator-release-compat/compatibility-report.release-v13.md b/specs/message-paginator-release-compat/compatibility-report.release-v13.md new file mode 100644 index 0000000000..528d8b7c2c --- /dev/null +++ b/specs/message-paginator-release-compat/compatibility-report.release-v13.md @@ -0,0 +1,96 @@ +# Compatibility Report: `stream-chat-react@release-v13` vs local `stream-chat-js@feat/message-paginator` + +## Environment + +- React worktree: `/Users/martincupela/Projects/stream/chat/stream-chat-react-worktrees/chatview-layout-controller` +- React branch: `release-v13` +- JS SDK repo: `/Users/martincupela/Projects/stream/chat/stream-chat-js` +- JS branch: `feat/message-paginator` + +## Dependency wiring + +`stream-chat-react` resolved `stream-chat` through existing yarn-link symlink: + +- `node_modules/stream-chat -> /Users/martincupela/.config/yarn/link/stream-chat` +- `/Users/martincupela/.config/yarn/link/stream-chat -> /Users/martincupela/Projects/stream/chat/stream-chat-js` + +So test runs consumed local SDK code from this branch. + +## Commands run + +1. Build / type readiness on JS SDK side (already validated in prior steps): + +- `yarn build` (in `stream-chat-js`) + +2. 
Targeted release-v13 compatibility tests (React worktree): + +- `yarn test --watchman=false src/components/Channel/__tests__/Channel.test.js src/components/MessageList/__tests__/MessageList.test.js src/components/Thread/__tests__/Thread.test.js` + +3. Typecheck in React worktree: + +- `yarn types` + +4. Full React test matrix: + +- `yarn test --watchman=false` + +5. Reproduction of initially failing suites only: + +- `yarn test --watchman=false src/components/MessageInput/__tests__/ThreadMessageInput.test.js src/components/Poll/__tests__/PollCreationDialog.test.js` + +6. Full React test matrix after test updates: + +- `yarn test --watchman=false` + +## Results + +- Targeted test suites: **PASS** + - `Channel.test.js`: pass + - `MessageList.test.js`: pass + - `Thread.test.js`: pass + - Total: 3 suites, 133 tests passed +- React typecheck: **PASS** +- Full suite (final): **PASS** + - `139 passed, 0 failed` + - `2024 passed tests, 2 skipped` + +## Observations + +- `--watchman=false` was required due to sandbox watchman permission errors; this is environment-related, not product behavior. +- There were pre-existing console warnings in tests (`MessageTimestamp ... invalid created_at date`, React `act(...)` warnings), but no assertion failures. + +## Compatibility conclusion (targeted) + +For the validated `release-v13` compatibility surfaces, no breaking regressions were detected when using local `stream-chat-js@feat/message-paginator`: + +- Legacy channel pagination usage (`channel.state.messagePagination` / `messageSets`) continues to work in tested flows. +- mark-read / `doMarkReadRequest`-related Channel and MessageList flows pass. +- Thread flows in tested suite pass. + +## Full-suite findings and resolution + +1. 
`ThreadMessageInput` draft test triggered unexpected network delete request + +- Failing test: + - `src/components/MessageInput/__tests__/ThreadMessageInput.test.js` +- case: `draft › is queried when drafts are enabled` +- Error: + - `AxiosError: Network Error` from `Channel._deleteDraft` via `MessageComposer.deleteDraft`. +- Resolution: + - mocked `customChannel._deleteDraft` in test setup to avoid external HTTP in test env. + +2. Poll max-vote validation behavior changed (value clamping) + +- Failing test: + - `src/components/Poll/__tests__/PollCreationDialog.test.js` +- case updated to `clamps max vote count to 10 and allows submission` +- Resolution: + - adjusted assertions to new behavior: + - error text is empty + - value is clamped to `10` + - submit button is enabled + - no translation updates required (`i18n/en.json` already contains the previous key). + +## Remaining risk + +- Full Jest matrix is green for this setup; no blocking compatibility failures remain. diff --git a/specs/message-paginator-release-compat/decisions.md b/specs/message-paginator-release-compat/decisions.md new file mode 100644 index 0000000000..514910aaac --- /dev/null +++ b/specs/message-paginator-release-compat/decisions.md @@ -0,0 +1,64 @@ +# Message Paginator Release Compatibility Decisions + +## Decision: Treat BasePaginator API drift as the primary release risk + +**Date:** 2026-03-05 +**Context:** +`BasePaginator` remains publicly exported but its method names, direction values, cursor shape, and state fields changed. + +**Decision:** +Prioritize compatibility strategy for `BasePaginator` before merging branch to `master`. + +**Reasoning:** +This is the most likely downstream compile/runtime break for advanced integrators that extend paginator classes. + +**Alternatives considered:** + +- Ignore and treat as internal-only: rejected because `BasePaginator` is exported. +- Delay until post-merge: rejected because release classification would be unclear. 
+ +## Decision: Compatibility scope is root exports from `src/index.ts` only + +**Date:** 2026-03-05 +**Context:** +The release compatibility target is the public package API exposed through root exports. + +**Decision:** +Do not add deep-import compatibility shims for moved paginator files. +Compatibility work is limited to symbols exported via `src/index.ts`. + +**Reasoning:** +Deep imports are not the supported interface contract for this release. +Focusing on root exports keeps the compatibility scope explicit and maintainable. + +**Alternatives considered:** + +- Add shims for old deep-import file paths: rejected as out-of-scope for public API compatibility. + +## Decision: Restore removed setup type exports on root surface + +**Date:** 2026-03-05 +**Context:** +`MessageComposerSetupState` moved into configuration internals and is no longer exported from root API. + +**Decision:** +Plan includes restoring root exports (directly or via re-export) to avoid unintended TypeScript breakage. + +**Reasoning:** +Type-only breaks still impact consumers and should be avoided in non-major release. + +## Decision: Add transitional BasePaginator compatibility aliases + +**Date:** 2026-03-05 +**Context:** +`BasePaginator` introduced head/tail naming (`toTail`, `toHead`, `hasMoreTail`, `hasMoreHead`, `tailward/headward` cursors), while older consumers may still call legacy APIs. + +**Decision:** +Add deprecated alias APIs on `BasePaginator`: + +- methods: `next`, `prev`, `nextDebounced`, `prevDebounced` +- getters: `hasNext`, `hasPrev` +- query response compatibility: accept `next/prev` cursor fields as fallback to `tailward/headward`. + +**Reasoning:** +This preserves backward compatibility for non-migrated paginator consumers while keeping new naming as canonical. 
diff --git a/specs/message-paginator-release-compat/plan.md b/specs/message-paginator-release-compat/plan.md new file mode 100644 index 0000000000..61c5067c02 --- /dev/null +++ b/specs/message-paginator-release-compat/plan.md @@ -0,0 +1,161 @@ +# Message Paginator Release Compatibility Plan + +## Worktree + +**Worktree path:** `/Users/martincupela/Projects/stream/chat/stream-chat-js` +**Branch:** `feat/message-paginator` +**Base branch:** `master` + +## Task overview + +Scope is limited to the public interface exported via `src/index.ts`. +Deep-import path compatibility is explicitly out of scope. + +## Task 1: Confirm Public Interface Scope + +**File(s) to create/modify:** `specs/message-paginator-release-compat/decisions.md`, `specs/message-paginator-release-compat/spec.md` + +**Dependencies:** None + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Lock compatibility target to root exports from `src/index.ts`. +- Mark deep-import path stability as non-goal. + +**Acceptance Criteria:** + +- [x] Scope decision is documented. +- [x] Breaking-change summary reflects this scope. + +## Task 2: Restore Root Export Coverage for Configuration Types + +**File(s) to create/modify:** `src/index.ts`, `test/typescript/unit-test.ts` + +**Dependencies:** Task 1 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Ensure configuration setup types are root-exported (`export * from './configuration'`). +- Add a type-level regression check for configuration setup types. + +**Acceptance Criteria:** + +- [x] Root index exports configuration module. +- [x] Type-level regression check compiles. 
+ +## Task 3: Add Legacy BasePaginator API Aliases + +**File(s) to create/modify:** `src/pagination/paginators/BasePaginator.ts` + +**Dependencies:** Task 1 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Add compatibility aliases: + - `next`/`prev` + - `nextDebounced`/`prevDebounced` + - `hasNext`/`hasPrev` +- Support legacy `next/prev` cursor fields in query result fallback. + +**Acceptance Criteria:** + +- [x] Existing legacy paginator call sites compile and run via aliases. +- [x] New API remains primary and unchanged. +- [x] Aliases are documented as transitional compatibility layer. + +## Task 4: Add Regression Tests for Alias Compatibility + +**File(s) to create/modify:** `test/unit/pagination/paginators/BasePaginator.test.ts` + +**Dependencies:** Task 3 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Add tests for legacy method/getter aliases. +- Add test for `next/prev` cursor field fallback. + +**Acceptance Criteria:** + +- [x] Alias tests pass. +- [x] Existing paginator tests stay green. + +## Task 5: Final Release Notes and Compatibility Summary + +**File(s) to create/modify:** `specs/message-paginator-release-compat/spec.md`, `specs/message-paginator-release-compat/decisions.md`, `specs/message-paginator-release-compat/breaking-change-summary.md` + +**Dependencies:** Task 2, Task 4 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Finalize real vs pseudo breaking changes for public root API. +- Document deprecations and migration notes. + +**Acceptance Criteria:** + +- [x] Summary is aligned with public root export scope. +- [x] Remaining intentional breaks are explicitly listed. 
+ +## Task 6: Cross-Repo `release-v13` Compatibility Validation + +**File(s) to create/modify:** `specs/message-paginator-release-compat/compatibility-report.release-v13.md` + +**Dependencies:** Task 2, Task 4 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Use `stream-chat-react` worktree at `/Users/martincupela/Projects/stream/chat/stream-chat-react-worktrees/chatview-layout-controller` (`release-v13` branch). +- Run tests against local `stream-chat-js` branch build (`feat/message-paginator`) by wiring React worktree dependency to local SDK. +- Focus on legacy compatibility surfaces: + - `channel.state.messagePagination` and `messageSets` behavior used by `release-v13`. + - mark-read and `doMarkReadRequest` flows. + - paginator compatibility behavior where relevant. +- Record exact commands, results, and failures. + +**Acceptance Criteria:** + +- [x] Targeted `release-v13` tests for Channel/MessageList/Thread run against local JS SDK. +- [x] Any failures are categorized as real break, expected behavior shift, or test issue. +- [x] Compatibility report is committed to specs folder. 
+ +## Execution order + +- **Phase 1 (serial):** Task 1 +- **Phase 2 (parallel):** Task 2, Task 3 +- **Phase 3 (serial):** Task 4 +- **Phase 4 (serial):** Task 5 +- **Phase 5 (serial):** Task 6 + +## File ownership summary + +| Task | Creates/Modifies | +| ------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| Task 1 | `specs/message-paginator-release-compat/decisions.md`, `specs/message-paginator-release-compat/spec.md` | +| Task 2 | `src/index.ts`, `test/typescript/unit-test.ts` | +| Task 3 | `src/pagination/paginators/BasePaginator.ts` | +| Task 4 | `test/unit/pagination/paginators/BasePaginator.test.ts` | +| Task 5 | `specs/message-paginator-release-compat/spec.md`, `specs/message-paginator-release-compat/decisions.md`, `specs/message-paginator-release-compat/breaking-change-summary.md` | +| Task 6 | `specs/message-paginator-release-compat/compatibility-report.release-v13.md` | diff --git a/specs/message-paginator-release-compat/spec.md b/specs/message-paginator-release-compat/spec.md new file mode 100644 index 0000000000..91eda96c94 --- /dev/null +++ b/specs/message-paginator-release-compat/spec.md @@ -0,0 +1,74 @@ +# Message Paginator Release Compatibility Spec + +## Problem Statement + +Branch `feat/message-paginator` introduces large pagination/runtime refactors. Before merging to `master`, we need a focused semver review to identify changes that can break existing consumers of `stream-chat`. + +## Goal + +Document concrete breaking-change risks and define mitigation tasks so merge/release can be done safely. + +Additionally, validate compatibility against `stream-chat-react@release-v13` using the local `stream-chat-js@feat/message-paginator` build. + +## Non-Goals + +- Re-implementing the feature set in this spec task. +- Exhaustive behavioral QA of all new pagination flows. 
+ +## Breaking-Change Risk Summary + +### High Risk + +1. `BasePaginator` public contract changed while keeping the same export name + +- Evidence: + - Old API in `src/pagination/BasePaginator.ts` (deleted): `next/prev`, `hasNext/hasPrev`, `PaginationDirection = 'next' | 'prev'`, `cursor.next/cursor.prev`. + - New API in `src/pagination/paginators/BasePaginator.ts`: `toHead/toTail`, `hasMoreHead/hasMoreTail`, `PaginationDirection = 'headward' | 'tailward'`, `cursor.headward/cursor.tailward`. +- Impact: + - Consumers subclassing or directly using exported `BasePaginator` from `stream-chat` can fail at compile time and behavior level. + +2. Deep import paths removed from shipped `src/` tree + +- Evidence: + - Deleted files: `src/pagination/BasePaginator.ts`, `src/pagination/ReminderPaginator.ts`. + - Package ships `/src` (`package.json -> files`), so many consumers rely on internal deep imports despite `exports` map only exposing `.`. +- Impact: + - Runtime/module-resolution failure for imports like `stream-chat/src/pagination/BasePaginator` and `stream-chat/src/pagination/ReminderPaginator`. + +3. `MessageReceiptsTracker` constructor contract changed + +- Evidence: + - Old: `new MessageReceiptsTracker({ locateMessage })`. + - New: `new MessageReceiptsTracker({ channel, locateMessage? })` in `src/messageDelivery/MessageReceiptsTracker.ts`. +- Impact: + - External instantiation with previous options shape breaks (type and runtime). + +### Medium Risk + +4. Previously exported setup types no longer exported from root package surface + +- Evidence: + - `src/client.ts` no longer exports `MessageComposerSetupState`/related setup types. + - New types live under `src/configuration/types.ts` but root `src/index.ts` does not export `./configuration`. +- Impact: + - TS consumers importing these types from `'stream-chat'` or `'stream-chat/src/client'` can break. + +5. 
Undocumented but reachable `StreamChat._messageComposerSetupState` removed + +- Evidence: + - Property removed from `src/client.ts`; replaced by `instanceConfigurationService`. +- Impact: + - Integrations depending on this internal field break. + +## Lower-Risk (Mostly Additive) + +- New exports: `ChannelPaginatorsOrchestrator`, `EventHandlerPipeline`. +- New optimistic wrappers on `Channel`/`Thread` (`send/retry/update/delete...WithLocalUpdate`). +- `Thread.markAsRead` remains available as deprecated alias to `markRead`. + +## Success Criteria + +- Breaking points are either mitigated with compatibility shims/re-exports or explicitly released as major version changes. +- Test coverage is added for all compatibility shims. +- Release notes explicitly call out any intentional breaks. +- Cross-repo compatibility validation is executed against `stream-chat-react@release-v13` with local `stream-chat-js` artifacts. diff --git a/specs/message-paginator-release-compat/state.json b/specs/message-paginator-release-compat/state.json new file mode 100644 index 0000000000..f2339bd96a --- /dev/null +++ b/specs/message-paginator-release-compat/state.json @@ -0,0 +1,20 @@ +{ + "tasks": { + "task-1-confirm-public-interface-scope": "done", + "task-2-restore-root-export-coverage-for-configuration-types": "done", + "task-3-add-legacy-basepaginator-api-aliases": "done", + "task-4-add-regression-tests-for-alias-compatibility": "done", + "task-5-final-release-notes-and-compatibility-summary": "done", + "task-6-cross-repo-release-v13-compatibility-validation": "done" + }, + "flags": { + "blocked": false, + "needs-review": false + }, + "meta": { + "last_updated": "2026-03-05", + "worktree": "/Users/martincupela/Projects/stream/chat/stream-chat-js", + "branch": "feat/message-paginator", + "base_branch": "master" + } +} diff --git a/src/pagination/paginators/BasePaginator.ts b/src/pagination/paginators/BasePaginator.ts index 8758226bef..658a6b4a0f 100644 --- 
a/src/pagination/paginators/BasePaginator.ts +++ b/src/pagination/paginators/BasePaginator.ts @@ -211,6 +211,14 @@ export type ExecuteQueryReturnValue = { export type PaginationQueryReturnValue = { items: T[] } & { headward?: string; tailward?: string; + /** + * @deprecated Use `tailward` instead. + */ + next?: string; + /** + * @deprecated Use `headward` instead. + */ + prev?: string; }; export type PaginatorDebounceOptions = { debounceMs: number; @@ -449,6 +457,20 @@ export abstract class BasePaginator { return this.state.getLatestValue().hasMoreHead; } + /** + * @deprecated Use `hasMoreTail` instead. + */ + get hasNext() { + return this.hasMoreTail; + } + + /** + * @deprecated Use `hasMoreHead` instead. + */ + get hasPrev() { + return this.hasMoreHead; + } + get hasResults() { return Array.isArray(this.state.getLatestValue().items); } @@ -1967,7 +1989,15 @@ export abstract class BasePaginator { return { stateCandidate: stateUpdate, targetInterval: null }; } - const { items, headward, tailward } = results; + // Backward compatibility for custom BasePaginator subclasses: + // - old PaginationQueryReturnValue used next/prev + // - new contract uses tailward/headward + // + // Internal SDK paginators already return tailward/headward, so this fallback is + // only to keep non-migrated external subclasses working during transition. + const { items, headward, tailward, next, prev } = results; + const resolvedHeadward = headward ?? prev; + const resolvedTailward = tailward ?? 
next; stateUpdate.lastQueryError = undefined; const filteredItems = await this.filterQueryResults(items); @@ -2036,9 +2066,12 @@ export abstract class BasePaginator { stateUpdate.hasMoreTail = hasMoreTail; stateUpdate.hasMoreHead = hasMoreHead; } else { - stateUpdate.cursor = { tailward: tailward || null, headward: headward || null }; - stateUpdate.hasMoreTail = !!tailward; - stateUpdate.hasMoreHead = !!headward; + stateUpdate.cursor = { + tailward: resolvedTailward || null, + headward: resolvedHeadward || null, + }; + stateUpdate.hasMoreTail = !!resolvedTailward; + stateUpdate.hasMoreHead = !!resolvedHeadward; } } else { // todo: we could keep the offset in two directions (initial tailward offset would be taken from config.initialOffset) @@ -2093,6 +2126,18 @@ export abstract class BasePaginator { toHead = (params: Omit, 'direction' | 'queryShape'> = {}) => this.executeQuery({ direction: 'headward', ...params }); + /** + * @deprecated Use `toTail` instead. + */ + next = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.toTail(params); + + /** + * @deprecated Use `toHead` instead. + */ + prev = (params: Omit, 'direction' | 'queryShape'> = {}) => + this.toHead(params); + toTailDebounced = ( params: Omit, 'direction' | 'queryShape'> = {}, ) => { @@ -2105,6 +2150,24 @@ export abstract class BasePaginator { this._executeQueryDebounced({ direction: 'headward', ...params }); }; + /** + * @deprecated Use `toTailDebounced` instead. + */ + nextDebounced = ( + params: Omit, 'direction' | 'queryShape'> = {}, + ) => { + this.toTailDebounced(params); + }; + + /** + * @deprecated Use `toHeadDebounced` instead. 
+ */ + prevDebounced = ( + params: Omit, 'direction' | 'queryShape'> = {}, + ) => { + this.toHeadDebounced(params); + }; + reload = async () => { await this.toTail({ reset: 'yes' }); }; diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index ad39d49264..a3665f5e20 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -102,6 +102,12 @@ const dataFieldFilterResolver: FieldToDataResolver = { resolve: (message, path) => resolveDotPathValue(message, path), }; +const getMessageCreatedAtTimestamp = (message: LocalMessage): number | null => { + if (!(message.created_at instanceof Date)) return null; + const timestamp = message.created_at.getTime(); + return Number.isFinite(timestamp) ? timestamp : null; +}; + export type MessagePaginatorOptions = { channel: Channel; id?: string; @@ -454,6 +460,14 @@ export class MessagePaginator extends BasePaginator { const ownUserId = this.channel.getClient().user?.id; @@ -462,14 +476,15 @@ export class MessagePaginator extends BasePaginator { + // Messages are expected in chronological order. We find: + // - lastReadMessageId: newest message with created_at <= lastReadAt + // - firstUnreadMessageId: first message with created_at > lastReadAt + // + // If the page starts after lastReadAt, the entire page is unread and the first message is + // used as unread anchor (legacy "whole channel is unread" behavior for this queried window). 
+ const lastReadTimestamp = lastReadAt.getTime(); + if (!Number.isFinite(lastReadTimestamp) || !messages.length) { + return { firstUnreadMessageId: null, lastReadMessageId: null }; + } + + let firstUnreadMessageId: string | null = null; + let lastReadMessageId: string | null = null; + + for (const message of messages) { + const messageTimestamp = getMessageCreatedAtTimestamp(message); + if (messageTimestamp === null) continue; + + if (messageTimestamp <= lastReadTimestamp) { + lastReadMessageId = message.id; + } else if (!firstUnreadMessageId) { + firstUnreadMessageId = message.id; + } + } + + const firstMessageWithTimestamp = messages.find( + (message) => getMessageCreatedAtTimestamp(message) !== null, + ); + const firstMessageTimestamp = + firstMessageWithTimestamp && + getMessageCreatedAtTimestamp(firstMessageWithTimestamp); + if ( + firstMessageWithTimestamp && + typeof firstMessageTimestamp === 'number' && + lastReadTimestamp < firstMessageTimestamp + ) { + return { + firstUnreadMessageId: firstMessageWithTimestamp.id, + lastReadMessageId, + }; + } + + return { firstUnreadMessageId, lastReadMessageId }; }; emitMessageFocusSignal = ({ diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index 8d096464c0..07247c5682 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -396,6 +396,30 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.messages.length).to.be.equal(0); }); + it('message.truncate clears messagePaginator unread snapshot', function () { + channel.messagePaginator.setUnreadSnapshot({ + firstUnreadMessageId: 'm-1', + lastReadAt: new Date('2021-01-01T00:00:00.000Z'), + lastReadMessageId: 'm-0', + unreadCount: 7, + }); + + channel._handleChannelEvent({ + type: 'channel.truncated', + user: { id: 'id' }, + channel: { + truncated_at: new Date().toISOString(), + }, + }); + + expect(channel.messagePaginator.unreadStateSnapshot.getLatestValue()).toEqual({ + firstUnreadMessageId: null, + lastReadAt: 
null, + lastReadMessageId: null, + unreadCount: 0, + }); + }); + it('message.truncate removes messages up to specified date', function () { const messages = [ { created_at: '2021-01-01T00:01:00' }, @@ -721,6 +745,12 @@ describe('Channel _handleChannelEvent', function () { expect( channel.messageReceiptsTracker.getUserProgress(user.id)?.lastReadRef.msgId, ).toBe(event.last_read_message_id); + expect(channel.messagePaginator.unreadStateSnapshot.getLatestValue()).toEqual({ + firstUnreadMessageId: event.first_unread_message_id, + lastReadAt: new Date(event.last_read_at), + lastReadMessageId: event.last_read_message_id, + unreadCount: event.unread_messages, + }); }); it('should reconcile tracker with metadata patch for notification.mark_unread', () => { diff --git a/test/unit/pagination/paginators/BasePaginator.test.ts b/test/unit/pagination/paginators/BasePaginator.test.ts index 4e05c82052..ac54c07dd0 100644 --- a/test/unit/pagination/paginators/BasePaginator.test.ts +++ b/test/unit/pagination/paginators/BasePaginator.test.ts @@ -280,6 +280,24 @@ describe('BasePaginator', () => { expect(paginator.mockClientQuery).toHaveBeenCalledTimes(3); }); + it('supports legacy next/prev cursor fields from query response', async () => { + const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); + + const nextPromise = paginator.toTail(); + await sleep(0); + + paginator.queryResolve({ + items: [{ id: 'id1' }], + next: 'next1', + prev: 'prev1', + }); + + await nextPromise; + expect(paginator.cursor).toEqual({ tailward: 'next1', headward: 'prev1' }); + expect(paginator.hasMoreTail).toBe(true); + expect(paginator.hasMoreHead).toBe(true); + }); + it('paginates to next pages (offset)', async () => { const paginator = new Paginator({ pageSize: 1 }); let nextPromise = paginator.toTail(); @@ -405,6 +423,88 @@ describe('BasePaginator', () => { vi.useRealTimers(); }); + it('supports legacy pagination aliases', async () => { + const paginator = new Paginator({ initialCursor: 
ZERO_PAGE_CURSOR }); + expect(paginator.hasNext).toBe(true); + expect(paginator.hasPrev).toBe(true); + + const nextPromise = paginator.next(); + await sleep(0); + paginator.queryResolve({ + items: [{ id: 'id1' }], + tailward: 'next1', + headward: 'prev1', + }); + await nextPromise; + expect(paginator.mockClientQuery).toHaveBeenNthCalledWith(1, { + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + + const prevPromise = paginator.prev(); + paginator.queryResolve({ + items: [{ id: 'id0' }], + tailward: 'next2', + headward: 'prev0', + }); + await prevPromise; + expect(paginator.mockClientQuery).toHaveBeenNthCalledWith(2, { + direction: 'headward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + }); + + it('supports legacy debounced pagination aliases', async () => { + vi.useFakeTimers(); + try { + const paginator = new Paginator({ + debounceMs: 2000, + initialCursor: ZERO_PAGE_CURSOR, + }); + + paginator.nextDebounced(); + vi.advanceTimersByTime(2000); + await toNextTick(); + paginator.queryResolve({ + items: [{ id: 'id2' }], + tailward: null, + headward: 'prev0', + }); + await paginator.queryPromise; + await toNextTick(); + + paginator.prevDebounced(); + vi.advanceTimersByTime(2000); + await toNextTick(); + paginator.queryResolve({ + items: [{ id: 'id-1' }], + tailward: 'next2', + headward: null, + }); + await paginator.queryPromise; + await toNextTick(); + + expect(paginator.mockClientQuery).toHaveBeenNthCalledWith(1, { + direction: 'tailward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + expect(paginator.mockClientQuery).toHaveBeenNthCalledWith(2, { + direction: 'headward', + queryShape: defaultNextQueryShape, + reset: undefined, + retryCount: 0, + }); + } finally { + vi.useRealTimers(); + } + }); + it('paginates to a previous page (cursor only)', async () => { const paginator = new Paginator({ initialCursor: ZERO_PAGE_CURSOR }); let 
nextPromise = paginator.toHead(); diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts index 94f6c4cab1..563552e8e8 100644 --- a/test/unit/pagination/paginators/MessagePaginator.test.ts +++ b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -358,6 +358,114 @@ describe('MessagePaginator', () => { expect(ok).toBe(false); expect(jumpSpy).not.toHaveBeenCalled(); }); + + it('falls back to created_at_around query when unread ids are missing and lastReadAt exists', async () => { + const lastReadAt = new Date('2021-01-02T00:00:00.000Z'); + const channelWithReadState = { + cid: 'channel-id', + query: vi.fn(), + state: { + read: { + user1: { + first_unread_message_id: null, + last_read: lastReadAt, + last_read_message_id: null, + }, + }, + }, + getClient: () => ({ + user: { id: 'user1' }, + }), + } as unknown as Channel; + + const paginator = new MessagePaginator({ + channel: channelWithReadState, + itemIndex, + }); + const executeQuerySpy = vi.spyOn(paginator, 'executeQuery').mockResolvedValue({ + stateCandidate: { + items: [ + createMessage({ created_at: '2021-01-01T00:00:00.000Z', id: 'm-read' }), + createMessage({ created_at: '2021-01-03T00:00:00.000Z', id: 'm-unread' }), + ], + }, + targetInterval: null, + }); + const jumpSpy = vi.spyOn(paginator, 'jumpToMessage').mockResolvedValue(true); + + const ok = await paginator.jumpToTheFirstUnreadMessage({ pageSize: 25 }); + + expect(ok).toBe(true); + expect(executeQuerySpy).toHaveBeenCalledWith({ + queryShape: { created_at_around: lastReadAt.toISOString(), limit: 25 }, + updateState: false, + }); + expect(jumpSpy).toHaveBeenCalledWith( + 'm-unread', + expect.objectContaining({ focusReason: 'jump-to-first-unread' }), + ); + expect(paginator.unreadStateSnapshot.getLatestValue()).toEqual({ + firstUnreadMessageId: 'm-unread', + lastReadAt, + lastReadMessageId: 'm-read', + unreadCount: 0, + }); + }); + + it('hydrates firstUnreadMessageId when the 
queried page starts after lastReadAt', async () => { + const lastReadAt = new Date('2021-01-01T00:00:00.000Z'); + const channelWithReadState = { + cid: 'channel-id', + query: vi.fn(), + state: { + read: { + user1: { + first_unread_message_id: null, + last_read: lastReadAt, + last_read_message_id: null, + }, + }, + }, + getClient: () => ({ + user: { id: 'user1' }, + }), + } as unknown as Channel; + + const paginator = new MessagePaginator({ + channel: channelWithReadState, + itemIndex, + }); + vi.spyOn(paginator, 'executeQuery').mockResolvedValue({ + stateCandidate: { + items: [ + createMessage({ + created_at: '2021-01-02T00:00:00.000Z', + id: 'm-first-unread', + }), + createMessage({ + created_at: '2021-01-03T00:00:00.000Z', + id: 'm-newer-unread', + }), + ], + }, + targetInterval: null, + }); + const jumpSpy = vi.spyOn(paginator, 'jumpToMessage').mockResolvedValue(true); + + const ok = await paginator.jumpToTheFirstUnreadMessage(); + + expect(ok).toBe(true); + expect(jumpSpy).toHaveBeenCalledWith( + 'm-first-unread', + expect.objectContaining({ focusReason: 'jump-to-first-unread' }), + ); + expect(paginator.unreadStateSnapshot.getLatestValue()).toEqual({ + firstUnreadMessageId: 'm-first-unread', + lastReadAt, + lastReadMessageId: null, + unreadCount: 0, + }); + }); }); describe('filterQueryResults()', () => { From ed0ce7157cae861026ce142ae3b9f9552765767b Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 16:06:02 +0100 Subject: [PATCH 25/31] fix: nullify first_unread_message_id on message.read event --- src/channel.ts | 1 + test/unit/channel.test.js | 3 +++ 2 files changed, 4 insertions(+) diff --git a/src/channel.ts b/src/channel.ts index 8173abce78..3d5141da0d 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -2213,6 +2213,7 @@ export class Channel { ? (event.last_read_message_id ?? 
currentUserReadState?.last_delivered_message_id) : currentUserReadState?.last_delivered_message_id, + first_unread_message_id: undefined, user: eventUser, unread_messages: 0, }; diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index 07247c5682..dd281e94a7 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -804,6 +804,7 @@ describe('Channel _handleChannelEvent', function () { initialReadState = { last_read: new Date(1500).toISOString(), last_read_message_id: '6', + first_unread_message_id: 'first-unread-msg-id', user, unread_messages: initialCountUnread, last_delivered_at: new Date(1000).toISOString(), @@ -835,6 +836,7 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.read[user.id].last_read_message_id).toBe( event.last_read_message_id, ); + expect(channel.state.read[user.id].first_unread_message_id).toBeUndefined(); expect(channel.state.read[user.id].unread_messages).toBe(0); expect(new Date(channel.state.read[user.id].last_delivered_at).getTime()).toBe( new Date(messageReadEvent.created_at).getTime(), @@ -862,6 +864,7 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.read[anotherUser.id].last_read_message_id).toBe( event.last_read_message_id, ); + expect(channel.state.read[anotherUser.id].first_unread_message_id).toBeUndefined(); expect(channel.state.read[anotherUser.id].unread_messages).toBe(0); expect( new Date(channel.state.read[anotherUser.id].last_delivered_at).getTime(), From c972ee930ef77aec18d5dd49f0b15c543ff19ac6 Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 21:45:46 +0100 Subject: [PATCH 26/31] fix: update thread participant counts and reply counts on message.new and message.updated --- src/channel.ts | 2 + src/thread.ts | 65 +++++++++++++++++++++++++++++- test/unit/channel.test.js | 23 +++++++++++ test/unit/threads.test.ts | 84 ++++++++++++++++++++++++++++++++++++++- 4 files changed, 170 insertions(+), 4 deletions(-) diff --git 
a/src/channel.ts b/src/channel.ts index 3d5141da0d..cb86550e01 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -6,6 +6,7 @@ import type { ReadStoreReconcileMeta } from './messageDelivery'; import { MessagePaginator } from './pagination/paginators'; import { MessageOperations } from './messageOperations'; import { + formatMessage, generateChannelTempCid, logChatPromiseExecution, messageSetPagination, @@ -2388,6 +2389,7 @@ export class Channel { if (event.message) { this._extendEventWithOwnReactions(event); channelState.addMessageSorted(event.message, false, false); + this.messagePaginator.ingestItem(formatMessage(event.message)); channelState._updateQuotedMessageReferences({ message: event.message }); if (event.message.pinned) { channelState.addPinnedMessage(event.message); diff --git a/src/thread.ts b/src/thread.ts index d25328cbe4..9f79009d65 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -236,6 +236,10 @@ export class Thread extends WithSubscriptions { this.messagePaginator = new MessagePaginator({ channel: this.channel, parentMessageId: this.id, + sort: DEFAULT_SORT, + paginatorOptions: { + pageSize: DEFAULT_PAGE_LIMIT, + }, }); this.messageComposer = new MessageComposer({ client, @@ -463,8 +467,12 @@ export class Thread extends WithSubscriptions { } const isOwnMessage = event.message.user?.id === this.client.userID; - const { active, read } = this.state.getLatestValue(); + const { active, read, replies } = this.state.getLatestValue(); + const hasReplyAlready = + replies.some((reply) => reply.id === event.message?.id) || + !!this.messagePaginator.getItem(event.message.id); + this.messagePaginator.ingestItem(formatMessage(event.message)); this.upsertReplyLocally({ message: event.message, // Message from current user could have been added optimistically, @@ -472,6 +480,10 @@ export class Thread extends WithSubscriptions { timestampChanged: isOwnMessage, }); + if (!hasReplyAlready) { + this.incrementReplyCountLocally(); + } + if (active) { 
this.throttledMarkRead(); } @@ -510,6 +522,21 @@ export class Thread extends WithSubscriptions { this.state.partialNext({ read: nextRead }); }).unsubscribe; + private incrementReplyCountLocally = () => { + this.state.next((current) => { + const nextReplyCount = current.replyCount + 1; + + return { + ...current, + parentMessage: { + ...current.parentMessage, + reply_count: nextReplyCount, + }, + replyCount: nextReplyCount, + }; + }); + }; + private subscribeRepliesRead = () => this.client.on('message.read', (event) => { if (!event.user || !event.created_at || !event.thread) return; @@ -579,6 +606,7 @@ export class Thread extends WithSubscriptions { return symbol; }; + // todo: can be removed with the next breaking change and use MessagePaginator only public deleteReplyLocally = ({ message }: { message: MessageResponse }) => { const { replies } = this.state.getLatestValue(); @@ -602,6 +630,7 @@ export class Thread extends WithSubscriptions { }); }; + // todo: can be removed with the next breaking change and use MessagePaginator only public upsertReplyLocally = ({ message, timestampChanged = false, @@ -629,6 +658,7 @@ export class Thread extends WithSubscriptions { })); }; + // todo: can be removed with the next breaking change and use MessagePaginator only public updateParentMessageLocally = ({ message }: { message: MessageResponse }) => { if (message.id !== this.id) { throw new Error('Message does not belong to this thread'); @@ -641,11 +671,15 @@ export class Thread extends WithSubscriptions { ...current, deletedAt: formattedMessage.deleted_at, parentMessage: formattedMessage, + participants: + normalizeThreadParticipants(message.thread_participants, current.channel.cid) ?? + current.participants, replyCount: message.reply_count ?? 
current.replyCount, }; }); }; + // todo: can be removed with the next breaking change and use MessagePaginator only public updateParentMessageOrReplyLocally = (message: MessageResponse) => { if (message.parent_id === this.id) { this.upsertReplyLocally({ message }); @@ -738,6 +772,7 @@ export class Thread extends WithSubscriptions { public markAsRead = ({ force = false }: { force?: boolean } = {}) => this.markRead({ force }); + // todo: can be removed with the next breaking change and use MessagePaginator only public queryReplies = ({ limit = DEFAULT_PAGE_LIMIT, sort = DEFAULT_SORT, @@ -745,12 +780,14 @@ export class Thread extends WithSubscriptions { }: QueryRepliesOptions = {}) => this.channel.getReplies(this.id, { limit, ...otherOptions }, sort); + // todo: can be removed with the next breaking change and use MessagePaginator only public loadNextPage = ({ limit = DEFAULT_PAGE_LIMIT }: { limit?: number } = {}) => this.loadPage(limit); + // todo: can be removed with the next breaking change and use MessagePaginator only public loadPrevPage = ({ limit = DEFAULT_PAGE_LIMIT }: { limit?: number } = {}) => this.loadPage(-limit); - + // todo: can be removed with the next breaking change and use MessagePaginator only private loadPage = async (count: number) => { const { pagination } = this.state.getLatestValue(); const [loadingKey, cursorKey, insertionMethodKey] = @@ -802,6 +839,30 @@ export class Thread extends WithSubscriptions { }; } +type MessageThreadParticipant = NonNullable< + MessageResponse['thread_participants'] +>[number]; +type ThreadParticipant = NonNullable[number]; + +const normalizeThreadParticipants = ( + participants: MessageResponse['thread_participants'] | undefined, + channelCid: string, +): ThreadResponse['thread_participants'] | undefined => { + if (!participants) return undefined; + + const nowIso = new Date().toISOString(); + + return participants.map( + (participant: MessageThreadParticipant): ThreadParticipant => ({ + channel_cid: channelCid, + 
created_at: nowIso, + last_read_at: nowIso, + user: participant, + user_id: participant.id, + }), + ); +}; + const formatReadState = (read: ReadResponse[]): ThreadReadState => read.reduce((state, userRead) => { state[userRead.user.id] = { diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index dd281e94a7..daa9f26cb4 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -373,6 +373,29 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.unreadCount).to.be.equal(30); }); + it('message.updated syncs reply metadata into messagePaginator', function () { + const parentMessage = generateMsg({ + id: 'parent-message-id', + reply_count: 1, + thread_participants: [{ id: 'user-1' }], + }); + + channel.messagePaginator.ingestItem(parentMessage); + + channel._handleChannelEvent({ + type: 'message.updated', + message: { + ...parentMessage, + reply_count: 29, + thread_participants: [{ id: 'user-1' }, { id: 'user-2' }], + }, + }); + + const parentFromPaginator = channel.messagePaginator.getItem(parentMessage.id); + expect(parentFromPaginator?.reply_count).to.be.equal(29); + expect(parentFromPaginator?.thread_participants).to.have.length(2); + }); + it('does not override the delivery information in the read status', () => {}); it('message.truncate removes all messages if "truncated_at" is "now"', function () { diff --git a/test/unit/threads.test.ts b/test/unit/threads.test.ts index 26d9721c5b..f73140a2ed 100644 --- a/test/unit/threads.test.ts +++ b/test/unit/threads.test.ts @@ -99,6 +99,8 @@ describe('Threads 2.0', () => { expect(thread.id).to.equal(parentMessageResponse.id); // @ts-expect-error `name` is a custom property expect(thread.channel.data?.name).to.equal(channelResponse.name); + expect(thread.messagePaginator.sort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.pageSize).to.equal(50); }); it('initializes properly without threadData', () => { @@ -114,6 +116,8 @@ describe('Threads 2.0', () 
=> { expect(state.pagination.prevCursor).to.be.null; expect(state.pagination.nextCursor).to.be.null; expect(state.read).to.have.keys([TEST_USER_ID]); + expect(thread.messagePaginator.sort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.pageSize).to.equal(50); }); it('throws if minimal init parent message id is missing', () => { @@ -236,11 +240,15 @@ describe('Threads 2.0', () => { expect(stateBefore.replyCount).to.equal(0); expect(stateBefore.parentMessage.text).to.equal(parentMessageResponse.text); + const nextParticipants = [ + { id: 'participant-1' }, + ] as unknown as ThreadResponse['thread_participants']; const updatedMessage = generateMsg({ + deleted_at: new Date().toISOString(), id: parentMessageResponse.id, - text: 'aaa', reply_count: 10, - deleted_at: new Date().toISOString(), + text: 'aaa', + thread_participants: nextParticipants, }) as MessageResponse; thread.updateParentMessageLocally({ message: updatedMessage }); @@ -249,6 +257,8 @@ describe('Threads 2.0', () => { expect(stateAfter.deletedAt).to.be.not.null; expect(stateAfter.deletedAt!.toISOString()).to.equal(updatedMessage.deleted_at); expect(stateAfter.replyCount).to.equal(updatedMessage.reply_count); + expect(stateAfter.participants).to.have.lengthOf(1); + expect(stateAfter.participants?.[0].user_id).to.equal('participant-1'); expect(stateAfter.parentMessage.text).to.equal(updatedMessage.text); }); }); @@ -999,6 +1009,76 @@ describe('Threads 2.0', () => { thread.unregisterSubscriptions(); }); + it('increments local reply_count on new reply', () => { + const thread = createTestThread({ + reply_count: 0, + read: [ + { + last_read: new Date().toISOString(), + user: { id: TEST_USER_ID }, + unread_messages: 0, + }, + ], + }); + thread.registerSubscriptions(); + + const newMessage = generateMsg({ + parent_id: thread.id, + user: { id: 'bob' }, + }) as MessageResponse; + + client.dispatchEvent({ + type: 'message.new', + message: newMessage, + user: { id: 'bob' }, + }); + + const 
stateAfter = thread.state.getLatestValue(); + expect(stateAfter.replyCount).to.equal(1); + expect(stateAfter.parentMessage.reply_count).to.equal(1); + + thread.unregisterSubscriptions(); + }); + + it('does not increment local reply_count for duplicate message.new events', () => { + const existingReply = generateMsg({ + parent_id: parentMessageResponse.id, + user: { id: 'bob' }, + }) as MessageResponse; + const thread = createTestThread({ + latest_replies: [existingReply], + reply_count: 1, + read: [ + { + user: { id: TEST_USER_ID }, + last_read: new Date().toISOString(), + unread_messages: 0, + }, + ], + }); + thread.registerSubscriptions(); + + thread.state.next((current) => ({ + ...current, + parentMessage: { + ...current.parentMessage, + reply_count: 1, + }, + })); + + client.dispatchEvent({ + type: 'message.new', + message: existingReply, + user: { id: 'bob' }, + }); + + const stateAfter = thread.state.getLatestValue(); + expect(stateAfter.replyCount).to.equal(1); + expect(stateAfter.parentMessage.reply_count).to.equal(1); + + thread.unregisterSubscriptions(); + }); + it('handles receiving a reply that was previously optimistically added', () => { const thread = createTestThread({ latest_replies: [generateMsg() as MessageResponse], From 43d365faf5e6c9045e6486ac13d5ba9b5e9767b0 Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 23:07:27 +0100 Subject: [PATCH 27/31] feat: decouple request sort from in-memory item order via BasePaginator itemOrderComparator --- specs/message-paginator/decisions.md | 31 +++++ specs/message-paginator/plan.md | 111 ++++++++++++++++++ specs/message-paginator/spec.md | 29 +++++ specs/message-paginator/state.json | 14 +++ src/pagination/paginators/BasePaginator.ts | 41 ++++--- src/pagination/paginators/MessagePaginator.ts | 59 ++++++++-- src/thread.ts | 4 +- .../paginators/MessagePaginator.test.ts | 73 ++++++++++++ test/unit/threads.test.ts | 4 + 9 files changed, 342 insertions(+), 24 deletions(-) create mode 100644 
specs/message-paginator/decisions.md create mode 100644 specs/message-paginator/plan.md create mode 100644 specs/message-paginator/spec.md create mode 100644 specs/message-paginator/state.json diff --git a/specs/message-paginator/decisions.md b/specs/message-paginator/decisions.md new file mode 100644 index 0000000000..47a102cc06 --- /dev/null +++ b/specs/message-paginator/decisions.md @@ -0,0 +1,31 @@ +# Decisions + +## 2026-03-05 - Decouple request sort from in-memory ordering + +- Decision: `MessagePaginator` will keep request `sort` configurable for backend calls, but internal interval/state ordering remains chronological (oldest -> newest). +- Why: Backend sort should not redefine paginator semantics used by Channel/Thread traversal and cursor/head-tail logic. + +## 2026-03-05 - Thread requests newest-first while preserving chronological iteration + +- Decision: `Thread` will request replies using `created_at: -1`, while paginator output remains oldest -> newest. +- Why: This satisfies thread loading expectations without changing consumer iteration assumptions. + +## 2026-03-05 - Do not modify BasePaginator contract + +- Decision: Decoupling will be implemented entirely in `MessagePaginator` via explicit request sort and item order handling. +- Why: `BasePaginator` is used by multiple subclasses and changing its contract would risk cross-paginator regressions. + +## 2026-03-05 - Preserve `sort` option as backward-compatible alias + +- Decision: Keep `MessagePaginatorOptions.sort` working as an alias for request sorting and add explicit `requestSort`. +- Why: Existing integrations may already pass `sort`; alias keeps semver compatibility while making intent explicit. + +## 2026-03-05 - Canonicalize query pages inside MessagePaginator + +- Decision: Normalize queried message pages to canonical chronological order before cursor derivation and interval ingestion. 
+- Why: `BasePaginator` interval/head-tail semantics in `MessagePaginator` assume chronological item ordering. + +## 2026-03-05 - Additive BasePaginator item-order extension + +- Decision: Add optional `itemOrderComparator` to `BasePaginator` options/config and use it for interval/item ordering, while defaulting to `sortComparator`. +- Why: This keeps backward compatibility (`itemOrder = requestOrder` by default) and lets specific paginators decouple backend request order from in-memory ordering. diff --git a/specs/message-paginator/plan.md b/specs/message-paginator/plan.md new file mode 100644 index 0000000000..f72222e861 --- /dev/null +++ b/specs/message-paginator/plan.md @@ -0,0 +1,111 @@ +# Worktree + +- Path: `/Users/martincupela/Projects/stream/chat/stream-chat-js` +- Branch: `feat/message-paginator` +- Base branch: `master` + +Task plan assumes self-contained tasks; same-file tasks are explicitly chained to avoid overlap. + +## Task 1: Define Decoupled Ordering Contract + +**File(s) to create/modify:** `specs/message-paginator/spec.md`, `specs/message-paginator/decisions.md` + +**Dependencies:** None + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Document that request sort and in-memory paginator order are separate concerns. +- Capture rationale and boundary with `ChannelState.messageSets`. + +**Acceptance Criteria:** + +- [x] Spec states desired behavior and constraints. +- [x] Decision log records why decoupling is required. + +## Task 2: Implement MessagePaginator Decoupling + +**File(s) to create/modify:** `src/pagination/paginators/MessagePaginator.ts`, `src/thread.ts` + +**Dependencies:** Task 1 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Introduce generic item-order comparator support in `BasePaginator`. +- Introduce explicit request sort and item-order semantics in `MessagePaginator` options. +- Keep backend request sort configurable. 
+- Keep internal paginator comparator/order chronological and independent from request sort. +- Ensure cursor derivation works even if backend returns pages in reverse order. +- Keep `Thread` request sort newest-first and default page size behavior. + +**Acceptance Criteria:** + +- [x] BasePaginator has additive item-order comparator support, defaulting to existing behavior. +- [x] Thread paginator requests `created_at: -1` while `state.items` ordering remains chronological. +- [x] Channel paginator behavior remains unchanged. +- [x] Cursors/head-tail flags remain correct in tests. + +## Task 3: Add Regression Tests + +**File(s) to create/modify:** `test/unit/pagination/paginators/MessagePaginator.test.ts`, `test/unit/threads.test.ts` + +**Dependencies:** Task 2 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Add tests proving request sort does not redefine item iteration order. +- Verify thread defaults (`sort`, page size) and query behavior. +- Run regression tests for other paginators extending `BasePaginator` to confirm compatibility. + +**Acceptance Criteria:** + +- [x] Unit tests fail before implementation and pass after. +- [x] New assertions cover both request call params and returned item ordering. +- [x] Existing tests for other paginator subclasses pass without modifications in their implementations. + +## Task 4: Reflect Results in Ralph State + +**File(s) to create/modify:** `specs/message-paginator/state.json`, `specs/message-paginator/decisions.md`, `specs/message-paginator/plan.md` + +**Dependencies:** Task 3 + +**Status:** done + +**Owner:** codex + +**Scope:** + +- Update task statuses and summary of outcomes. +- Record any follow-up risks. + +**Acceptance Criteria:** + +- [x] state.json mirrors real task status. +- [x] decisions.md has append-only entries for key choices. + +## Execution Order + +1. Phase 1 (serial): Task 1 +2. Phase 2 (serial, same-file dependency): Task 2 +3. Phase 3 (serial, same-file dependency): Task 3 +4. 
Phase 4 (serial): Task 4 + +## File Ownership Summary + +| Task | Creates/Modifies | +| ------ | --------------------------------------------------------------------------------------------------------------- | +| Task 1 | `specs/message-paginator/spec.md`, `specs/message-paginator/decisions.md` | +| Task 2 | `src/pagination/paginators/MessagePaginator.ts`, `src/thread.ts` | +| Task 3 | `test/unit/pagination/paginators/MessagePaginator.test.ts`, `test/unit/threads.test.ts` | +| Task 4 | `specs/message-paginator/state.json`, `specs/message-paginator/decisions.md`, `specs/message-paginator/plan.md` | diff --git a/specs/message-paginator/spec.md b/specs/message-paginator/spec.md new file mode 100644 index 0000000000..26a1740884 --- /dev/null +++ b/specs/message-paginator/spec.md @@ -0,0 +1,29 @@ +# Message Paginator: Request Sort vs Internal Order + +## Goal + +Decouple backend request sort parameters from in-memory message ordering in `MessagePaginator` using a generic `BasePaginator` ordering extension, so consumers can request newest-first pages while still iterating messages oldest-to-newest. + +## Success Criteria + +- `MessagePaginator` can call backend APIs (`channel.query` / `channel.getReplies`) with configurable `sort` values. +- `BasePaginator` supports a generic item-order comparator that controls interval/item ordering. +- For paginators that do not provide item-order comparator, behavior remains unchanged (item order follows existing request/comparator semantics). +- `MessagePaginator` exposes explicit request sorting configuration separate from item ordering semantics. +- `MessagePaginator.state.items` remain in stable chronological order (oldest -> newest) regardless of request sort. +- Cursor/head-tail semantics remain correct for message pagination after the decoupling. +- `Thread` can request replies with `created_at: -1` without changing paginator output order. +- Unit tests cover the decoupled behavior. 
+ +## Constraints + +- Keep backward compatibility for existing channel-level pagination behavior. +- Preserve existing public exports and avoid breaking API removals. +- Do not rely on `ChannelState.messageSets` for paginator ordering behavior. +- `BasePaginator` extension must be additive and backward compatible. + +## Non-Goals + +- Rewriting legacy `Thread.state.replies` pagination flow. +- Refactoring unrelated paginator types. +- UI-level rendering changes. diff --git a/specs/message-paginator/state.json b/specs/message-paginator/state.json new file mode 100644 index 0000000000..abfb1bda7c --- /dev/null +++ b/specs/message-paginator/state.json @@ -0,0 +1,14 @@ +{ + "active_task": null, + "tasks": { + "Task 1": "done", + "Task 2": "done", + "Task 3": "done", + "Task 4": "done" + }, + "flags": { + "blocked": false, + "needs-review": false + }, + "last_updated": "2026-03-05" +} diff --git a/src/pagination/paginators/BasePaginator.ts b/src/pagination/paginators/BasePaginator.ts index 658a6b4a0f..52bdf89a76 100644 --- a/src/pagination/paginators/BasePaginator.ts +++ b/src/pagination/paginators/BasePaginator.ts @@ -302,6 +302,11 @@ export type PaginatorOptions = { initialOffset?: number; /** If item index is provided, this index ensures updates in a single place and all consumers have access to a single source of data. */ itemIndex?: ItemIndex; + /** + * Comparator defining in-memory item ordering for interval math and visible list rendering. + * Defaults to `sortComparator` to preserve existing paginator behavior. + */ + itemOrderComparator?: (a: T, b: T) => number; /** * Will prevent changing the index of existing items in state. * If true, an item that is already visible keeps its relative position in the current items array when updated. 
@@ -320,6 +325,7 @@ type OptionalPaginatorConfigFields = | 'initialCursor' | 'initialOffset' | 'itemIndex' + | 'itemOrderComparator' | 'throwErrors'; export type BasePaginatorConfig = Pick< @@ -537,6 +543,10 @@ export abstract class BasePaginator { return this.boostComparator; } + protected get itemOrderComparator() { + return this.config.itemOrderComparator ?? this.sortComparator; + } + get intervalComparator() { return (a: AnyInterval, b: AnyInterval) => { const aEdges = this.getIntervalPaginationEdges(a); @@ -657,7 +667,7 @@ export abstract class BasePaginator { const seqDistance = (boostB.seq ?? 0) - (boostA.seq ?? 0); if (seqDistance !== 0) return seqDistance > 0 ? 1 : -1; } - return this.sortComparator(a, b); + return this.itemOrderComparator(a, b); }; /** @@ -719,7 +729,7 @@ export abstract class BasePaginator { } makeInterval({ page, isHead, isTail }: MakeIntervalParams): Interval { - const sorted = [...page].sort((a, b) => this.sortComparator(a, b)); + const sorted = [...page].sort((a, b) => this.itemOrderComparator(a, b)); return { id: this.generateIntervalId(page), // Default semantics: @@ -815,20 +825,20 @@ export abstract class BasePaginator { } protected compareIntervalHeadEdges(a: T, b: T): number { - const cmp = this.sortComparator(a, b); + const cmp = this.itemOrderComparator(a, b); return this.intervalSortDirection === 'asc' ? cmp : -cmp; } protected aIsMoreHeadwardThanB(a: T, b: T): boolean { return this.intervalItemIdsAreHeadFirst - ? this.sortComparator(a, b) === ComparisonResult.A_PRECEDES_B - : this.sortComparator(b, a) === ComparisonResult.A_PRECEDES_B; + ? this.itemOrderComparator(a, b) === ComparisonResult.A_PRECEDES_B + : this.itemOrderComparator(b, a) === ComparisonResult.A_PRECEDES_B; } protected aIsMoreTailwardThanB(a: T, b: T): boolean { return this.intervalItemIdsAreHeadFirst - ? this.sortComparator(b, a) === ComparisonResult.A_PRECEDES_B - : this.sortComparator(a, b) === ComparisonResult.A_PRECEDES_B; + ? 
this.itemOrderComparator(b, a) === ComparisonResult.A_PRECEDES_B + : this.itemOrderComparator(a, b) === ComparisonResult.A_PRECEDES_B; } protected getHeadIntervalFromSortedIntervals( @@ -874,8 +884,8 @@ export abstract class BasePaginator { const bBounds = this.getIntervalSortBounds(b); if (!aBounds || !bBounds) return false; return ( - this.sortComparator(aBounds.start, bBounds.end) <= 0 && - this.sortComparator(bBounds.start, aBounds.end) <= 0 + this.itemOrderComparator(aBounds.start, bBounds.end) <= 0 && + this.itemOrderComparator(bBounds.start, aBounds.end) <= 0 ); } @@ -904,8 +914,8 @@ export abstract class BasePaginator { // Strict overlap if: // a.first <= b.last && b.first <= a.last if ( - this.sortComparator(aBounds.start, bBounds.end) <= 0 && - this.sortComparator(bBounds.start, aBounds.end) <= 0 + this.itemOrderComparator(aBounds.start, bBounds.end) <= 0 && + this.itemOrderComparator(bBounds.start, aBounds.end) <= 0 ) return true; @@ -936,7 +946,10 @@ export abstract class BasePaginator { const sortBounds = this.getIntervalSortBounds(interval); if (!sortBounds) return false; const { start, end } = sortBounds; - if (this.sortComparator(start, item) <= 0 && this.sortComparator(item, end) <= 0) + if ( + this.itemOrderComparator(start, item) <= 0 && + this.itemOrderComparator(item, end) <= 0 + ) return true; const edges = this.getIntervalPaginationEdges(interval); @@ -973,7 +986,7 @@ export abstract class BasePaginator { itemIdentityEquals: (item1, item2) => this.getItemId(item1) === this.getItemId(item2), // inter-interval operation sorts using the base comparator - compare: this.sortComparator.bind(this), + compare: this.itemOrderComparator.bind(this), }); if (insertionIndex > -1) { merged.splice(insertionIndex, 0, item); @@ -1086,7 +1099,7 @@ export abstract class BasePaginator { itemIdentityEquals: (item1, item2) => this.getItemId(item1) === this.getItemId(item2), // items in intervals are not sorted by effectiveComparator - compare: 
this.sortComparator.bind(this), + compare: this.itemOrderComparator.bind(this), plateauScan: true, }); } diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index a3665f5e20..10f0b4a955 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -113,6 +113,19 @@ export type MessagePaginatorOptions = { id?: string; itemIndex?: ItemIndex; parentMessageId?: string; + /** + * Sort passed to backend message/replies query. + * Does not affect in-memory item ordering. + */ + requestSort?: MessagePaginatorSort; + /** + * @deprecated Use `requestSort` instead. + */ + sort?: MessagePaginatorSort; + /** + * In-memory ordering for items exposed by paginator state. + */ + itemOrder?: MessagePaginatorSort; paginatorOptions?: PaginatorOptions; /** * Controls whether `jumpToTheFirstUnreadMessage()` should prefer the `unreadStateSnapshot` @@ -142,8 +155,8 @@ export type UnreadSnapshotState = { }; /** - * MessagePaginator does not allow for sorting or filtering the items, because it is based on channe.query() and - * not client.search() calls. So the paginator just updates the cursor. + * MessagePaginator allows configuring backend request sort, while keeping internal item ordering stable. + * Filtering of ingested items is still limited to local predicates (`filterQueryResults`). 
*/ export class MessagePaginator extends BasePaginator { private readonly _id: string; @@ -158,7 +171,8 @@ export class MessagePaginator extends BasePaginator; private clearMessageFocusSignalTimeoutId: ReturnType | null = null; private messageFocusSignalToken = 0; - protected _sort = DEFAULT_BACKEND_SORT; + protected _requestSort = DEFAULT_BACKEND_SORT; + protected _itemOrder: MessagePaginatorSort = DEFAULT_BACKEND_SORT; protected _nextQueryShape: MessageQueryShape | undefined; sortComparator: (a: LocalMessage, b: LocalMessage) => number; /** @@ -186,9 +200,14 @@ export class MessagePaginator extends BasePaginator item.id }), parentMessageId, + requestSort, + sort, + itemOrder, paginatorOptions, unreadReferencePolicy = 'snapshot', }: MessagePaginatorOptions) { + const resolvedRequestSort = requestSort ?? sort ?? DEFAULT_BACKEND_SORT; + const resolvedItemOrder = itemOrder ?? resolvedRequestSort; super({ hasPaginationQueryShapeChanged, initialCursor: ZERO_PAGE_CURSOR, @@ -200,7 +219,8 @@ export class MessagePaginator extends BasePaginator({ lastReadAt: null, @@ -212,7 +232,16 @@ export class MessagePaginator extends BasePaginator({ - sort: this._sort, + sort: this._requestSort, + resolvePathValue: resolveDotPathValue, + tiebreaker: (l, r) => { + const leftId = this.getItemId(l); + const rightId = this.getItemId(r); + return leftId < rightId ? -1 : leftId > rightId ? 
1 : 0; + }, + }); + this.config.itemOrderComparator = makeComparator({ + sort: this._itemOrder, resolvePathValue: resolveDotPathValue, tiebreaker: (l, r) => { const leftId = this.getItemId(l); @@ -228,7 +257,15 @@ export class MessagePaginator extends BasePaginator items.filter(this.shouldIncludeMessageInInterval.bind(this)); + + private getCanonicalQueryItems(items: LocalMessage[]): LocalMessage[] { + return [...items].sort(this.itemOrderComparator); + } } const makeDeriveCursor = diff --git a/src/thread.ts b/src/thread.ts index 9f79009d65..54b6d25970 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -74,6 +74,7 @@ export type ThreadReadState = Record; const DEFAULT_PAGE_LIMIT = 50; const DEFAULT_SORT: { created_at: AscDesc }[] = [{ created_at: -1 }]; +const DEFAULT_ITEM_ORDER: { created_at: AscDesc } = { created_at: 1 }; // TODO: remove this once we move to API v2 export const THREAD_RESPONSE_RESERVED_KEYS: Record = { active_participant_count: true, @@ -236,7 +237,8 @@ export class Thread extends WithSubscriptions { this.messagePaginator = new MessagePaginator({ channel: this.channel, parentMessageId: this.id, - sort: DEFAULT_SORT, + requestSort: DEFAULT_SORT, + itemOrder: DEFAULT_ITEM_ORDER, paginatorOptions: { pageSize: DEFAULT_PAGE_LIMIT, }, diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts index 563552e8e8..fc54efbebe 100644 --- a/test/unit/pagination/paginators/MessagePaginator.test.ts +++ b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -83,6 +83,44 @@ describe('MessagePaginator', () => { expect(paginator.sort).toEqual({ created_at: 1 }); expect(paginator.config.doRequest).toBe(doRequest); }); + + it('respects provided sort option', () => { + const paginator = new MessagePaginator({ + channel, + sort: [{ created_at: -1 }], + }); + + expect(paginator.sort).toEqual([{ created_at: -1 }]); + expect(paginator.requestSort).toEqual([{ created_at: -1 }]); + 
expect(paginator.itemOrder).toEqual([{ created_at: -1 }]); + + const newer = createMessage({ id: 'b', created_at: '2021-01-01T00:00:00.000Z' }); + const older = createMessage({ id: 'a', created_at: '2020-01-01T00:00:00.000Z' }); + expect(paginator.sortComparator(older, newer)).toBeGreaterThan(0); + }); + + it('prefers requestSort over deprecated sort alias', () => { + const paginator = new MessagePaginator({ + channel, + requestSort: [{ created_at: 1 }], + sort: [{ created_at: -1 }], + }); + + expect(paginator.requestSort).toEqual([{ created_at: 1 }]); + expect(paginator.sort).toEqual([{ created_at: 1 }]); + expect(paginator.itemOrder).toEqual([{ created_at: 1 }]); + }); + + it('uses itemOrder when provided to decouple in-memory order from request sort', () => { + const paginator = new MessagePaginator({ + channel, + requestSort: [{ created_at: -1 }], + itemOrder: [{ created_at: 1 }], + }); + + expect(paginator.requestSort).toEqual([{ created_at: -1 }]); + expect(paginator.itemOrder).toEqual([{ created_at: 1 }]); + }); }); describe('query shape handling', () => { @@ -218,6 +256,41 @@ describe('MessagePaginator', () => { expect(result.items[0].created_at).toBeInstanceOf(Date); expect(result.items[1].created_at).toBeInstanceOf(Date); }); + + it('keeps items ordered chronologically when itemOrder is ascending and request sort is descending', async () => { + const messages = [ + { id: 'newest-reply', created_at: '2022-01-03T00:00:00.000Z' }, + { id: 'middle-reply', created_at: '2022-01-02T00:00:00.000Z' }, + { id: 'oldest-reply', created_at: '2022-01-01T00:00:00.000Z' }, + ]; + (channel.getReplies as unknown as ReturnType).mockResolvedValue({ + messages, + }); + const paginator = new MessagePaginator({ + channel, + itemIndex, + parentMessageId: 'parent-1', + requestSort: [{ created_at: -1 }], + itemOrder: [{ created_at: 1 }], + }); + // @ts-expect-error setting protected field for test coverage + paginator._nextQueryShape = { id_gt: 'from-cursor', limit: 30 }; + + 
const result = await paginator.query({}); + + expect(channel.getReplies).toHaveBeenCalledWith( + 'parent-1', + { id_gt: 'from-cursor', limit: 30 }, + [{ created_at: -1 }], + ); + expect(result.items.map((message) => message.id)).toEqual([ + 'oldest-reply', + 'middle-reply', + 'newest-reply', + ]); + expect(result.tailward).toBe('oldest-reply'); + expect(result.headward).toBe('newest-reply'); + }); }); describe('jumpToMessage()', () => { diff --git a/test/unit/threads.test.ts b/test/unit/threads.test.ts index f73140a2ed..13e39ee250 100644 --- a/test/unit/threads.test.ts +++ b/test/unit/threads.test.ts @@ -100,6 +100,8 @@ describe('Threads 2.0', () => { // @ts-expect-error `name` is a custom property expect(thread.channel.data?.name).to.equal(channelResponse.name); expect(thread.messagePaginator.sort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.requestSort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.itemOrder).to.deep.equal({ created_at: 1 }); expect(thread.messagePaginator.pageSize).to.equal(50); }); @@ -117,6 +119,8 @@ describe('Threads 2.0', () => { expect(state.pagination.nextCursor).to.be.null; expect(state.read).to.have.keys([TEST_USER_ID]); expect(thread.messagePaginator.sort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.requestSort).to.deep.equal([{ created_at: -1 }]); + expect(thread.messagePaginator.itemOrder).to.deep.equal({ created_at: 1 }); expect(thread.messagePaginator.pageSize).to.equal(50); }); From 24441990b7588e03b73fa8e847c0caf8d18325ac Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 23:08:41 +0100 Subject: [PATCH 28/31] fix: make Channel's MessagePaginator ingest message on message.new --- src/channel.ts | 2 ++ test/unit/channel.test.js | 12 ++++++++++++ 2 files changed, 14 insertions(+) diff --git a/src/channel.ts b/src/channel.ts index cb86550e01..9bc9483c02 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -2329,6 +2329,8 @@ export class Channel { 
channelState.addPinnedMessage(event.message); } + this.messagePaginator.ingestItem(formatMessage(event.message)); + // do not increase the unread count - the back-end does not increase the count neither in the following cases: // 1. the message is mine // 2. the message is a thread reply from any user diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index daa9f26cb4..30c635055f 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -335,6 +335,18 @@ describe('Channel _handleChannelEvent', function () { expect(channel.state.unreadCount).to.be.equal(100); }); + it('message.new ingests message into messagePaginator even for own messages', function () { + const message = generateMsg({ id: 'own-message-id', user }); + + channel._handleChannelEvent({ + type: 'message.new', + user, + message, + }); + + expect(channel.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + }); + it('message.new increment unreadCount properly', function () { channel.state.unreadCount = 20; channel._handleChannelEvent({ From a97105629cb276190f27d4b9701d8839238a1f0a Mon Sep 17 00:00:00 2001 From: martincupela Date: Thu, 5 Mar 2026 23:32:40 +0100 Subject: [PATCH 29/31] fix: make Channel's MessagePaginator react to channel.truncated, user.messages.deleted and message.deleted events --- src/channel.ts | 25 ++-- src/pagination/paginators/MessagePaginator.ts | 52 +++++++- test/unit/channel.test.js | 113 ++++++++++++++++++ .../paginators/MessagePaginator.test.ts | 79 ++++++++++++ 4 files changed, 260 insertions(+), 9 deletions(-) diff --git a/src/channel.ts b/src/channel.ts index 9bc9483c02..b2ef0e9107 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -2294,8 +2294,13 @@ export class Channel { case 'message.deleted': if (event.message) { this._extendEventWithOwnReactions(event); - if (event.hard_delete) channelState.removeMessage(event.message); - else channelState.addMessageSorted(event.message, false, false); + if (event.hard_delete) { + 
channelState.removeMessage(event.message); + this.messagePaginator.removeItem({ id: event.message.id }); + } else { + channelState.addMessageSorted(event.message, false, false); + this.messagePaginator.ingestItem(formatMessage(event.message)); + } channelState.removeQuotedMessageReferences(event.message); @@ -2306,11 +2311,15 @@ export class Channel { break; case 'user.messages.deleted': if (event.user) { - this.state.deleteUserMessages( - event.user, - !!event.hard_delete, - new Date(event.created_at ?? Date.now()), - ); + const deletedAt = new Date(event.created_at ?? Date.now()); + const hardDelete = !!event.hard_delete; + this.messagePaginator.applyMessageDeletionForUser({ + userId: event.user.id, + hardDelete, + deletedAt, + }); + + this.state.deleteUserMessages(event.user, hardDelete, deletedAt); } break; case 'message.new': @@ -2431,7 +2440,7 @@ export class Channel { } } - this.messagePaginator.clearUnreadSnapshot(); + this.messagePaginator.clearStateAndCache(); break; case 'member.added': diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index 10f0b4a955..87055df480 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -26,7 +26,7 @@ import type { } from '../../types'; import type { Channel } from '../../channel'; import { StateStore } from '../../store'; -import { formatMessage, generateUUIDv4 } from '../../utils'; +import { formatMessage, generateUUIDv4, toDeletedMessage } from '../../utils'; import { makeComparator } from '../sortCompiler'; import type { FieldToDataResolver } from '../types.normalization'; import { resolveDotPathValue } from '../utility.normalization'; @@ -692,6 +692,56 @@ export class MessagePaginator extends BasePaginator { + this.resetState(); + this._itemIndex.clear(); + this.clearUnreadSnapshot(); + this.clearMessageFocusSignal(); + }; + + applyMessageDeletionForUser = ({ + userId, + hardDelete = false, + deletedAt, 
+ }: { + userId: string; + hardDelete?: boolean; + deletedAt: Date; + }) => { + const loadedMessages = this.items ?? []; + + for (const message of loadedMessages) { + if (message.user?.id === userId) { + if (hardDelete) { + this.removeItem({ id: message.id }); + } else { + this.ingestItem( + toDeletedMessage({ + message, + hardDelete, + deletedAt, + }) as LocalMessage, + ); + } + continue; + } + + if ( + message.quoted_message?.user?.id === userId && + message.quoted_message.type !== 'deleted' + ) { + this.ingestItem({ + ...message, + quoted_message: toDeletedMessage({ + message: message.quoted_message, + hardDelete, + deletedAt, + }) as LocalMessage, + }); + } + } + }; + filterQueryResults = (items: LocalMessage[]) => items.filter(this.shouldIncludeMessageInInterval.bind(this)); diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index 30c635055f..492df3bf5a 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -432,6 +432,12 @@ describe('Channel _handleChannelEvent', function () { }); it('message.truncate clears messagePaginator unread snapshot', function () { + const cachedMessage = generateMsg({ id: 'truncate-cached-message-id' }); + channel.messagePaginator.setItems({ + valueOrFactory: [cachedMessage], + isFirstPage: true, + isLastPage: true, + }); channel.messagePaginator.setUnreadSnapshot({ firstUnreadMessageId: 'm-1', lastReadAt: new Date('2021-01-01T00:00:00.000Z'), @@ -453,6 +459,8 @@ describe('Channel _handleChannelEvent', function () { lastReadMessageId: null, unreadCount: 0, }); + expect(channel.messagePaginator.items).toBeUndefined(); + expect(channel.messagePaginator.getItem(cachedMessage.id)).toBeUndefined(); }); it('message.truncate removes messages up to specified date', function () { @@ -540,6 +548,38 @@ describe('Channel _handleChannelEvent', function () { ).to.be.ok; }); + it('message.deleted hard delete removes message from messagePaginator', function () { + const message = generateMsg({ id: 
'hard-delete-message-id', silent: true }); + channel.messagePaginator.ingestItem(message); + expect(channel.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + + channel._handleChannelEvent({ + type: 'message.deleted', + user: { id: 'id' }, + hard_delete: true, + message, + }); + + expect( + channel.messagePaginator.items?.find((m) => m.id === message.id), + ).toBeUndefined(); + }); + + it('message.deleted soft delete updates message in messagePaginator', function () { + const message = generateMsg({ id: 'soft-delete-message-id', text: 'before delete' }); + channel.messagePaginator.ingestItem(message); + + const deletedAt = new Date().toISOString(); + channel._handleChannelEvent({ + type: 'message.deleted', + user: { id: 'id' }, + message: { ...message, deleted_at: deletedAt }, + }); + + const itemFromPaginator = channel.messagePaginator.getItem(message.id); + expect(itemFromPaginator?.deleted_at?.toISOString()).to.equal(deletedAt); + }); + describe('user.messages.deleted', () => { const bannedUser = { id: 'banned-user' }; const otherUser = { id: 'other-user' }; @@ -718,6 +758,79 @@ describe('Channel _handleChannelEvent', function () { channel.state.pinnedMessages.forEach(check); Object.values(channel.state.threads).forEach((replies) => replies.forEach(check)); }); + + it('updates messagePaginator items on soft delete', () => { + const deletedAt = new Date('2025-02-01T14:01:30.000Z'); + const bannedMessage = generateMsg({ id: 'mp-soft-banned', user: bannedUser }); + const quoteCarrier = generateMsg({ + id: 'mp-soft-quote-carrier', + quoted_message: bannedMessage, + quoted_message_id: bannedMessage.id, + user: otherUser, + }); + channel.messagePaginator.setItems({ + valueOrFactory: [bannedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + channel._handleChannelEvent({ + type: 'user.messages.deleted', + cid: channel.cid, + channel_type: channel.type, + channel_id: channel.id, + user: bannedUser, + soft_delete: true, + created_at: 
deletedAt.toISOString(), + }); + + const deletedFromPaginator = channel.messagePaginator.getItem(bannedMessage.id); + expect(deletedFromPaginator?.type).to.equal('deleted'); + expect(deletedFromPaginator?.deleted_at?.toISOString()).to.equal( + deletedAt.toISOString(), + ); + + const quoteCarrierFromPaginator = channel.messagePaginator.getItem(quoteCarrier.id); + expect(quoteCarrierFromPaginator?.quoted_message?.type).to.equal('deleted'); + expect( + quoteCarrierFromPaginator?.quoted_message?.deleted_at?.toISOString(), + ).to.equal(deletedAt.toISOString()); + }); + + it('updates messagePaginator items on hard delete', () => { + const deletedAt = new Date('2025-02-01T14:01:30.000Z'); + const bannedMessage = generateMsg({ id: 'mp-hard-banned', user: bannedUser }); + const quoteCarrier = generateMsg({ + id: 'mp-hard-quote-carrier', + quoted_message: bannedMessage, + quoted_message_id: bannedMessage.id, + user: otherUser, + }); + channel.messagePaginator.setItems({ + valueOrFactory: [bannedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + channel._handleChannelEvent({ + type: 'user.messages.deleted', + cid: channel.cid, + channel_type: channel.type, + channel_id: channel.id, + user: bannedUser, + hard_delete: true, + created_at: deletedAt.toISOString(), + }); + + expect( + channel.messagePaginator.items?.find((m) => m.id === bannedMessage.id), + ).toBeUndefined(); + const quoteCarrierFromPaginator = channel.messagePaginator.getItem(quoteCarrier.id); + expect(quoteCarrierFromPaginator?.quoted_message?.type).to.equal('deleted'); + expect( + quoteCarrierFromPaginator?.quoted_message?.deleted_at?.toISOString(), + ).to.equal(deletedAt.toISOString()); + }); }); describe('notification.mark_unread', () => { diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts index fc54efbebe..a79b96de96 100644 --- a/test/unit/pagination/paginators/MessagePaginator.test.ts +++ 
b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -579,6 +579,85 @@ describe('MessagePaginator', () => { }); }); + describe('applyMessageDeletionForUser()', () => { + it('soft deletes user messages and quoted messages in paginator items', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const deletedAt = new Date('2025-02-01T14:01:30.000Z'); + + const bannedUser = { id: 'banned-user' }; + const otherUser = { id: 'other-user' }; + const bannedMessage = createMessage({ id: 'banned-message', user: bannedUser }); + const quoteCarrier = createMessage({ + id: 'quote-carrier', + quoted_message: bannedMessage, + quoted_message_id: bannedMessage.id, + user: otherUser, + }); + + paginator.setItems({ + valueOrFactory: [bannedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + paginator.applyMessageDeletionForUser({ + userId: bannedUser.id, + hardDelete: false, + deletedAt, + }); + + const deletedFromPaginator = paginator.getItem(bannedMessage.id); + expect(deletedFromPaginator?.type).toBe('deleted'); + expect(deletedFromPaginator?.deleted_at?.toISOString()).toBe( + deletedAt.toISOString(), + ); + + const quoteCarrierFromPaginator = paginator.getItem(quoteCarrier.id); + expect(quoteCarrierFromPaginator?.quoted_message?.type).toBe('deleted'); + expect(quoteCarrierFromPaginator?.quoted_message?.deleted_at?.toISOString()).toBe( + deletedAt.toISOString(), + ); + }); + + it('hard deletes user messages and marks quoted messages as deleted', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const deletedAt = new Date('2025-02-01T14:01:30.000Z'); + + const bannedUser = { id: 'banned-user' }; + const otherUser = { id: 'other-user' }; + const bannedMessage = createMessage({ + id: 'banned-message-hard', + user: bannedUser, + }); + const quoteCarrier = createMessage({ + id: 'quote-carrier-hard', + quoted_message: bannedMessage, + quoted_message_id: bannedMessage.id, + user: otherUser, + }); + + 
paginator.setItems({ + valueOrFactory: [bannedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + paginator.applyMessageDeletionForUser({ + userId: bannedUser.id, + hardDelete: true, + deletedAt, + }); + + expect(paginator.items?.find((m) => m.id === bannedMessage.id)).toBeUndefined(); + + const quoteCarrierFromPaginator = paginator.getItem(quoteCarrier.id); + expect(quoteCarrierFromPaginator?.quoted_message?.type).toBe('deleted'); + expect(quoteCarrierFromPaginator?.quoted_message?.deleted_at?.toISOString()).toBe( + deletedAt.toISOString(), + ); + }); + }); + describe.todo('postQueryReconcile and deriveCursor for', () => {}); describe('linear pagination', () => { describe('updates the hasMoreTail flag only if the first message on page is the first message in interval', () => { From 08d207324cadefe91d7a0ea72416d9cc5a0babeb Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 6 Mar 2026 00:43:07 +0100 Subject: [PATCH 30/31] fix: reflect quoted message update among MessagePaginator items and reflect reaction events --- src/channel.ts | 23 +- src/pagination/paginators/MessagePaginator.ts | 20 ++ src/thread.ts | 15 ++ test/unit/channel.test.js | 224 ++++++++++++++++++ .../paginators/MessagePaginator.test.ts | 36 +++ test/unit/threads.test.ts | 207 +++++++++++++++- 6 files changed, 521 insertions(+), 4 deletions(-) diff --git a/src/channel.ts b/src/channel.ts index b2ef0e9107..029d8f89cc 100644 --- a/src/channel.ts +++ b/src/channel.ts @@ -2294,13 +2294,15 @@ export class Channel { case 'message.deleted': if (event.message) { this._extendEventWithOwnReactions(event); + const formattedMessage = formatMessage(event.message); if (event.hard_delete) { channelState.removeMessage(event.message); this.messagePaginator.removeItem({ id: event.message.id }); } else { channelState.addMessageSorted(event.message, false, false); - this.messagePaginator.ingestItem(formatMessage(event.message)); + this.messagePaginator.ingestItem(formattedMessage); } + 
this.messagePaginator.reflectQuotedMessageUpdate(formattedMessage); channelState.removeQuotedMessageReferences(event.message); @@ -2338,7 +2340,9 @@ export class Channel { channelState.addPinnedMessage(event.message); } - this.messagePaginator.ingestItem(formatMessage(event.message)); + if (!isThreadMessage) { + this.messagePaginator.ingestItem(formatMessage(event.message)); + } // do not increase the unread count - the back-end does not increase the count neither in the following cases: // 1. the message is mine @@ -2399,8 +2403,12 @@ export class Channel { case 'message.undeleted': if (event.message) { this._extendEventWithOwnReactions(event); + const formattedMessage = formatMessage(event.message); channelState.addMessageSorted(event.message, false, false); - this.messagePaginator.ingestItem(formatMessage(event.message)); + if (!event.message.parent_id) { + this.messagePaginator.ingestItem(formattedMessage); + this.messagePaginator.reflectQuotedMessageUpdate(formattedMessage); + } channelState._updateQuotedMessageReferences({ message: event.message }); if (event.message.pinned) { channelState.addPinnedMessage(event.message); @@ -2553,12 +2561,18 @@ export class Channel { if (event.message && event.reaction) { const { message, reaction } = event; event.message = channelState.addReaction(reaction, message) as MessageResponse; + if (!event.message?.parent_id) { + this.messagePaginator.ingestItem(formatMessage(event.message)); + } } break; case 'reaction.deleted': if (event.message && event.reaction) { const { message, reaction } = event; event.message = channelState.removeReaction(reaction, message); + if (event.message && !event.message.parent_id) { + this.messagePaginator.ingestItem(formatMessage(event.message)); + } } break; case 'reaction.updated': @@ -2570,6 +2584,9 @@ export class Channel { message, true, ) as MessageResponse; + if (!event.message?.parent_id) { + this.messagePaginator.ingestItem(formatMessage(event.message)); + } } break; case 
'channel.hidden': { diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index 87055df480..12f3b4be5e 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -742,6 +742,26 @@ export class MessagePaginator extends BasePaginator { + const cachedMessages = this._itemIndex.values(); + + for (const cachedMessage of cachedMessages) { + if (cachedMessage.quoted_message_id !== message.id) continue; + + this.ingestItem({ + ...cachedMessage, + quoted_message: message, + }); + } + }; + filterQueryResults = (items: LocalMessage[]) => items.filter(this.shouldIncludeMessageInInterval.bind(this)); diff --git a/src/thread.ts b/src/thread.ts index 54b6d25970..d9ef2286c6 100644 --- a/src/thread.ts +++ b/src/thread.ts @@ -565,6 +565,7 @@ export class Thread extends WithSubscriptions { private subscribeMessageDeleted = () => this.client.on('message.deleted', (event) => { if (!event.message) return; + const formattedMessage = formatMessage(event.message); // Deleted message is a reply of this thread if (event.message.parent_id === this.id) { @@ -580,11 +581,14 @@ export class Thread extends WithSubscriptions { if (event.message.id === this.id) { this.updateParentMessageLocally({ message: event.message }); } + + this.messagePaginator.reflectQuotedMessageUpdate(formattedMessage); }).unsubscribe; private subscribeMessageUpdated = () => { const eventTypes: EventTypes[] = [ 'message.updated', + 'message.undeleted', 'reaction.new', 'reaction.deleted', 'reaction.updated', @@ -595,6 +599,17 @@ export class Thread extends WithSubscriptions { this.client.on(eventType, (event) => { if (event.message) { this.updateParentMessageOrReplyLocally(event.message); + if ( + ['reaction.new', 'reaction.deleted', 'reaction.updated'].includes( + eventType, + ) && + event.message.parent_id === this.id + ) { + this.messagePaginator.ingestItem(formatMessage(event.message)); + } + 
this.messagePaginator.reflectQuotedMessageUpdate( + formatMessage(event.message), + ); } }).unsubscribe, ); diff --git a/test/unit/channel.test.js b/test/unit/channel.test.js index 492df3bf5a..e1a54d2b1b 100644 --- a/test/unit/channel.test.js +++ b/test/unit/channel.test.js @@ -347,6 +347,22 @@ describe('Channel _handleChannelEvent', function () { expect(channel.messagePaginator.getItem(message.id)?.id).to.equal(message.id); }); + it('message.new ignores thread replies in messagePaginator', function () { + const message = generateMsg({ + id: 'thread-reply-message-id', + parent_id: 'parent-message-id', + user: { id: 'another-user' }, + }); + + channel._handleChannelEvent({ + type: 'message.new', + user: message.user, + message, + }); + + expect(channel.messagePaginator.getItem(message.id)).to.be.undefined; + }); + it('message.new increment unreadCount properly', function () { channel.state.unreadCount = 20; channel._handleChannelEvent({ @@ -408,6 +424,105 @@ describe('Channel _handleChannelEvent', function () { expect(parentFromPaginator?.thread_participants).to.have.length(2); }); + it('message.updated ignores thread replies in messagePaginator', function () { + const parentMessage = generateMsg({ id: 'thread-parent-id' }); + const threadReply = generateMsg({ + id: 'thread-reply-id', + parent_id: parentMessage.id, + text: 'before update', + }); + + channel.messagePaginator.ingestItem(parentMessage); + channel._handleChannelEvent({ + type: 'message.updated', + message: { ...threadReply, text: 'after update' }, + }); + + expect(channel.messagePaginator.getItem(threadReply.id)).to.be.undefined; + }); + + it('message.updated syncs quoted_message references in messagePaginator', function () { + const quotedMessage = generateMsg({ + id: 'quoted-message-id', + text: 'before update', + }); + const quoteCarrier = generateMsg({ + id: 'quote-carrier-id', + quoted_message_id: quotedMessage.id, + quoted_message: quotedMessage, + }); + + channel.messagePaginator.setItems({ + 
valueOrFactory: [quotedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + channel._handleChannelEvent({ + type: 'message.updated', + message: { + ...quotedMessage, + text: 'after update', + }, + }); + + expect( + channel.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.text, + ).to.equal('after update'); + }); + + it('message.undeleted ignores thread replies in messagePaginator', function () { + const parentMessage = generateMsg({ id: 'thread-parent-id-2' }); + const threadReply = generateMsg({ + id: 'thread-reply-id-2', + parent_id: parentMessage.id, + text: 'undeleted reply', + }); + + channel.messagePaginator.ingestItem(parentMessage); + channel._handleChannelEvent({ + type: 'message.undeleted', + message: threadReply, + }); + + expect(channel.messagePaginator.getItem(threadReply.id)).to.be.undefined; + }); + + it('message.undeleted syncs quoted_message references in messagePaginator', function () { + const quotedMessage = generateMsg({ + id: 'quoted-message-id-undeleted', + type: 'deleted', + text: 'before undelete', + }); + const quoteCarrier = generateMsg({ + id: 'quote-carrier-id-undeleted', + quoted_message_id: quotedMessage.id, + quoted_message: quotedMessage, + }); + + channel.messagePaginator.setItems({ + valueOrFactory: [quotedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + channel._handleChannelEvent({ + type: 'message.undeleted', + message: { + ...quotedMessage, + type: 'regular', + text: 'after undelete', + }, + }); + + expect( + channel.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.text, + ).to.equal('after undelete'); + expect( + channel.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.type, + ).to.equal('regular'); + }); + it('does not override the delivery information in the read status', () => {}); it('message.truncate removes all messages if "truncated_at" is "now"', function () { @@ -580,6 +695,115 @@ describe('Channel _handleChannelEvent', function () { 
expect(itemFromPaginator?.deleted_at?.toISOString()).to.equal(deletedAt); }); + it('message.deleted syncs quoted_message references in messagePaginator', function () { + const quotedMessage = generateMsg({ + id: 'quoted-message-id-on-delete', + text: 'before delete', + }); + const quoteCarrier = generateMsg({ + id: 'quote-carrier-id-on-delete', + quoted_message_id: quotedMessage.id, + quoted_message: quotedMessage, + }); + + channel.messagePaginator.setItems({ + valueOrFactory: [quotedMessage, quoteCarrier], + isFirstPage: true, + isLastPage: true, + }); + + channel._handleChannelEvent({ + type: 'message.deleted', + user: { id: 'id' }, + message: { + ...quotedMessage, + type: 'deleted', + text: 'after delete', + deleted_at: new Date().toISOString(), + }, + }); + + expect( + channel.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.type, + ).to.equal('deleted'); + }); + + it('reaction.new ingests message into messagePaginator for non-thread messages', function () { + const message = generateMsg({ id: 'reaction-channel-message-id' }); + + channel._handleChannelEvent({ + type: 'reaction.new', + message, + reaction: { + type: 'love', + user_id: 'user-1', + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(channel.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + }); + + it('reaction.new ignores thread replies in messagePaginator', function () { + const message = generateMsg({ + id: 'reaction-thread-message-id', + parent_id: 'thread-parent-id', + }); + + channel._handleChannelEvent({ + type: 'reaction.new', + message, + reaction: { + type: 'love', + user_id: 'user-1', + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(channel.messagePaginator.getItem(message.id)).to.be.undefined; + }); + + ['reaction.deleted', 'reaction.updated'].forEach((eventType) => { + it(`${eventType} ingests message into messagePaginator for non-thread messages`, function () { + const message = 
generateMsg({ id: `${eventType}-channel-message-id` }); + + channel._handleChannelEvent({ + type: eventType, + message, + reaction: { + type: 'love', + user_id: 'user-1', + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(channel.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + }); + + it(`${eventType} ignores thread replies in messagePaginator`, function () { + const message = generateMsg({ + id: `${eventType}-thread-message-id`, + parent_id: 'thread-parent-id', + }); + + channel._handleChannelEvent({ + type: eventType, + message, + reaction: { + type: 'love', + user_id: 'user-1', + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(channel.messagePaginator.getItem(message.id)).to.be.undefined; + }); + }); + describe('user.messages.deleted', () => { const bannedUser = { id: 'banned-user' }; const otherUser = { id: 'other-user' }; diff --git a/test/unit/pagination/paginators/MessagePaginator.test.ts b/test/unit/pagination/paginators/MessagePaginator.test.ts index a79b96de96..b95f6e4b70 100644 --- a/test/unit/pagination/paginators/MessagePaginator.test.ts +++ b/test/unit/pagination/paginators/MessagePaginator.test.ts @@ -658,6 +658,42 @@ describe('MessagePaginator', () => { }); }); + describe('reflectQuotedMessageUpdate()', () => { + it('updates quoted_message for cached items that quote provided message', () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const quoted = createMessage({ + id: 'quoted-1', + text: 'before update', + }); + const quoteCarrier = createMessage({ + id: 'carrier-1', + quoted_message_id: quoted.id, + quoted_message: quoted, + }); + const nonCarrier = createMessage({ + id: 'other-1', + quoted_message_id: 'another-quoted-id', + }); + + paginator.setItems({ + valueOrFactory: [quoted, quoteCarrier, nonCarrier], + isFirstPage: true, + isLastPage: true, + }); + + const updatedQuoted = { + ...quoted, + text: 'after update', + }; + 
paginator.reflectQuotedMessageUpdate(updatedQuoted); + + expect(paginator.getItem(quoteCarrier.id)?.quoted_message?.text).toBe( + 'after update', + ); + expect(paginator.getItem(nonCarrier.id)?.quoted_message).toBeNull(); + }); + }); + describe.todo('postQueryReconcile and deriveCursor for', () => {}); describe('linear pagination', () => { describe('updates the hasMoreTail flag only if the first message on page is the first message in interval', () => { diff --git a/test/unit/threads.test.ts b/test/unit/threads.test.ts index 13e39ee250..47b6e5fc1d 100644 --- a/test/unit/threads.test.ts +++ b/test/unit/threads.test.ts @@ -2,7 +2,7 @@ import { generateChannel } from './test-utils/generateChannel'; import { generateMsg } from './test-utils/generateMessage'; import { generateThreadResponse } from './test-utils/generateThreadResponse'; import { getClientWithUser } from './test-utils/getClient'; -import { generateUUIDv4 as uuidv4 } from '../../src/utils'; +import { formatMessage, generateUUIDv4 as uuidv4 } from '../../src/utils'; import sinon from 'sinon'; import { @@ -1269,6 +1269,43 @@ describe('Threads 2.0', () => { parentMessage.deleted_at, ); }); + + it('reflects quoted_message updates in messagePaginator cache', () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + + const quotedMessage = generateMsg({ + id: uuidv4(), + text: 'before delete', + }) as MessageResponse; + const quoteCarrier = generateMsg({ + id: uuidv4(), + parent_id: thread.id, + quoted_message_id: quotedMessage.id, + quoted_message: quotedMessage, + }) as MessageResponse; + + thread.messagePaginator.setItems({ + valueOrFactory: [quoteCarrier].map(formatMessage), + isFirstPage: true, + isLastPage: true, + }); + + client.dispatchEvent({ + type: 'message.deleted', + message: { + ...quotedMessage, + type: 'deleted', + deleted_at: new Date().toISOString(), + }, + }); + + expect( + thread.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.type, + ).to.equal('deleted'); + 
+ thread.unregisterSubscriptions(); + }); }); describe('Events: message.updated, reaction.new, reaction.deleted', () => { @@ -1298,6 +1335,174 @@ describe('Threads 2.0', () => { thread.unregisterSubscriptions(); }); }); + + it('ingests "reaction.new" message into thread messagePaginator when parent_id matches thread.id', () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + const message = generateMsg({ + id: uuidv4(), + parent_id: thread.id, + }) as MessageResponse; + + client.dispatchEvent({ + type: 'reaction.new', + message, + reaction: { + type: 'love', + user_id: TEST_USER_ID, + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(thread.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + + thread.unregisterSubscriptions(); + }); + + it('ignores "reaction.new" message in thread messagePaginator when parent_id does not match thread.id', () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + const message = generateMsg({ + id: uuidv4(), + parent_id: uuidv4(), + }) as MessageResponse; + + client.dispatchEvent({ + type: 'reaction.new', + message, + reaction: { + type: 'love', + user_id: TEST_USER_ID, + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(thread.messagePaginator.getItem(message.id)).to.be.undefined; + + thread.unregisterSubscriptions(); + }); + + (['reaction.deleted', 'reaction.updated'] as const).forEach((eventType) => { + it(`ingests "${eventType}" message into thread messagePaginator when parent_id matches thread.id`, () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + const message = generateMsg({ + id: uuidv4(), + parent_id: thread.id, + }) as MessageResponse; + + client.dispatchEvent({ + type: eventType, + message, + reaction: { + type: 'love', + user_id: TEST_USER_ID, + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + 
expect(thread.messagePaginator.getItem(message.id)?.id).to.equal(message.id); + + thread.unregisterSubscriptions(); + }); + + it(`ignores "${eventType}" message in thread messagePaginator when parent_id does not match thread.id`, () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + const message = generateMsg({ + id: uuidv4(), + parent_id: uuidv4(), + }) as MessageResponse; + + client.dispatchEvent({ + type: eventType, + message, + reaction: { + type: 'love', + user_id: TEST_USER_ID, + message_id: message.id, + created_at: new Date().toISOString(), + }, + }); + + expect(thread.messagePaginator.getItem(message.id)).to.be.undefined; + + thread.unregisterSubscriptions(); + }); + }); + + it('reflects quoted_message updates in messagePaginator on "message.updated"', () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + + const quotedMessage = generateMsg({ + id: uuidv4(), + text: 'before update', + }) as MessageResponse; + const quoteCarrier = generateMsg({ + id: uuidv4(), + parent_id: thread.id, + quoted_message_id: quotedMessage.id, + quoted_message: quotedMessage, + }) as MessageResponse; + + thread.messagePaginator.setItems({ + valueOrFactory: [quoteCarrier].map(formatMessage), + isFirstPage: true, + isLastPage: true, + }); + + client.dispatchEvent({ + type: 'message.updated', + message: { ...quotedMessage, text: 'after update' }, + }); + + expect( + thread.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.text, + ).to.equal('after update'); + + thread.unregisterSubscriptions(); + }); + + it('reflects quoted_message updates in messagePaginator on "message.undeleted"', () => { + const thread = createTestThread(); + thread.registerSubscriptions(); + + const quotedMessage = generateMsg({ + id: uuidv4(), + text: 'before undelete', + type: 'deleted', + }) as MessageResponse; + const quoteCarrier = generateMsg({ + id: uuidv4(), + parent_id: thread.id, + quoted_message_id: quotedMessage.id, + quoted_message: 
quotedMessage, + }) as MessageResponse; + + thread.messagePaginator.setItems({ + valueOrFactory: [quoteCarrier].map(formatMessage), + isFirstPage: true, + isLastPage: true, + }); + + client.dispatchEvent({ + type: 'message.undeleted', + message: { ...quotedMessage, type: 'regular', text: 'after undelete' }, + }); + + expect( + thread.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.text, + ).to.equal('after undelete'); + expect( + thread.messagePaginator.getItem(quoteCarrier.id)?.quoted_message?.type, + ).to.equal('regular'); + + thread.unregisterSubscriptions(); + }); }); }); }); From 31eedab4702b10e2b593c41d7431c47748d1726e Mon Sep 17 00:00:00 2001 From: martincupela Date: Fri, 6 Mar 2026 01:19:32 +0100 Subject: [PATCH 31/31] fix: emit new paginator state always when jumping to a message --- src/pagination/paginators/MessagePaginator.ts | 3 +- .../paginators/MessagePaginator.test.ts | 51 +++++++++++++++++++ 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/src/pagination/paginators/MessagePaginator.ts b/src/pagination/paginators/MessagePaginator.ts index 12f3b4be5e..82b0a95d15 100644 --- a/src/pagination/paginators/MessagePaginator.ts +++ b/src/pagination/paginators/MessagePaginator.ts @@ -451,8 +451,8 @@ export class MessagePaginator extends BasePaginator { // jumping back to the head interval should restore its tailward cursor expect(paginator.cursor?.tailward).toBe('m8'); }); + + it('emits merged state when jump resolves inside the active interval', async () => { + const paginator = new MessagePaginator({ channel, itemIndex }); + const existing = createMessage({ + cid: 'channel-id', + id: 'm-existing', + created_at: '2020-01-01T00:00:00.000Z', + }); + const target = createMessage({ + cid: 'channel-id', + id: 'm-target', + created_at: '2020-01-02T00:00:00.000Z', + }); + + const activeInterval = paginator.ingestPage({ + page: [existing], + isHead: true, + isTail: true, + setActive: true, + }); + + const partialNextSpy = 
vi.spyOn(paginator.state, 'partialNext'); + vi.spyOn(paginator, 'executeQuery').mockImplementation(async () => { + itemIndex.setOne(target); + if (activeInterval?.itemIds) { + activeInterval.itemIds = [existing.id, target.id]; + } + return { + stateCandidate: { + hasMoreHead: false, + hasMoreTail: false, + items: [existing, target], + isLoading: false, + }, + targetInterval: activeInterval ?? null, + }; + }); + + const ok = await paginator.jumpToMessage(target.id); + + expect(ok).toBe(true); + expect(partialNextSpy).toHaveBeenCalledWith( + expect.objectContaining({ + items: expect.arrayContaining([ + expect.objectContaining({ id: existing.id }), + expect.objectContaining({ id: target.id }), + ]), + }), + ); + expect(paginator.items?.map((m) => m.id)).toEqual([existing.id, target.id]); + }); }); describe.todo('jumpToTheLatestMessage', () => {});