Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 59 additions & 4 deletions apps/sim/app/api/workflows/route.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm'
import { and, asc, eq, gt, gte, inArray, isNull, lt, min, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
Expand All @@ -27,6 +27,9 @@ export async function GET(request: NextRequest) {
const startTime = Date.now()
const url = new URL(request.url)
const workspaceId = url.searchParams.get('workspaceId')
const cursor = url.searchParams.get('cursor')
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

cursor parsed but never applied to the query

The cursor variable is read from the query params but is never used in either of the two database queries. As a result, passing ?cursor=<id> is silently ignored and the server always returns the first page — making cursor-based pagination completely non-functional for callers who attempt to use it.

The gt operator imported on line 4 was presumably intended for a WHERE clause like .where(and(...existingWhere..., gt(workflow.id, cursor))), but that call was never added. Since gt is also unused, this will trigger a lint error (or a compiler error if noUnusedLocals is enabled).

To properly implement cursor-based pagination, the WHERE clause needs to be extended, e.g.:

// In the workspaceId branch
.where(
  cursor
    ? and(eq(workflow.workspaceId, workspaceId), gt(workflow.id, cursor))
    : eq(workflow.workspaceId, workspaceId)
)

Note also that using gt(workflow.id, cursor) only gives correct pagination when ordering solely by id. With the current compound ordering [asc(sortOrder), asc(createdAt), asc(id)], a pure id > cursor filter will skip or repeat rows where sortOrder or createdAt differs between pages. A correct keyset cursor needs to encode all three sort columns.

const limitParam = url.searchParams.get('limit')
const limit = Math.min(Math.max(parseInt(limitParam || '100', 10) || 100, 1), 500)
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Silent data truncation for callers expecting all workflows

High Severity

The new default limit of 100 silently truncates results for all existing callers, none of which handle nextCursor pagination. fetchWorkflows in workflows.ts only loads the first 100 workflows into the sidebar registry. use-export-workspace.ts only exports the first 100 workflows, producing silently incomplete ZIP backups. fetchDeployedWorkflows in workflow-mcp-servers.ts may miss deployed workflows. These are the exact large-workspace scenarios this PR targets.

Fix in Cursor Fix in Web


try {
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
Expand Down Expand Up @@ -66,12 +69,48 @@ export async function GET(request: NextRequest) {

const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]

// Fetch limit+1 to detect if there are more pages
const fetchLimit = limit + 1

// Build cursor condition for keyset pagination
// Cursor is base64-encoded JSON: { s: sortOrder, c: createdAt, i: id }
let cursorCondition = null
if (cursor) {
try {
const decoded = JSON.parse(Buffer.from(cursor, 'base64').toString())
const cursorSortOrder = decoded.s
const cursorCreatedAt = new Date(decoded.c)
const cursorId = decoded.i
// Keyset pagination for ORDER BY sortOrder ASC, createdAt ASC, id ASC:
// (sortOrder > cursorSortOrder) OR
// (sortOrder = cursorSortOrder AND createdAt > cursorCreatedAt) OR
// (sortOrder = cursorSortOrder AND createdAt = cursorCreatedAt AND id > cursorId)
cursorCondition = or(
gt(workflow.sortOrder, cursorSortOrder),
and(
eq(workflow.sortOrder, cursorSortOrder),
or(
gt(workflow.createdAt, cursorCreatedAt),
and(eq(workflow.createdAt, cursorCreatedAt), gt(workflow.id, cursorId))
)
)
)
} catch {
// Invalid cursor - ignore and return first page
}
}

if (workspaceId) {
const whereClause = cursorCondition
? and(eq(workflow.workspaceId, workspaceId), cursorCondition)
: eq(workflow.workspaceId, workspaceId)

workflows = await db
.select()
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
.where(whereClause)
.orderBy(...orderByClause)
.limit(fetchLimit)
} else {
const workspacePermissionRows = await db
.select({ workspaceId: permissions.entityId })
Expand All @@ -81,14 +120,30 @@ export async function GET(request: NextRequest) {
if (workspaceIds.length === 0) {
return NextResponse.json({ data: [] }, { status: 200 })
}
const whereClause = cursorCondition
? and(inArray(workflow.workspaceId, workspaceIds), cursorCondition)
: inArray(workflow.workspaceId, workspaceIds)

workflows = await db
.select()
.from(workflow)
.where(inArray(workflow.workspaceId, workspaceIds))
.where(whereClause)
.orderBy(...orderByClause)
.limit(fetchLimit)
}

// Determine if there are more results and compute next cursor
const hasMore = workflows.length > limit
const data = hasMore ? workflows.slice(0, limit) : workflows
let nextCursor = null
if (hasMore && data.length > 0) {
const last = data[data.length - 1]
nextCursor = Buffer.from(
JSON.stringify({ s: last.sortOrder, c: last.createdAt, i: last.id })
).toString('base64')
}

return NextResponse.json({ data: workflows }, { status: 200 })
return NextResponse.json({ data, nextCursor }, { status: 200 })
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(`[${requestId}] Workflow fetch error after ${elapsed}ms`, error)
Expand Down