From ae1c83df4e78e0a43571ce6610ea334da5d81493 Mon Sep 17 00:00:00 2001 From: Alan Daniel Date: Mon, 13 Apr 2026 13:36:11 -0400 Subject: [PATCH] Stabilize GitHub API rate limits with installation token caching and GraphQL migration Migrate core search queries (pulls, issues, reviews) from REST to GraphQL for lower rate-limit cost. Cache GitHub App installation tokens in memory and KV with automatic refresh. Add rate-limit header logging, expand webhook revalidation signals, and simplify client-side refresh to a signal-based hook. --- .../issues/detail/issue-detail-page.tsx | 35 +- .../components/layouts/dashboard-layout.tsx | 2 - .../src/components/layouts/dashboard-tabs.tsx | 8 - .../pulls/detail/pull-detail-activity.tsx | 3 +- .../pulls/detail/pull-detail-page.tsx | 39 +- .../components/pulls/review/review-page.tsx | 52 +- apps/dashboard/src/lib/auth-runtime.ts | 8 +- .../src/lib/github-cache-invalidation.test.ts | 23 +- .../src/lib/github-request-policy.ts | 91 +- apps/dashboard/src/lib/github-revalidation.ts | 40 +- apps/dashboard/src/lib/github.functions.ts | 2563 ++++++++++++++--- apps/dashboard/src/lib/github.server.test.ts | 87 +- apps/dashboard/src/lib/github.server.ts | 337 ++- .../src/lib/use-github-revalidation.ts | 286 -- .../src/lib/use-github-signal-refresh.ts | 103 + .../src/routes/_protected/$owner/index.tsx | 19 +- .../dashboard/src/routes/_protected/index.tsx | 30 +- .../src/routes/_protected/issues.tsx | 6 +- .../dashboard/src/routes/_protected/pulls.tsx | 6 +- .../src/routes/_protected/reviews.tsx | 6 +- .../src/routes/api/webhooks/github.ts | 38 + apps/dashboard/src/routes/setup.tsx | 2 +- apps/dashboard/wrangler.jsonc | 5 +- docs/github-cache-architecture.md | 120 +- 24 files changed, 3034 insertions(+), 875 deletions(-) delete mode 100644 apps/dashboard/src/lib/use-github-revalidation.ts create mode 100644 apps/dashboard/src/lib/use-github-signal-refresh.ts diff --git a/apps/dashboard/src/components/issues/detail/issue-detail-page.tsx 
b/apps/dashboard/src/components/issues/detail/issue-detail-page.tsx index f916bcb..4643ee3 100644 --- a/apps/dashboard/src/components/issues/detail/issue-detail-page.tsx +++ b/apps/dashboard/src/components/issues/detail/issue-detail-page.tsx @@ -1,6 +1,7 @@ import { Skeleton } from "@diffkit/ui/components/skeleton"; import { useQuery } from "@tanstack/react-query"; import { getRouteApi } from "@tanstack/react-router"; +import { useMemo } from "react"; import { DetailPageLayout, DetailPageSkeletonLayout, @@ -10,6 +11,8 @@ import { githubIssuePageQueryOptions, githubQueryKeys, } from "#/lib/github.query"; +import { githubRevalidationSignalKeys } from "#/lib/github-revalidation"; +import { useGitHubSignalRefresh } from "#/lib/use-github-signal-refresh"; import { useHasMounted } from "#/lib/use-has-mounted"; import { useRegisterTab } from "#/lib/use-register-tab"; import { IssueDetailActivitySection } from "./issue-detail-activity"; @@ -22,13 +25,35 @@ export function IssueDetailPage() { const { user } = routeApi.useRouteContext(); const { owner, repo, issueId } = routeApi.useParams(); const issueNumber = Number(issueId); - const scope = { userId: user.id }; + const scope = useMemo(() => ({ userId: user.id }), [user.id]); + const input = useMemo( + () => ({ owner, repo, issueNumber }), + [owner, repo, issueNumber], + ); const hasMounted = useHasMounted(); + const pageQueryKey = useMemo( + () => githubQueryKeys.issues.page(scope, input), + [scope, input], + ); + const webhookRefreshTargets = useMemo( + () => [ + { + queryKey: pageQueryKey, + signalKeys: [githubRevalidationSignalKeys.issueEntity(input)], + }, + ], + [pageQueryKey, input], + ); const pageQuery = useQuery({ - ...githubIssuePageQueryOptions(scope, { owner, repo, issueNumber }), + ...githubIssuePageQueryOptions(scope, input), enabled: hasMounted, }); + useGitHubSignalRefresh({ + enabled: + hasMounted && pageQuery.data !== undefined && !pageQuery.isFetching, + targets: webhookRefreshTargets, + }); const issue 
= pageQuery.data?.detail; const comments = pageQuery.data?.comments; @@ -62,11 +87,7 @@ export function IssueDetailPage() { events={events} commentPagination={commentPagination} eventPagination={eventPagination} - pageQueryKey={githubQueryKeys.issues.page(scope, { - owner, - repo, - issueNumber, - })} + pageQueryKey={pageQueryKey} isFetching={pageQuery.isFetching} owner={owner} repo={repo} diff --git a/apps/dashboard/src/components/layouts/dashboard-layout.tsx b/apps/dashboard/src/components/layouts/dashboard-layout.tsx index 9e1ae1e..5cb05fe 100644 --- a/apps/dashboard/src/components/layouts/dashboard-layout.tsx +++ b/apps/dashboard/src/components/layouts/dashboard-layout.tsx @@ -6,7 +6,6 @@ import { githubMyIssuesQueryOptions, githubMyPullsQueryOptions, } from "#/lib/github.query"; -import { useGitHubRevalidation } from "#/lib/use-github-revalidation"; import { useHasMounted } from "#/lib/use-has-mounted"; import { useMediaQuery } from "#/lib/use-media-query"; import { DashboardBottomBar } from "./dashboard-bottombar"; @@ -42,7 +41,6 @@ export function DashboardLayout() { const { user } = routeApi.useRouteContext(); const scope = { userId: user.id }; const hasMounted = useHasMounted(); - useGitHubRevalidation(user.id); const pullsQuery = useQuery({ ...githubMyPullsQueryOptions(scope), diff --git a/apps/dashboard/src/components/layouts/dashboard-tabs.tsx b/apps/dashboard/src/components/layouts/dashboard-tabs.tsx index 6e3b1e5..7bb1f20 100644 --- a/apps/dashboard/src/components/layouts/dashboard-tabs.tsx +++ b/apps/dashboard/src/components/layouts/dashboard-tabs.tsx @@ -116,14 +116,6 @@ export function DashboardTabs({ tabsReady, routerRef }: DashboardTabsProps) { } }, [pathname, scrollRef, updateScrollState]); - useEffect(() => { - if (!tabsReady || openTabs.length === 0) return; - - void Promise.allSettled( - openTabs.map((tab) => preloadRouteOnce(routerRef.current, tab.url)), - ); - }, [tabsReady, openTabs, routerRef]); - const handleCloseTab = useCallback( (id: 
string, tabUrl: string) => { const isActive = pathname === tabUrl; diff --git a/apps/dashboard/src/components/pulls/detail/pull-detail-activity.tsx b/apps/dashboard/src/components/pulls/detail/pull-detail-activity.tsx index 29ff637..df6fe9b 100644 --- a/apps/dashboard/src/components/pulls/detail/pull-detail-activity.tsx +++ b/apps/dashboard/src/components/pulls/detail/pull-detail-activity.tsx @@ -222,7 +222,7 @@ function getInvolvedUsers( return users; } -// ── Merge status section — owns its own polling query ──────────────── +// ── Merge status section ───────────────────────────────────────────── function MergeStatusSection({ scope, @@ -237,7 +237,6 @@ function MergeStatusSection({ }) { const statusQuery = useQuery({ ...githubPullStatusQueryOptions(scope, { owner, repo, pullNumber }), - refetchOnWindowFocus: "always", }); const status = statusQuery.data ?? null; diff --git a/apps/dashboard/src/components/pulls/detail/pull-detail-page.tsx b/apps/dashboard/src/components/pulls/detail/pull-detail-page.tsx index cb597b0..9adb80b 100644 --- a/apps/dashboard/src/components/pulls/detail/pull-detail-page.tsx +++ b/apps/dashboard/src/components/pulls/detail/pull-detail-page.tsx @@ -1,6 +1,7 @@ import { Skeleton } from "@diffkit/ui/components/skeleton"; import { useQuery } from "@tanstack/react-query"; import { getRouteApi } from "@tanstack/react-router"; +import { useMemo } from "react"; import { DetailPageLayout, DetailPageSkeletonLayout, @@ -11,6 +12,8 @@ import { githubQueryKeys, githubViewerQueryOptions, } from "#/lib/github.query"; +import { githubRevalidationSignalKeys } from "#/lib/github-revalidation"; +import { useGitHubSignalRefresh } from "#/lib/use-github-signal-refresh"; import { useHasMounted } from "#/lib/use-has-mounted"; import { useRegisterTab } from "#/lib/use-register-tab"; import { PullBodySection } from "./pull-body-section"; @@ -24,17 +27,43 @@ export function PullDetailPage() { const { user } = routeApi.useRouteContext(); const { owner, repo, 
pullId } = routeApi.useParams(); const pullNumber = Number(pullId); - const scope = { userId: user.id }; + const scope = useMemo(() => ({ userId: user.id }), [user.id]); + const input = useMemo( + () => ({ owner, repo, pullNumber }), + [owner, repo, pullNumber], + ); const hasMounted = useHasMounted(); + const pageQueryKey = useMemo( + () => githubQueryKeys.pulls.page(scope, input), + [scope, input], + ); + const webhookRefreshTargets = useMemo( + () => [ + { + queryKey: pageQueryKey, + signalKeys: [githubRevalidationSignalKeys.pullEntity(input)], + }, + { + queryKey: githubQueryKeys.pulls.status(scope, input), + signalKeys: [githubRevalidationSignalKeys.pullEntity(input)], + }, + ], + [pageQueryKey, scope, input], + ); const pageQuery = useQuery({ - ...githubPullPageQueryOptions(scope, { owner, repo, pullNumber }), + ...githubPullPageQueryOptions(scope, input), enabled: hasMounted, }); const viewerQuery = useQuery({ ...githubViewerQueryOptions(scope), enabled: hasMounted, }); + useGitHubSignalRefresh({ + enabled: + hasMounted && pageQuery.data !== undefined && !pageQuery.isFetching, + targets: webhookRefreshTargets, + }); const pr = pageQuery.data?.detail; const comments = pageQuery.data?.comments; @@ -88,11 +117,7 @@ export function PullDetailPage() { events={events} commentPagination={commentPagination} eventPagination={eventPagination} - pageQueryKey={githubQueryKeys.pulls.page(scope, { - owner, - repo, - pullNumber, - })} + pageQueryKey={pageQueryKey} isFetching={pageQuery.isFetching} pr={pr} owner={owner} diff --git a/apps/dashboard/src/components/pulls/review/review-page.tsx b/apps/dashboard/src/components/pulls/review/review-page.tsx index d40c0a2..109c3d7 100644 --- a/apps/dashboard/src/components/pulls/review/review-page.tsx +++ b/apps/dashboard/src/components/pulls/review/review-page.tsx @@ -52,6 +52,8 @@ import type { PullFileSummary, PullReviewComment, } from "#/lib/github.types"; +import { githubRevalidationSignalKeys } from 
"#/lib/github-revalidation"; +import { useGitHubSignalRefresh } from "#/lib/use-github-signal-refresh"; import { useRegisterTab } from "#/lib/use-register-tab"; import { checkPermissionWarning } from "#/lib/warning-store"; import type { ReviewDiffPaneHandle } from "./review-diff-pane"; @@ -95,9 +97,44 @@ export function ReviewPage() { const { user } = routeApi.useRouteContext(); const { owner, repo, pullId } = routeApi.useParams(); const pullNumber = Number(pullId); - const scope = { userId: user.id }; + const scope = useMemo(() => ({ userId: user.id }), [user.id]); const queryClient = useQueryClient(); - const input = { owner, repo, pullNumber }; + const input = useMemo( + () => ({ owner, repo, pullNumber }), + [owner, repo, pullNumber], + ); + const pageQueryKey = useMemo( + () => githubQueryKeys.pulls.page(scope, input), + [scope, input], + ); + const fileSummariesQueryKey = useMemo( + () => githubQueryKeys.pulls.fileSummaries(scope, input), + [scope, input], + ); + const filesQueryKey = useMemo( + () => githubQueryKeys.pulls.files(scope, input), + [scope, input], + ); + const reviewCommentsQueryKey = useMemo( + () => githubQueryKeys.pulls.reviewComments(scope, input), + [scope, input], + ); + const pullSignalKey = githubRevalidationSignalKeys.pullEntity(input); + const webhookRefreshTargets = useMemo( + () => [ + { queryKey: pageQueryKey, signalKeys: [pullSignalKey] }, + { queryKey: fileSummariesQueryKey, signalKeys: [pullSignalKey] }, + { queryKey: filesQueryKey, signalKeys: [pullSignalKey] }, + { queryKey: reviewCommentsQueryKey, signalKeys: [pullSignalKey] }, + ], + [ + pageQueryKey, + fileSummariesQueryKey, + filesQueryKey, + reviewCommentsQueryKey, + pullSignalKey, + ], + ); const diffPaneRef = useRef(null); // Stable store for active file — updates bypass ReviewPage renders entirely @@ -116,7 +153,7 @@ export function ReviewPage() { }); const filesQuery = useInfiniteQuery({ - queryKey: githubQueryKeys.pulls.files(scope, input), + queryKey: filesQueryKey, 
initialPageParam: 1, queryFn: ({ pageParam }) => getPullFiles({ @@ -137,6 +174,15 @@ export function ReviewPage() { enabled: hasDiffPayload, refetchOnWindowFocus: false, }); + useGitHubSignalRefresh({ + enabled: + pageQuery.data !== undefined && + !pageQuery.isFetching && + !fileSummariesQuery.isFetching && + !filesQuery.isFetching && + !reviewCommentsQuery.isFetching, + targets: webhookRefreshTargets, + }); const pr = pageQuery.data?.detail ?? null; const sidebarFiles = fileSummariesQuery.data ?? []; diff --git a/apps/dashboard/src/lib/auth-runtime.ts b/apps/dashboard/src/lib/auth-runtime.ts index b5f19da..9354248 100644 --- a/apps/dashboard/src/lib/auth-runtime.ts +++ b/apps/dashboard/src/lib/auth-runtime.ts @@ -21,7 +21,9 @@ export async function getGitHubClientByUserId( throttle: { enabled: false }, }); - configureGitHubRequestPolicies(octokit); + configureGitHubRequestPolicies(octokit, { + tokenLabel: `oauth:user:${userId}`, + }); return octokit; } @@ -40,7 +42,9 @@ export async function getGitHubAppUserClientByUserId( throttle: { enabled: false }, }); - configureGitHubRequestPolicies(octokit); + configureGitHubRequestPolicies(octokit, { + tokenLabel: `app-user:${userId}`, + }); return octokit; } diff --git a/apps/dashboard/src/lib/github-cache-invalidation.test.ts b/apps/dashboard/src/lib/github-cache-invalidation.test.ts index ad2c97b..788ca59 100644 --- a/apps/dashboard/src/lib/github-cache-invalidation.test.ts +++ b/apps/dashboard/src/lib/github-cache-invalidation.test.ts @@ -11,7 +11,11 @@ describe("getGitHubWebhookRevalidationSignalKeys", () => { }, pull_request: { number: 42 }, }), - ).toEqual(["pulls.mine", "pull:stylessh/havana#42"]); + ).toEqual([ + "pulls.mine", + "repoMeta:stylessh/havana", + "pull:stylessh/havana#42", + ]); }); it("treats issue comments on pull requests as pull signals", () => { @@ -42,7 +46,22 @@ describe("getGitHubWebhookRevalidationSignalKeys", () => { number: 9, }, }), - ).toEqual(["issues.mine", "issue:stylessh/havana#9"]); + 
).toEqual([ + "issues.mine", + "repoMeta:stylessh/havana", + "issue:stylessh/havana#9", + ]); + }); + + it("maps push webhook events to repo metadata and code signals", () => { + expect( + getGitHubWebhookRevalidationSignalKeys("push", { + repository: { + name: "havana", + owner: { login: "stylessh" }, + }, + }), + ).toEqual(["repoMeta:stylessh/havana", "repoCode:stylessh/havana"]); }); it("extracts pull signals from check_run webhook payloads", () => { diff --git a/apps/dashboard/src/lib/github-request-policy.ts b/apps/dashboard/src/lib/github-request-policy.ts index 30d0a1a..f70bf4f 100644 --- a/apps/dashboard/src/lib/github-request-policy.ts +++ b/apps/dashboard/src/lib/github-request-policy.ts @@ -1,5 +1,6 @@ import { getRequest } from "@tanstack/react-start/server"; import type { Octokit as OctokitType } from "octokit"; +import { debug } from "./debug"; const GITHUB_READ_RETRY_COUNT = 1; export const GITHUB_REQUEST_TIMEOUT_MS = 12_000; @@ -9,12 +10,24 @@ type GitHubRequestOptions = Parameters< >[1] extends (options: infer Options) => unknown ? Options & { method?: string; + url?: string; request?: { retries?: number; signal?: AbortSignal; }; } : never; +type GitHubRateLimitResponse = { + status?: number; + headers?: Record; +}; +type GitHubRateLimitError = { + status?: number; + response?: GitHubRateLimitResponse; +}; +type GitHubRequestPolicyOptions = { + tokenLabel?: string; +}; function isSafeGitHubRetryMethod(method: string | undefined) { return method === "GET" || method === "HEAD" || method === "OPTIONS"; @@ -44,7 +57,62 @@ function getIncomingRequestSignal(): AbortSignal | undefined { } } -export function configureGitHubRequestPolicies(octokit: OctokitType) { +function normalizeHeaderValue(value: string | number | undefined) { + return typeof value === "number" ? 
String(value) : value; +} + +function parseRateLimitReset(value: string | undefined) { + if (!value) { + return null; + } + + const seconds = Number.parseInt(value, 10); + if (!Number.isFinite(seconds)) { + return null; + } + + return new Date(seconds * 1_000).toISOString(); +} + +function logGitHubRateLimit({ + options, + response, + tokenLabel, +}: { + options: GitHubRequestOptions; + response: GitHubRateLimitResponse; + tokenLabel: string; +}) { + const headers = response.headers ?? {}; + const remaining = normalizeHeaderValue(headers["x-ratelimit-remaining"]); + const limit = normalizeHeaderValue(headers["x-ratelimit-limit"]); + const used = normalizeHeaderValue(headers["x-ratelimit-used"]); + const reset = normalizeHeaderValue(headers["x-ratelimit-reset"]); + const resource = normalizeHeaderValue(headers["x-ratelimit-resource"]); + + if (!remaining && !limit && !used && !reset) { + return; + } + + debug("github-rate-limit", "request completed", { + token: tokenLabel, + method: options.method, + url: options.url, + status: response.status, + resource, + limit: limit ? Number(limit) : null, + remaining: remaining ? Number(remaining) : null, + used: used ? Number(used) : null, + resetAt: parseRateLimitReset(reset), + }); +} + +export function configureGitHubRequestPolicies( + octokit: OctokitType, + options: GitHubRequestPolicyOptions = {}, +) { + const tokenLabel = options.tokenLabel ?? "unknown"; + octokit.hook.before("request", (options: GitHubRequestOptions) => { const requestOptions = options.request ?? 
{}; options.request = requestOptions; @@ -55,4 +123,25 @@ export function configureGitHubRequestPolicies(octokit: OctokitType) { getIncomingRequestSignal(), ); }); + + octokit.hook.after("request", (response, requestOptions) => { + logGitHubRateLimit({ + options: requestOptions as GitHubRequestOptions, + response: response as GitHubRateLimitResponse, + tokenLabel, + }); + }); + + octokit.hook.error("request", (error, requestOptions) => { + const rateLimitError = error as GitHubRateLimitError; + if (rateLimitError.response) { + logGitHubRateLimit({ + options: requestOptions as GitHubRequestOptions, + response: rateLimitError.response, + tokenLabel, + }); + } + + throw error; + }); } diff --git a/apps/dashboard/src/lib/github-revalidation.ts b/apps/dashboard/src/lib/github-revalidation.ts index 3690df5..c10d112 100644 --- a/apps/dashboard/src/lib/github-revalidation.ts +++ b/apps/dashboard/src/lib/github-revalidation.ts @@ -3,6 +3,8 @@ import type { Tab } from "./tab-store"; export const githubRevalidationSignalKeys = { pullsMine: "pulls.mine", issuesMine: "issues.mine", + repoMeta: (input: { owner: string; repo: string }) => + `repoMeta:${input.owner}/${input.repo}`, repoLabels: (input: { owner: string; repo: string }) => `repoLabels:${input.owner}/${input.repo}`, repoCollaborators: (input: { owner: string; repo: string }) => @@ -178,13 +180,23 @@ export function getGitHubWebhookRevalidationSignalKeys( return typeof pullNumber === "number" ? 
[ githubRevalidationSignalKeys.pullsMine, + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), githubRevalidationSignalKeys.pullEntity({ owner: repository.owner, repo: repository.repo, pullNumber, }), ] - : [githubRevalidationSignalKeys.pullsMine]; + : [ + githubRevalidationSignalKeys.pullsMine, + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), + ]; } if ( @@ -212,6 +224,10 @@ export function getGitHubWebhookRevalidationSignalKeys( return issueIdentity.isPullRequest ? [ githubRevalidationSignalKeys.pullsMine, + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), githubRevalidationSignalKeys.pullEntity({ owner: repository.owner, repo: repository.repo, @@ -220,6 +236,10 @@ export function getGitHubWebhookRevalidationSignalKeys( ] : [ githubRevalidationSignalKeys.issuesMine, + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), githubRevalidationSignalKeys.issueEntity({ owner: repository.owner, repo: repository.repo, @@ -253,6 +273,10 @@ export function getGitHubWebhookRevalidationSignalKeys( if (event === "push") { return [ + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), githubRevalidationSignalKeys.repoCode({ owner: repository.owner, repo: repository.repo, @@ -260,8 +284,18 @@ export function getGitHubWebhookRevalidationSignalKeys( ]; } - if (event === "delete") { - return [githubRevalidationSignalKeys.pullsMine]; + if (event === "create" || event === "delete") { + return [ + githubRevalidationSignalKeys.pullsMine, + githubRevalidationSignalKeys.repoMeta({ + owner: repository.owner, + repo: repository.repo, + }), + githubRevalidationSignalKeys.repoCode({ + owner: repository.owner, + repo: repository.repo, + }), + ]; } if (event === "check_run") { diff --git a/apps/dashboard/src/lib/github.functions.ts 
b/apps/dashboard/src/lib/github.functions.ts index 02766c3..1ee394f 100644 --- a/apps/dashboard/src/lib/github.functions.ts +++ b/apps/dashboard/src/lib/github.functions.ts @@ -33,11 +33,11 @@ import type { PullSummary, RepoBranch, RepoCollaborator, - RepoContributor, RepoContributorsResult, RepoOverview, RepositoryRef, RepoTreeEntry, + RequestedTeam, RequestReviewersInput, SetLabelsInput, SubmitReviewInput, @@ -79,6 +79,16 @@ type GitHubContext = { session: AuthSession; octokit: GitHubClient; }; +type GitHubSearchOwnerScope = { + login: string; + targetType: GitHubInstallationTargetType; +}; +type GitHubGraphQLSearchSource = { + label: string; + context: GitHubContext; + owner?: GitHubSearchOwnerScope; + excludeOwners?: GitHubSearchOwnerScope[]; +}; type GitHubRestResponse = { data: TData; @@ -92,6 +102,255 @@ type SearchItem = Awaited< type SearchResult = Awaited< ReturnType >["data"]; +type GitHubGraphQLRateLimit = { + cost: number; + remaining: number; + resetAt: string; +}; +type GitHubGraphQLActor = { + login: string; + avatarUrl: string; + url: string; + __typename?: string; +} | null; +type GitHubGraphQLLabel = { + name: string; + color: string; + description: string | null; +}; +type GitHubGraphQLRepositoryRef = { + name: string; + nameWithOwner: string; + url: string; + owner: { + login: string; + }; +}; +type GitHubGraphQLCommentNode = { + id: string; + databaseId: number | null; + body: string; + createdAt: string; + author: GitHubGraphQLActor; +}; +type GitHubGraphQLCommentConnection = { + totalCount: number; + nodes: Array | null; +}; +type GitHubGraphQLSearchConnection = { + nodes: Array | null; +}; +type GitHubGraphQLPullSearchNode = { + __typename: "PullRequest"; + databaseId: number | null; + number: number; + title: string; + state: string; + isDraft: boolean; + createdAt: string; + updatedAt: string; + closedAt: string | null; + mergedAt: string | null; + url: string; + comments: { + totalCount: number; + }; + author: GitHubGraphQLActor; + 
labels: { + nodes: Array | null; + } | null; + repository: GitHubGraphQLRepositoryRef; +}; +type GitHubGraphQLIssueSearchNode = { + __typename: "Issue"; + databaseId: number | null; + number: number; + title: string; + state: string; + stateReason: string | null; + createdAt: string; + updatedAt: string; + closedAt: string | null; + url: string; + comments: { + totalCount: number; + }; + author: GitHubGraphQLActor; + labels: { + nodes: Array | null; + } | null; + repository: GitHubGraphQLRepositoryRef; +}; +type GitHubGraphQLRepoOverviewResponse = { + repository: { + databaseId: number | null; + name: string; + nameWithOwner: string; + description: string | null; + isPrivate: boolean; + isFork: boolean; + defaultBranchRef: { + name: string; + target: { + __typename: string; + oid?: string; + message?: string; + committedDate?: string; + author?: { + user?: { + login: string; + avatarUrl: string; + url: string; + } | null; + } | null; + } | null; + } | null; + stargazerCount: number; + forkCount: number; + watchers: { + totalCount: number; + }; + primaryLanguage: { + name: string; + } | null; + licenseInfo: { + spdxId: string | null; + } | null; + repositoryTopics: { + nodes: Array<{ + topic: { + name: string; + }; + } | null> | null; + }; + url: string; + owner: { + login: string; + avatarUrl: string; + }; + branches: { + totalCount: number; + }; + tags: { + totalCount: number; + }; + pullRequests: { + totalCount: number; + }; + issues: { + totalCount: number; + }; + hasDiscussionsEnabled: boolean; + } | null; + rateLimit: GitHubGraphQLRateLimit; +}; +type GitHubGraphQLReviewRequestNode = { + requestedReviewer: + | ({ + __typename: "User"; + login: string; + avatarUrl: string; + url: string; + } & Record) + | { + __typename: "Team"; + name: string; + slug: string; + url: string; + } + | null; +}; +type GitHubGraphQLPullPageResponse = { + repository: { + pullRequest: { + databaseId: number | null; + number: number; + title: string; + state: string; + isDraft: 
boolean; + createdAt: string; + updatedAt: string; + closedAt: string | null; + mergedAt: string | null; + url: string; + body: string; + additions: number; + deletions: number; + changedFiles: number; + comments: { totalCount: number }; + author: GitHubGraphQLActor; + labels: { + nodes: Array | null; + } | null; + headRefName: string; + headRefOid: string; + baseRefName: string; + merged: boolean; + mergeCommit: { oid: string } | null; + mergedBy: GitHubGraphQLActor; + mergeable: string; + mergeStateStatus: string; + repository: GitHubGraphQLRepositoryRef; + reviewThreads: { totalCount: number }; + reviewRequests: { + nodes: Array | null; + } | null; + commits: { + totalCount: number; + nodes: Array<{ + commit: { + oid: string; + message: string; + committedDate: string; + author: { + user?: { + login: string; + avatarUrl: string; + url: string; + } | null; + } | null; + }; + } | null> | null; + }; + firstComments: GitHubGraphQLCommentConnection; + lastComments: GitHubGraphQLCommentConnection; + } | null; + }; + rateLimit: GitHubGraphQLRateLimit; +}; +type GitHubGraphQLIssuePageResponse = { + repository: { + issue: { + databaseId: number | null; + number: number; + title: string; + state: string; + stateReason: string | null; + createdAt: string; + updatedAt: string; + closedAt: string | null; + url: string; + body: string; + comments: { totalCount: number }; + author: GitHubGraphQLActor; + labels: { + nodes: Array | null; + } | null; + repository: GitHubGraphQLRepositoryRef; + assignees: { + nodes: Array | null; + } | null; + milestone: { + title: string; + description: string | null; + dueOn: string | null; + } | null; + firstComments: GitHubGraphQLCommentConnection; + lastComments: GitHubGraphQLCommentConnection; + } | null; + }; + rateLimit: GitHubGraphQLRateLimit; +}; type AuthenticatedUserRepo = Awaited< ReturnType >["data"][number]; @@ -149,6 +408,12 @@ type GitHubUserTeam = { type RepoState = "all" | "closed" | "open"; type PullSort = "created" | 
"long-running" | "popularity" | "updated"; type IssueSort = "comments" | "created" | "updated"; +const GITHUB_OPERATION_TIMEOUT_MS = 15_000; +const GITHUB_PAGINATED_OPERATION_TIMEOUT_MS = 25_000; +const GITHUB_MIN_OPERATION_TIMEOUT_MS = 1_000; +const MY_SEARCH_BUCKET_LIMIT = 30; +const MY_SEARCH_SOURCE_TIMEOUT_MS = 8_000; +const MY_SEARCH_TOTAL_TIMEOUT_MS = 18_000; // --------------------------------------------------------------------------- // Entity-scoped cache busting helpers @@ -366,26 +631,6 @@ export type CommandPaletteSearchInput = { perPage?: number; }; -const myPullRoleDefinitions = [ - { key: "reviewRequested", role: "review-requested" }, - { key: "assigned", role: "assigned" }, - { key: "authored", role: "author" }, - { key: "mentioned", role: "mentioned" }, - { key: "involved", role: "involved" }, -] as const satisfies Array<{ - key: keyof MyPullsResult; - role: PullSearchRole; -}>; - -const myIssueRoleDefinitions = [ - { key: "assigned", role: "assigned" }, - { key: "authored", role: "author" }, - { key: "mentioned", role: "mentioned" }, -] as const satisfies Array<{ - key: keyof MyIssuesResult; - role: IssueSearchRole; -}>; - function clampPerPage(value: number | undefined, fallback = 30) { if (!Number.isFinite(value)) { return fallback; @@ -449,6 +694,33 @@ function parseRepositoryRef( ); } +function mapGraphQLActor(actor: GitHubGraphQLActor): GitHubActor | null { + if (!actor?.login) { + return null; + } + + return { + login: actor.login, + avatarUrl: actor.avatarUrl, + url: actor.url, + type: actor.__typename ?? 
"User", + }; +} + +function mapGraphQLRepositoryRef( + repository: GitHubGraphQLRepositoryRef, +): RepositoryRef { + const [ownerFromName, repoFromName] = repository.nameWithOwner.split("/"); + const owner = repository.owner.login || ownerFromName; + + return { + name: repository.name || repoFromName, + owner, + fullName: repository.nameWithOwner, + url: repository.url, + }; +} + function mapActor(user: GitHubApiUser | null | undefined): GitHubActor | null { if (!user?.login) { return null; @@ -529,6 +801,22 @@ function mapLabels(labels: Array | null | undefined) { .filter((label): label is GitHubLabel => Boolean(label)); } +function mapGraphQLLabels( + labels: { nodes: Array | null } | null, +) { + return (labels?.nodes ?? []).flatMap((label) => + label + ? [ + { + name: label.name, + color: label.color, + description: label.description, + }, + ] + : [], + ); +} + function mapPullSummary( pull: { id: number; @@ -689,6 +977,366 @@ function mapIssueSearchItems(items: SearchItem[]) { .filter((item): item is IssueSummary => Boolean(item)); } +function mapGraphQLPullSearchNode( + node: GitHubGraphQLPullSearchNode | null, +): PullSummary | null { + if (!node || node.__typename !== "PullRequest" || node.databaseId == null) { + return null; + } + + return { + id: node.databaseId, + number: node.number, + title: node.title, + state: node.state.toLowerCase(), + isDraft: node.isDraft, + createdAt: node.createdAt, + updatedAt: node.updatedAt, + closedAt: node.closedAt, + mergedAt: node.mergedAt, + comments: node.comments.totalCount, + url: node.url, + author: mapGraphQLActor(node.author), + labels: mapGraphQLLabels(node.labels), + repository: mapGraphQLRepositoryRef(node.repository), + }; +} + +function mapGraphQLIssueSearchNode( + node: GitHubGraphQLIssueSearchNode | null, +): IssueSummary | null { + if (!node || node.__typename !== "Issue" || node.databaseId == null) { + return null; + } + + return { + id: node.databaseId, + number: node.number, + title: node.title, + 
state: node.state.toLowerCase(), + stateReason: node.stateReason?.toLowerCase() ?? null, + createdAt: node.createdAt, + updatedAt: node.updatedAt, + closedAt: node.closedAt, + comments: node.comments.totalCount, + url: node.url, + author: mapGraphQLActor(node.author), + labels: mapGraphQLLabels(node.labels), + repository: mapGraphQLRepositoryRef(node.repository), + }; +} + +function numericIdFromGraphQLId(id: string) { + let hash = 0; + for (let i = 0; i < id.length; i++) { + hash = (hash * 31 + id.charCodeAt(i)) | 0; + } + return Math.abs(hash); +} + +function mapGraphQLComments( + ...connections: GitHubGraphQLCommentConnection[] +): IssueComment[] { + const byId = new Map(); + + for (const connection of connections) { + for (const node of connection.nodes ?? []) { + if (!node) { + continue; + } + + const id = node.databaseId ?? numericIdFromGraphQLId(node.id); + byId.set(id, { + id, + body: node.body, + createdAt: node.createdAt, + author: mapGraphQLActor(node.author), + }); + } + } + + return [...byId.values()].sort( + (a, b) => Date.parse(a.createdAt) - Date.parse(b.createdAt), + ); +} + +function getLoadedCommentPages(totalComments: number) { + const totalPages = Math.max(1, Math.ceil(totalComments / COMMENTS_PER_PAGE)); + return totalPages === 1 ? [1] : [1, totalPages]; +} + +function mapGraphQLPullDetail( + pull: NonNullable, +): PullDetail { + const requestedReviewers: GitHubActor[] = []; + const requestedTeams: RequestedTeam[] = []; + + for (const node of pull.reviewRequests?.nodes ?? []) { + const reviewer = node?.requestedReviewer; + if (!reviewer) { + continue; + } + + if (reviewer.__typename === "Team") { + requestedTeams.push({ + slug: reviewer.slug, + name: reviewer.name, + url: reviewer.url, + }); + continue; + } + + const actor = mapGraphQLActor(reviewer); + if (actor) { + requestedReviewers.push(actor); + } + } + + return { + id: pull.databaseId ?? 
0, + number: pull.number, + title: pull.title, + state: pull.state.toLowerCase(), + isDraft: pull.isDraft, + createdAt: pull.createdAt, + updatedAt: pull.updatedAt, + closedAt: pull.closedAt, + mergedAt: pull.mergedAt, + comments: pull.comments.totalCount, + url: pull.url, + author: mapGraphQLActor(pull.author), + labels: mapGraphQLLabels(pull.labels), + repository: mapGraphQLRepositoryRef(pull.repository), + body: pull.body, + additions: pull.additions, + deletions: pull.deletions, + changedFiles: pull.changedFiles, + commits: pull.commits.totalCount, + reviewComments: pull.reviewThreads.totalCount, + headRefName: pull.headRefName, + headSha: pull.headRefOid, + baseRefName: pull.baseRefName, + isMerged: pull.merged, + mergeCommitSha: pull.mergeCommit?.oid ?? null, + mergedBy: mapGraphQLActor(pull.mergedBy), + mergeable: + pull.mergeable === "MERGEABLE" + ? true + : pull.mergeable === "CONFLICTING" + ? false + : null, + mergeableState: pull.mergeStateStatus?.toLowerCase() ?? null, + requestedReviewers, + requestedTeams, + }; +} + +function mapGraphQLPullCommits( + pull: NonNullable, +): PullCommit[] { + return (pull.commits.nodes ?? []).flatMap((node) => { + if (!node) { + return []; + } + + return [ + { + sha: node.commit.oid, + message: node.commit.message, + createdAt: node.commit.committedDate, + author: mapGraphQLActor( + node.commit.author?.user + ? { + ...node.commit.author.user, + __typename: "User", + } + : null, + ), + }, + ]; + }); +} + +function mapGraphQLIssueDetail( + issue: NonNullable, +): IssueDetail { + return { + id: issue.databaseId ?? 0, + number: issue.number, + title: issue.title, + state: issue.state.toLowerCase(), + stateReason: issue.stateReason?.toLowerCase() ?? 
null, + createdAt: issue.createdAt, + updatedAt: issue.updatedAt, + closedAt: issue.closedAt, + comments: issue.comments.totalCount, + url: issue.url, + author: mapGraphQLActor(issue.author), + labels: mapGraphQLLabels(issue.labels), + repository: mapGraphQLRepositoryRef(issue.repository), + body: issue.body, + assignees: (issue.assignees?.nodes ?? []) + .map((assignee) => mapGraphQLActor(assignee)) + .filter((assignee): assignee is GitHubActor => Boolean(assignee)), + milestone: issue.milestone + ? { + title: issue.milestone.title, + description: issue.milestone.description, + dueOn: issue.milestone.dueOn, + } + : null, + }; +} + +function mapGraphQLRepoOverview( + repository: NonNullable, +): RepoOverview { + const latestCommitTarget = repository.defaultBranchRef?.target; + const latestCommit = + latestCommitTarget?.__typename === "Commit" && latestCommitTarget.oid + ? { + sha: latestCommitTarget.oid, + message: latestCommitTarget.message ?? "", + date: latestCommitTarget.committedDate ?? "", + author: mapGraphQLActor( + latestCommitTarget.author?.user + ? { + ...latestCommitTarget.author.user, + __typename: "User", + } + : null, + ), + } + : null; + + return { + id: repository.databaseId ?? 0, + name: repository.name, + fullName: repository.nameWithOwner, + description: repository.description, + isPrivate: repository.isPrivate, + isFork: repository.isFork, + defaultBranch: repository.defaultBranchRef?.name ?? "main", + stars: repository.stargazerCount, + forks: repository.forkCount, + watchers: repository.watchers.totalCount, + language: repository.primaryLanguage?.name ?? null, + license: repository.licenseInfo?.spdxId ?? null, + topics: (repository.repositoryTopics.nodes ?? []).flatMap((node) => + node ? 
[node.topic.name] : [], + ), + url: repository.url, + owner: repository.owner.login, + ownerAvatarUrl: repository.owner.avatarUrl, + branchCount: repository.branches.totalCount, + tagCount: repository.tags.totalCount, + openPullCount: repository.pullRequests.totalCount, + openIssueCount: repository.issues.totalCount, + hasDiscussions: repository.hasDiscussionsEnabled, + latestCommit, + }; +} + +function createGraphQLResponseMetadata( + rateLimit: GitHubGraphQLRateLimit | null | undefined, +) { + return createGitHubResponseMetadata(200, { + "x-ratelimit-remaining": + typeof rateLimit?.remaining === "number" + ? String(rateLimit.remaining) + : undefined, + "x-ratelimit-reset": rateLimit?.resetAt + ? String(Math.floor(new Date(rateLimit.resetAt).getTime() / 1_000)) + : undefined, + }); +} + +function mergeGraphQLRateLimits( + rateLimits: Array, +): GitHubGraphQLRateLimit | null { + const presentRateLimits = rateLimits.filter( + (rateLimit): rateLimit is GitHubGraphQLRateLimit => Boolean(rateLimit), + ); + if (presentRateLimits.length === 0) { + return null; + } + + return { + cost: presentRateLimits.reduce( + (total, rateLimit) => total + rateLimit.cost, + 0, + ), + remaining: Math.min( + ...presentRateLimits.map((rateLimit) => rateLimit.remaining), + ), + resetAt: presentRateLimits + .map((rateLimit) => rateLimit.resetAt) + .sort() + .at(-1) as string, + }; +} + +class GitHubOperationTimeoutError extends Error { + constructor( + label: string, + readonly timeoutMs: number, + ) { + super(`${label} timed out after ${timeoutMs}ms`); + this.name = "GitHubOperationTimeoutError"; + } +} + +function getRemainingSearchTimeoutMs(deadlineAt: number, maxTimeoutMs: number) { + return Math.max(0, Math.min(maxTimeoutMs, deadlineAt - Date.now())); +} + +async function withGitHubOperationTimeout( + label: string, + timeoutMs: number, + task: (signal: AbortSignal) => Promise, +) { + if (timeoutMs <= 0) { + throw new GitHubOperationTimeoutError(label, timeoutMs); + } + + const 
controller = new AbortController(); + let timeoutId: ReturnType | null = null; + const taskPromise = task(controller.signal); + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + controller.abort(); + reject(new GitHubOperationTimeoutError(label, timeoutMs)); + }, timeoutMs); + }); + + try { + return await Promise.race([taskPromise, timeoutPromise]); + } finally { + taskPromise.catch(() => {}); + if (timeoutId) { + clearTimeout(timeoutId); + } + controller.abort(); + } +} + +async function executeGitHubGraphQL( + context: GitHubContext, + label: string, + query: string, + variables: Record, +) { + return withGitHubOperationTimeout( + label, + GITHUB_OPERATION_TIMEOUT_MS, + (signal) => + context.octokit.graphql(query, { + ...variables, + request: { signal }, + }), + ); +} + async function safeCommandPaletteSearch({ fallback, label, @@ -696,10 +1344,14 @@ async function safeCommandPaletteSearch({ }: { fallback: T; label: string; - task: () => Promise; + task: (signal: AbortSignal) => Promise; }) { try { - return await task(); + return await withGitHubOperationTimeout( + `github command search ${label}`, + GITHUB_OPERATION_TIMEOUT_MS, + task, + ); } catch (error) { console.error(`[github-command-search] failed to search ${label}`, error); return fallback; @@ -836,6 +1488,80 @@ async function getGitHubAppUserInstallations(userId: string): Promise<{ } } +async function getGitHubAuthenticatedOrganizations( + context: GitHubContext, +): Promise { + try { + const organizationsResponse = await context.octokit.request( + "GET /user/orgs", + { + per_page: 100, + }, + ); + const payload = + organizationsResponse.data as GitHubAuthenticatedOrgPayload[]; + return payload.flatMap((organization) => { + if (!organization.id || !organization.login) { + return []; + } + + return [ + { + id: organization.id, + login: organization.login, + avatarUrl: organization.avatar_url ?? 
null, + }, + ]; + }); + } catch (error) { + console.error("[github-access] failed to load organizations", error); + return []; + } +} + +async function getGitHubContextForInstallation( + baseContext: GitHubContext, + installation: GitHubAppInstallation, +) { + return getOrCreateCachedContext( + `installation:${installation.id}`, + async () => { + if (installation.suspendedAt) { + return null; + } + + try { + debug("github-access", "creating installation client", { + owner: installation.account.login, + installationId: installation.id, + }); + const { getGitHubInstallationClient } = await import("./github.server"); + const installationOctokit = await getGitHubInstallationClient( + installation.id, + ); + // Eagerly authenticate to verify the installation token is valid. + // auth-app authenticates lazily by default, so without this the + // try/catch cannot catch auth failures. + await installationOctokit.auth({ type: "installation" }); + debug("github-access", "installation client ready", { + owner: installation.account.login, + }); + return { + ...baseContext, + octokit: installationOctokit, + }; + } catch (error) { + console.error( + "[github-access] installation client failed", + installation.account.login, + error, + ); + return null; + } + }, + ); +} + async function getGitHubContextForOwner(owner: string) { return getOrCreateCachedContext(`owner:${owner}`, async () => { const context = await getGitHubContext(); @@ -857,32 +1583,19 @@ async function getGitHubContextForOwner(owner: string) { return context; } - try { - debug("github-access", "creating installation client", { - owner, - installationId: installation.id, - }); - const { getGitHubInstallationClient } = await import("./github.server"); - const installationOctokit = await getGitHubInstallationClient( - installation.id, - ); - // Eagerly authenticate to verify the installation token is valid. - // auth-app authenticates lazily by default, so without this the - // try/catch cannot catch auth failures. 
- await installationOctokit.auth({ type: "installation" }); - debug("github-access", "installation client ready", { owner }); - return { - ...context, - octokit: installationOctokit, - }; - } catch (error) { + const installationContext = await getGitHubContextForInstallation( + context, + installation, + ); + if (!installationContext) { console.error( "[github-access] installation client failed, falling back to OAuth token", owner, - error, ); return context; } + + return installationContext; }); } @@ -1238,17 +1951,26 @@ function buildUserSearchQuery({ state, username, owner, + ownerType, repo, + excludeOwners, }: { itemType: "issue" | "pr"; role: IssueSearchRole | PullSearchRole; state: RepoState; username: string; owner?: string; + ownerType?: GitHubInstallationTargetType; repo?: string; + excludeOwners?: GitHubSearchOwnerScope[]; }) { const stateFilter = state === "all" ? "" : ` is:${state}`; - const scopeFilter = owner && repo ? ` repo:${owner}/${repo}` : ""; + const scopeFilter = + owner && repo + ? ` repo:${owner}/${repo}` + : owner + ? ` ${buildOwnerSearchQualifier({ login: owner, targetType: ownerType })}` + : ""; const roleFilter = role === "all" ? ` involves:${username}` @@ -1261,8 +1983,20 @@ function buildUserSearchQuery({ : role === "involved" ? ` involves:${username}` : ` author:${username}`; + const exclusionFilter = + excludeOwners + ?.map((scope) => ` -${buildOwnerSearchQualifier(scope)}`) + .join("") ?? ""; - return `is:${itemType}${stateFilter}${scopeFilter}${roleFilter} archived:false`; + return `is:${itemType}${stateFilter}${scopeFilter}${roleFilter}${exclusionFilter} archived:false`; +} + +function buildOwnerSearchQualifier(scope: { + login: string; + targetType?: GitHubInstallationTargetType; +}) { + const qualifier = scope.targetType === "Organization" ? 
"org" : "user"; + return `${qualifier}:${scope.login}`; } function buildConditionalHeaders(conditionals: GitHubConditionalHeaders) { @@ -1299,11 +2033,17 @@ function normalizeResponseHeaders(headers: Record) { async function executeGitHubRequest( request: ( headers: Record, + signal: AbortSignal, ) => Promise>, conditionals: GitHubConditionalHeaders, + label = "github request", ): Promise> { try { - const response = await request(buildConditionalHeaders(conditionals)); + const response = await withGitHubOperationTimeout( + label, + GITHUB_OPERATION_TIMEOUT_MS, + (signal) => request(buildConditionalHeaders(conditionals), signal), + ); return { kind: "success", @@ -1354,6 +2094,7 @@ async function getCachedGitHubRequest({ cacheMode?: "legacy" | "split"; request: ( headers: Record, + signal: AbortSignal, ) => Promise>; mapData: (data: TGitHubData) => TResult; }) { @@ -1366,7 +2107,11 @@ async function getCachedGitHubRequest({ namespaceKeys, cacheMode, fetcher: async (conditionals) => { - const result = await executeGitHubRequest(request, conditionals); + const result = await executeGitHubRequest( + request, + conditionals, + `github ${resource}`, + ); if (result.kind === "not-modified") { return result; @@ -1399,7 +2144,10 @@ async function getCachedPaginatedGitHubRequest({ signalKeys?: string[]; namespaceKeys?: string[]; cacheMode?: "legacy" | "split"; - request: (page: number) => Promise>; + request: ( + page: number, + signal: AbortSignal, + ) => Promise>; mapData: (items: TGitHubItem[]) => TResult; pageSize?: number; }) { @@ -1415,9 +2163,25 @@ async function getCachedPaginatedGitHubRequest({ const items: TGitHubItem[] = []; let page = 1; let metadata = createGitHubResponseMetadata(200, {}); + const deadlineAt = Date.now() + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS; while (true) { - const response = await request(page); + const timeoutMs = getRemainingSearchTimeoutMs( + deadlineAt, + GITHUB_OPERATION_TIMEOUT_MS, + ); + if (timeoutMs < GITHUB_MIN_OPERATION_TIMEOUT_MS) 
{ + throw new GitHubOperationTimeoutError( + `github ${resource} page ${page}`, + timeoutMs, + ); + } + + const response = await withGitHubOperationTimeout( + `github ${resource} page ${page}`, + timeoutMs, + (signal) => request(page, signal), + ); if (page === 1) { metadata = createGitHubResponseMetadata( response.status, @@ -1445,17 +2209,35 @@ async function getCachedPaginatedGitHubRequest({ async function listPaginatedGitHubItems({ request, getItems, + label = "github paginated items", pageSize = 100, }: { - request: (page: number) => Promise>; + request: ( + page: number, + signal: AbortSignal, + ) => Promise>; getItems: (data: unknown) => TItem[]; + label?: string; pageSize?: number; }) { const items: TItem[] = []; let page = 1; + const deadlineAt = Date.now() + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS; while (true) { - const response = await request(page); + const timeoutMs = getRemainingSearchTimeoutMs( + deadlineAt, + GITHUB_OPERATION_TIMEOUT_MS, + ); + if (timeoutMs < GITHUB_MIN_OPERATION_TIMEOUT_MS) { + throw new GitHubOperationTimeoutError(`${label} page ${page}`, timeoutMs); + } + + const response = await withGitHubOperationTimeout( + `${label} page ${page}`, + timeoutMs, + (signal) => request(page, signal), + ); const pageItems = getItems(response.data); items.push(...pageItems); @@ -1666,28 +2448,42 @@ async function getCachedRepoReviewerBots( ], cacheMode: "split", fetcher: async () => { - const installations = await listReviewerBotInstallations(context, params); - const matchingInstallations: GitHubReviewerBotInstallationPayload[] = []; - const ownAppSlug = getGitHubAppSlug(); - - for (const installation of installations) { - if ( - installation.app_slug !== ownAppSlug && - installationCanReviewPullRequests(installation) && - (await installationHasRepositoryAccess(context, installation, params)) - ) { - matchingInstallations.push(installation); - } - } + const bots = await withGitHubOperationTimeout( + `github reviewer bots 
${params.owner}/${params.repo}`, + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS, + async () => { + const installations = await listReviewerBotInstallations( + context, + params, + ); + const matchingInstallations: GitHubReviewerBotInstallationPayload[] = + []; + const ownAppSlug = getGitHubAppSlug(); + + for (const installation of installations) { + if ( + installation.app_slug !== ownAppSlug && + installationCanReviewPullRequests(installation) && + (await installationHasRepositoryAccess( + context, + installation, + params, + )) + ) { + matchingInstallations.push(installation); + } + } - const bots = ( - await Promise.all( - matchingInstallations.map((installation) => - mapReviewerBotInstallation(context, installation), - ), - ) - ).filter((candidate): candidate is RepoCollaborator => - Boolean(candidate), + return ( + await Promise.all( + matchingInstallations.map((installation) => + mapReviewerBotInstallation(context, installation), + ), + ) + ).filter((candidate): candidate is RepoCollaborator => + Boolean(candidate), + ); + }, ); return { @@ -2070,7 +2866,9 @@ async function getCrossReferencesViaGraphQL( data: { owner: string; repo: string; issueNumber: number }, ): Promise { try { - const response = await context.octokit.graphql( + const response = await executeGitHubGraphQL( + context, + `github cross references ${data.owner}/${data.repo}#${data.issueNumber}`, `query ($owner: String!, $repo: String!, $number: Int!) 
{ repository(owner: $owner, name: $repo) { issueOrPullRequest(number: $number) { @@ -2387,14 +3185,189 @@ async function getPullStatusResult( return { kind: "success", - data: await computePullStatus(context, data, pullForStatus), + data: await withGitHubOperationTimeout( + `github pull status ${data.owner}/${data.repo}#${data.pullNumber}`, + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS, + () => computePullStatus(context, data, pullForStatus), + ), metadata: createGitHubResponseMetadata(200, {}), }; }, }); } -async function getPullPageDataResult( +async function getPullPageDataViaGraphQL( + context: GitHubContext, + data: PullFromRepoInput, +): Promise { + const pullNamespaceKey = githubRevalidationSignalKeys.pullEntity({ + owner: data.owner, + repo: data.repo, + pullNumber: data.pullNumber, + }); + + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "pulls.pageData.graphql.v1", + params: data, + freshForMs: githubCachePolicy.detail.staleTimeMs, + signalKeys: [pullNamespaceKey], + namespaceKeys: [pullNamespaceKey], + cacheMode: "split", + fetcher: async () => { + const [response, timelineResult] = await Promise.all([ + executeGitHubGraphQL( + context, + `github pull page ${data.owner}/${data.repo}#${data.pullNumber}`, + `query($owner: String!, $repo: String!, $number: Int!) 
{ + repository(owner: $owner, name: $repo) { + pullRequest(number: $number) { + databaseId + number + title + state + isDraft + createdAt + updatedAt + closedAt + mergedAt + url + body + additions + deletions + changedFiles + comments { totalCount } + author { __typename login avatarUrl url } + labels(first: 20) { + nodes { name color description } + } + headRefName + headRefOid + baseRefName + merged + mergeCommit { oid } + mergedBy { __typename login avatarUrl url } + mergeable + mergeStateStatus + repository { + name + nameWithOwner + url + owner { login } + } + reviewThreads(first: 1) { totalCount } + reviewRequests(first: 100) { + nodes { + requestedReviewer { + __typename + ... on User { + login + avatarUrl + url + } + ... on Team { + name + slug + url + } + } + } + } + commits(first: 100) { + totalCount + nodes { + commit { + oid + message + committedDate + author { + user { + login + avatarUrl + url + } + } + } + } + } + firstComments: comments(first: 30) { + totalCount + nodes { + id + databaseId + body + createdAt + author { __typename login avatarUrl url } + } + } + lastComments: comments(last: 30) { + totalCount + nodes { + id + databaseId + body + createdAt + author { __typename login avatarUrl url } + } + } + } + } + rateLimit { + cost + remaining + resetAt + } + }`, + { + owner: data.owner, + repo: data.repo, + number: data.pullNumber, + }, + ), + getTimelineEventsResult(context, { + owner: data.owner, + repo: data.repo, + issueNumber: data.pullNumber, + }), + ]); + + const pull = response.repository.pullRequest; + if (!pull) { + throw new Error( + `Pull request not found: ${data.owner}/${data.repo}#${data.pullNumber}`, + ); + } + + const totalComments = pull.comments.totalCount; + const loadedPages = getLoadedCommentPages(totalComments); + const detail = mapGraphQLPullDetail(pull); + const events = timelineResult.events; + + return { + kind: "success", + data: { + detail, + comments: mapGraphQLComments(pull.firstComments, pull.lastComments), + 
commits: mapGraphQLPullCommits(pull), + events, + commentPagination: { + totalCount: totalComments, + perPage: COMMENTS_PER_PAGE, + loadedPages, + }, + eventPagination: { + loadedPages: [1], + hasMore: timelineResult.hasMore, + }, + headRefDeleted: deriveHeadRefDeleted(events), + }, + metadata: createGraphQLResponseMetadata(response.rateLimit), + }; + }, + }); +} + +async function getPullPageDataViaRest( context: GitHubContext, data: PullFromRepoInput, ): Promise { @@ -2445,6 +3418,18 @@ async function getPullPageDataResult( }; } +async function getPullPageDataResult( + context: GitHubContext, + data: PullFromRepoInput, +): Promise { + try { + return await getPullPageDataViaGraphQL(context, data); + } catch (error) { + console.error("[pull-page:gql] failed, falling back to REST", error); + return getPullPageDataViaRest(context, data); + } +} + async function getIssueDetailResult( context: GitHubContext, data: IssueFromRepoInput, @@ -2526,7 +3511,139 @@ async function getIssueCommentsResult( }); } -async function getIssuePageDataResult( +async function getIssuePageDataViaGraphQL( + context: GitHubContext, + data: IssueFromRepoInput, +): Promise { + const issueNamespaceKey = githubRevalidationSignalKeys.issueEntity({ + owner: data.owner, + repo: data.repo, + issueNumber: data.issueNumber, + }); + + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "issues.pageData.graphql.v1", + params: data, + freshForMs: githubCachePolicy.detail.staleTimeMs, + signalKeys: [issueNamespaceKey], + namespaceKeys: [issueNamespaceKey], + cacheMode: "split", + fetcher: async () => { + const [response, timelineResult] = await Promise.all([ + executeGitHubGraphQL( + context, + `github issue page ${data.owner}/${data.repo}#${data.issueNumber}`, + `query($owner: String!, $repo: String!, $number: Int!) 
{ + repository(owner: $owner, name: $repo) { + issue(number: $number) { + databaseId + number + title + state + stateReason + createdAt + updatedAt + closedAt + url + body + comments { totalCount } + author { __typename login avatarUrl url } + labels(first: 20) { + nodes { name color description } + } + repository { + name + nameWithOwner + url + owner { login } + } + assignees(first: 20) { + nodes { + __typename + login + avatarUrl + url + } + } + milestone { + title + description + dueOn + } + firstComments: comments(first: 30) { + totalCount + nodes { + id + databaseId + body + createdAt + author { __typename login avatarUrl url } + } + } + lastComments: comments(last: 30) { + totalCount + nodes { + id + databaseId + body + createdAt + author { __typename login avatarUrl url } + } + } + } + } + rateLimit { + cost + remaining + resetAt + } + }`, + { + owner: data.owner, + repo: data.repo, + number: data.issueNumber, + }, + ), + getTimelineEventsResult(context, { + owner: data.owner, + repo: data.repo, + issueNumber: data.issueNumber, + }), + ]); + + const issue = response.repository.issue; + if (!issue) { + throw new Error( + `Issue not found: ${data.owner}/${data.repo}#${data.issueNumber}`, + ); + } + + const totalComments = issue.comments.totalCount; + + return { + kind: "success", + data: { + detail: mapGraphQLIssueDetail(issue), + comments: mapGraphQLComments(issue.firstComments, issue.lastComments), + events: timelineResult.events, + commentPagination: { + totalCount: totalComments, + perPage: COMMENTS_PER_PAGE, + loadedPages: getLoadedCommentPages(totalComments), + }, + eventPagination: { + loadedPages: [1], + hasMore: timelineResult.hasMore, + }, + }, + metadata: createGraphQLResponseMetadata(response.rateLimit), + }; + }, + }); +} + +async function getIssuePageDataViaRest( context: GitHubContext, data: IssueFromRepoInput, ): Promise { @@ -2569,28 +3686,16 @@ async function getIssuePageDataResult( }; } -async function runWithConcurrency( - tasks: Array<() 
=> Promise>, - concurrency = 2, -) { - const results = new Array(tasks.length); - let nextIndex = 0; - - const workers = Array.from( - { length: Math.min(Math.max(concurrency, 1), tasks.length) }, - () => - (async () => { - while (nextIndex < tasks.length) { - const taskIndex = nextIndex; - nextIndex += 1; - results[taskIndex] = await tasks[taskIndex](); - } - })(), - ); - - await Promise.all(workers); - - return results; +async function getIssuePageDataResult( + context: GitHubContext, + data: IssueFromRepoInput, +): Promise { + try { + return await getIssuePageDataViaGraphQL(context, data); + } catch (error) { + console.error("[issue-page:gql] failed, falling back to REST", error); + return getIssuePageDataViaRest(context, data); + } } async function getViewer(context: GitHubContext): Promise { @@ -2616,75 +3721,543 @@ async function resolveUsername(context: GitHubContext, username?: string) { return viewer.login; } -async function getMyPullSlice({ +type GraphQLMyPullsResponse = { + reviewRequested: GitHubGraphQLSearchConnection; + assigned: GitHubGraphQLSearchConnection; + authored: GitHubGraphQLSearchConnection; + mentioned: GitHubGraphQLSearchConnection; + involved: GitHubGraphQLSearchConnection; + rateLimit: GitHubGraphQLRateLimit; +}; + +type GraphQLMyIssuesResponse = { + assigned: GitHubGraphQLSearchConnection; + authored: GitHubGraphQLSearchConnection; + mentioned: GitHubGraphQLSearchConnection; + rateLimit: GitHubGraphQLRateLimit; +}; + +function mapGraphQLPullSearchConnection( + connection: GitHubGraphQLSearchConnection, +) { + return (connection.nodes ?? []) + .map((node) => mapGraphQLPullSearchNode(node)) + .filter((item): item is PullSummary => Boolean(item)); +} + +function mapGraphQLIssueSearchConnection( + connection: GitHubGraphQLSearchConnection, +) { + return (connection.nodes ?? 
[]) + .map((node) => mapGraphQLIssueSearchNode(node)) + .filter((item): item is IssueSummary => Boolean(item)); +} + +function ownerScopeKey(scope: GitHubSearchOwnerScope) { + return `${scope.targetType}:${normalizeLogin(scope.login)}`; +} + +function toSearchOwnerScope( + installation: GitHubAppInstallation, +): GitHubSearchOwnerScope | null { + if ( + installation.suspendedAt || + (installation.targetType !== "Organization" && + installation.targetType !== "User") + ) { + return null; + } + + return { + login: installation.account.login, + targetType: installation.targetType, + }; +} + +function addExcludedOwnerScope( + excludedOwners: Map, + scope: GitHubSearchOwnerScope, +) { + excludedOwners.set(ownerScopeKey(scope), scope); +} + +async function getMySearchSources( + context: GitHubContext, + viewerLogin: string, + deadlineAt: number, +): Promise { + let installations: GitHubAppInstallation[] = []; + let organizations: GitHubOrganization[] = []; + try { + const [installationResult, organizationResult] = + await withGitHubOperationTimeout( + "github search source discovery", + getRemainingSearchTimeoutMs(deadlineAt, MY_SEARCH_SOURCE_TIMEOUT_MS), + () => + Promise.all([ + getGitHubAppUserInstallations(context.session.user.id), + getGitHubAuthenticatedOrganizations(context), + ]), + ); + installations = installationResult.installations; + organizations = organizationResult; + } catch (error) { + console.error("[github-search] failed to discover search sources", error); + } + + const sources: GitHubGraphQLSearchSource[] = []; + const excludedOAuthOwners = new Map(); + + for (const organization of organizations) { + addExcludedOwnerScope(excludedOAuthOwners, { + login: organization.login, + targetType: "Organization", + }); + } + + for (const installation of installations) { + const owner = toSearchOwnerScope(installation); + if (!owner) { + continue; + } + + const contextTimeoutMs = getRemainingSearchTimeoutMs( + deadlineAt, + MY_SEARCH_SOURCE_TIMEOUT_MS, + ); + 
if (contextTimeoutMs < GITHUB_MIN_OPERATION_TIMEOUT_MS) { + debug("github-search", "skipping remaining installations, deadline low", { + remainingMs: contextTimeoutMs, + }); + break; + } + + if (owner.targetType === "Organization") { + addExcludedOwnerScope(excludedOAuthOwners, owner); + } + if ( + owner.targetType === "User" && + installation.repositorySelection === "all" && + normalizeLogin(owner.login) === normalizeLogin(viewerLogin) + ) { + addExcludedOwnerScope(excludedOAuthOwners, owner); + } + + const installationContext = await withGitHubOperationTimeout( + `github installation context ${installation.id}`, + contextTimeoutMs, + () => getGitHubContextForInstallation(context, installation), + ).catch((error) => { + console.error( + "[github-search] failed to create installation source", + installation.account.login, + error, + ); + return null; + }); + if (!installationContext) { + continue; + } + + sources.push({ + label: `installation:${installation.id}:${owner.login}`, + context: installationContext, + owner, + }); + } + + sources.push({ + label: "oauth:fallback", + context, + excludeOwners: [...excludedOAuthOwners.values()], + }); + + return sources; +} + +function mergeSearchBuckets< + TItem extends { + number: number; + updatedAt: string; + repository: RepositoryRef; + }, +>(buckets: TItem[][]) { + const byKey = new Map(); + + for (const bucket of buckets) { + for (const item of bucket) { + const key = `${normalizeLogin(item.repository.fullName)}#${item.number}`; + const existing = byKey.get(key); + if ( + !existing || + Date.parse(item.updatedAt) > Date.parse(existing.updatedAt) + ) { + byKey.set(key, item); + } + } + } + + return [...byKey.values()] + .sort((a, b) => Date.parse(b.updatedAt) - Date.parse(a.updatedAt)) + .slice(0, MY_SEARCH_BUCKET_LIMIT); +} + +function mergeMyPullsResults(results: MyPullsResult[]): MyPullsResult { + return { + reviewRequested: mergeSearchBuckets( + results.map((result) => result.reviewRequested), + ), + assigned: 
mergeSearchBuckets(results.map((result) => result.assigned)), + authored: mergeSearchBuckets(results.map((result) => result.authored)), + mentioned: mergeSearchBuckets(results.map((result) => result.mentioned)), + involved: mergeSearchBuckets(results.map((result) => result.involved)), + }; +} + +function mergeMyIssuesResults(results: MyIssuesResult[]): MyIssuesResult { + return { + assigned: mergeSearchBuckets(results.map((result) => result.assigned)), + authored: mergeSearchBuckets(results.map((result) => result.authored)), + mentioned: mergeSearchBuckets(results.map((result) => result.mentioned)), + }; +} + +function buildSourceSearchQuery({ + itemType, + role, + source, + username, +}: { + itemType: "issue" | "pr"; + role: IssueSearchRole | PullSearchRole; + source: GitHubGraphQLSearchSource; + username: string; +}) { + return buildUserSearchQuery({ + itemType, + role, + state: "open", + username, + owner: source.owner?.login, + ownerType: source.owner?.targetType, + excludeOwners: source.excludeOwners, + }); +} + +async function getMyPullsResult({ context, username, - roleKey, - role, }: { context: GitHubContext; username: string; - roleKey: keyof MyPullsResult; - role: PullSearchRole; }) { - return getCachedGitHubRequest({ - context, - resource: `pulls.mine.${roleKey}`, - params: { username, role }, + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "pulls.mine.graphql.v2", + params: { username }, freshForMs: githubCachePolicy.list.staleTimeMs, signalKeys: [githubRevalidationSignalKeys.pullsMine], namespaceKeys: [githubRevalidationSignalKeys.pullsMine], cacheMode: "split", - request: (headers) => - context.octokit.rest.search.issuesAndPullRequests({ - q: buildUserSearchQuery({ - itemType: "pr", - role, - state: "open", - username, - }), - per_page: 30, - sort: "updated", - order: "desc", - headers, - }), - mapData: (result) => mapPullSearchItems(result.items), + fetcher: async () => { + const deadlineAt = Date.now() + 
MY_SEARCH_TOTAL_TIMEOUT_MS; + const sources = await getMySearchSources(context, username, deadlineAt); + const results: MyPullsResult[] = []; + const rateLimits: GitHubGraphQLRateLimit[] = []; + + for (const source of sources) { + const sourceTimeoutMs = getRemainingSearchTimeoutMs( + deadlineAt, + MY_SEARCH_SOURCE_TIMEOUT_MS, + ); + if (sourceTimeoutMs < GITHUB_MIN_OPERATION_TIMEOUT_MS) { + debug("github-search", "stopping pull search, deadline low", { + remainingMs: sourceTimeoutMs, + }); + break; + } + + try { + const response = await withGitHubOperationTimeout( + `github pull search ${source.label}`, + sourceTimeoutMs, + (signal) => + source.context.octokit.graphql( + `query( + $reviewRequestedQuery: String! + $assignedQuery: String! + $authoredQuery: String! + $mentionedQuery: String! + $involvedQuery: String! + ) { + reviewRequested: search(type: ISSUE, first: 30, query: $reviewRequestedQuery) { + nodes { ...PullSearchFields } + } + assigned: search(type: ISSUE, first: 30, query: $assignedQuery) { + nodes { ...PullSearchFields } + } + authored: search(type: ISSUE, first: 30, query: $authoredQuery) { + nodes { ...PullSearchFields } + } + mentioned: search(type: ISSUE, first: 30, query: $mentionedQuery) { + nodes { ...PullSearchFields } + } + involved: search(type: ISSUE, first: 30, query: $involvedQuery) { + nodes { ...PullSearchFields } + } + rateLimit { + cost + remaining + resetAt + } + } + + fragment PullSearchFields on PullRequest { + __typename + databaseId + number + title + state + isDraft + createdAt + updatedAt + closedAt + mergedAt + url + comments { + totalCount + } + author { + __typename + login + avatarUrl + url + } + labels(first: 20) { + nodes { + name + color + description + } + } + repository { + name + nameWithOwner + url + owner { + login + } + } + }`, + { + reviewRequestedQuery: buildSourceSearchQuery({ + itemType: "pr", + role: "review-requested", + source, + username, + }), + assignedQuery: buildSourceSearchQuery({ + itemType: "pr", + 
role: "assigned", + source, + username, + }), + authoredQuery: buildSourceSearchQuery({ + itemType: "pr", + role: "author", + source, + username, + }), + mentionedQuery: buildSourceSearchQuery({ + itemType: "pr", + role: "mentioned", + source, + username, + }), + involvedQuery: buildSourceSearchQuery({ + itemType: "pr", + role: "involved", + source, + username, + }), + request: { signal }, + }, + ), + ); + + results.push({ + reviewRequested: mapGraphQLPullSearchConnection( + response.reviewRequested, + ), + assigned: mapGraphQLPullSearchConnection(response.assigned), + authored: mapGraphQLPullSearchConnection(response.authored), + mentioned: mapGraphQLPullSearchConnection(response.mentioned), + involved: mapGraphQLPullSearchConnection(response.involved), + }); + rateLimits.push(response.rateLimit); + } catch (error) { + console.error( + "[github-search] failed to load pull requests", + source.label, + error, + ); + } + } + + return { + kind: "success", + data: mergeMyPullsResults(results), + metadata: createGraphQLResponseMetadata( + mergeGraphQLRateLimits(rateLimits), + ), + }; + }, }); } -async function getMyIssueSlice({ +async function getMyIssuesResult({ context, username, - roleKey, - role, }: { context: GitHubContext; username: string; - roleKey: keyof MyIssuesResult; - role: IssueSearchRole; }) { - return getCachedGitHubRequest({ - context, - resource: `issues.mine.${roleKey}`, - params: { username, role }, + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "issues.mine.graphql.v2", + params: { username }, freshForMs: githubCachePolicy.list.staleTimeMs, signalKeys: [githubRevalidationSignalKeys.issuesMine], namespaceKeys: [githubRevalidationSignalKeys.issuesMine], cacheMode: "split", - request: (headers) => - context.octokit.rest.search.issuesAndPullRequests({ - q: buildUserSearchQuery({ - itemType: "issue", - role, - state: "open", - username, - }), - per_page: 30, - sort: "updated", - order: "desc", - headers, - }), - 
mapData: (result) => mapIssueSearchItems(result.items), + fetcher: async () => { + const deadlineAt = Date.now() + MY_SEARCH_TOTAL_TIMEOUT_MS; + const sources = await getMySearchSources(context, username, deadlineAt); + const results: MyIssuesResult[] = []; + const rateLimits: GitHubGraphQLRateLimit[] = []; + + for (const source of sources) { + const sourceTimeoutMs = getRemainingSearchTimeoutMs( + deadlineAt, + MY_SEARCH_SOURCE_TIMEOUT_MS, + ); + if (sourceTimeoutMs < GITHUB_MIN_OPERATION_TIMEOUT_MS) { + debug("github-search", "stopping issue search, deadline low", { + remainingMs: sourceTimeoutMs, + }); + break; + } + + try { + const response = await withGitHubOperationTimeout( + `github issue search ${source.label}`, + sourceTimeoutMs, + (signal) => + source.context.octokit.graphql( + `query( + $assignedQuery: String! + $authoredQuery: String! + $mentionedQuery: String! + ) { + assigned: search(type: ISSUE, first: 30, query: $assignedQuery) { + nodes { ...IssueSearchFields } + } + authored: search(type: ISSUE, first: 30, query: $authoredQuery) { + nodes { ...IssueSearchFields } + } + mentioned: search(type: ISSUE, first: 30, query: $mentionedQuery) { + nodes { ...IssueSearchFields } + } + rateLimit { + cost + remaining + resetAt + } + } + + fragment IssueSearchFields on Issue { + __typename + databaseId + number + title + state + stateReason + createdAt + updatedAt + closedAt + url + comments { + totalCount + } + author { + __typename + login + avatarUrl + url + } + labels(first: 20) { + nodes { + name + color + description + } + } + repository { + name + nameWithOwner + url + owner { + login + } + } + }`, + { + assignedQuery: buildSourceSearchQuery({ + itemType: "issue", + role: "assigned", + source, + username, + }), + authoredQuery: buildSourceSearchQuery({ + itemType: "issue", + role: "author", + source, + username, + }), + mentionedQuery: buildSourceSearchQuery({ + itemType: "issue", + role: "mentioned", + source, + username, + }), + request: { signal }, + 
}, + ), + ); + + results.push({ + assigned: mapGraphQLIssueSearchConnection(response.assigned), + authored: mapGraphQLIssueSearchConnection(response.authored), + mentioned: mapGraphQLIssueSearchConnection(response.mentioned), + }); + rateLimits.push(response.rateLimit); + } catch (error) { + console.error( + "[github-search] failed to load issues", + source.label, + error, + ); + } + } + + return { + kind: "success", + data: mergeMyIssuesResults(results), + metadata: createGraphQLResponseMetadata( + mergeGraphQLRateLimits(rateLimits), + ), + }; + }, }); } @@ -2746,39 +4319,28 @@ export const getGitHubAppAccessState = createServerFn({ return null; } - const viewer = await getViewer(context); + const viewer = await withGitHubOperationTimeout( + "github app access viewer", + GITHUB_OPERATION_TIMEOUT_MS, + () => getViewer(context), + ); const appSlug = getGitHubAppSlug(); const appAuthorizationUrl = buildGitHubAppAuthorizePath(); const publicInstallUrl = buildGitHubAppInstallUrl(appSlug); - const { installations, installationsAvailable } = - await getGitHubAppUserInstallations(context.session.user.id); - - let organizations: GitHubOrganization[] = []; - try { - const organizationsResponse = await context.octokit.request( - "GET /user/orgs", - { - per_page: 100, - }, - ); - const payload = - organizationsResponse.data as GitHubAuthenticatedOrgPayload[]; - organizations = payload.flatMap((organization) => { - if (!organization.id || !organization.login) { - return []; - } + const [ + { installations, installationsAvailable }, + authenticatedOrganizations, + ] = await withGitHubOperationTimeout( + "github app access state", + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS, + () => + Promise.all([ + getGitHubAppUserInstallations(context.session.user.id), + getGitHubAuthenticatedOrganizations(context), + ]), + ); - return [ - { - id: organization.id, - login: organization.login, - avatarUrl: organization.avatar_url ?? 
null, - }, - ]; - }); - } catch (error) { - console.error("[github-access] failed to load organizations", error); - } + let organizations = authenticatedOrganizations; const viewerLogin = viewer.login; const personalInstallation = @@ -2888,13 +4450,14 @@ export const searchCommandPaletteGitHub = createServerFn({ method: "GET" }) safeCommandPaletteSearch({ label: "pull requests", fallback: [] as SearchItem[], - task: async () => { + task: async (signal) => { const response = await context.octokit.rest.search.issuesAndPullRequests({ q: `${query} is:pr involves:${login} archived:false`, per_page: perPage, sort: "updated", order: "desc", + request: { signal }, }); return response.data.items; }, @@ -2902,13 +4465,14 @@ export const searchCommandPaletteGitHub = createServerFn({ method: "GET" }) safeCommandPaletteSearch({ label: "issues", fallback: [] as SearchItem[], - task: async () => { + task: async (signal) => { const response = await context.octokit.rest.search.issuesAndPullRequests({ q: `${query} is:issue involves:${login} archived:false`, per_page: perPage, sort: "updated", order: "desc", + request: { signal }, }); return response.data.items; }, @@ -2945,32 +4509,7 @@ export const getMyPulls = createServerFn({ method: "GET" }).handler( } const viewer = await getViewer(context); - const slices = await runWithConcurrency( - myPullRoleDefinitions.map((definition) => async () => ({ - key: definition.key, - data: await getMyPullSlice({ - context, - username: viewer.login, - roleKey: definition.key, - role: definition.role, - }), - })), - 2, - ); - - return slices.reduce( - (accumulator, slice) => { - accumulator[slice.key] = slice.data; - return accumulator; - }, - { - reviewRequested: [], - assigned: [], - authored: [], - mentioned: [], - involved: [], - }, - ); + return getMyPullsResult({ context, username: viewer.login }); }, ); @@ -3124,30 +4663,7 @@ export const getMyIssues = createServerFn({ method: "GET" }).handler( } const viewer = await getViewer(context); - 
const slices = await runWithConcurrency( - myIssueRoleDefinitions.map((definition) => async () => ({ - key: definition.key, - data: await getMyIssueSlice({ - context, - username: viewer.login, - roleKey: definition.key, - role: definition.role, - }), - })), - 2, - ); - - return slices.reduce( - (accumulator, slice) => { - accumulator[slice.key] = slice.data; - return accumulator; - }, - { - assigned: [], - authored: [], - mentioned: [], - }, - ); + return getMyIssuesResult({ context, username: viewer.login }); }, ); @@ -3827,10 +5343,15 @@ export const getOrgTeams = createServerFn({ method: "GET" }) })), }).catch(() => []); - const teamsWithAccess = await Promise.all( - teams.map(async (team) => - (await orgTeamHasRepoAccess(context, data, team)) ? team : null, - ), + const teamsWithAccess = await withGitHubOperationTimeout( + `github org team access ${data.org}/${data.repo}`, + GITHUB_PAGINATED_OPERATION_TIMEOUT_MS, + () => + Promise.all( + teams.map(async (team) => + (await orgTeamHasRepoAccess(context, data, team)) ? team : null, + ), + ), ); return teamsWithAccess.filter((team): team is OrgTeam => Boolean(team)); @@ -4104,7 +5625,9 @@ export const getUserContributions = createServerFn({ method: "GET" }) }; }; }; - } = await context.octokit.graphql( + } = await executeGitHubGraphQL( + context, + `github user contributions ${data.username}`, `query($username: String!) { user(login: $username) { contributionsCollection { @@ -4177,7 +5700,9 @@ export const getUserPinnedRepos = createServerFn({ method: "GET" }) }>; }; }; - } = await context.octokit.graphql( + } = await executeGitHubGraphQL( + context, + `github user pinned repos ${data.username}`, `query($username: String!) 
{ user(login: $username) { pinnedItems(first: 6, types: REPOSITORY) { @@ -4430,106 +5955,113 @@ type RepoOverviewInput = { export const getRepoOverview = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await getGitHubContextForRepository(data); if (!context) return null; - const [ - repoRes, - branchesRes, - tagsRes, - commitsRes, - openPullsRes, - openIssuesRes, - ] = await Promise.all([ - context.octokit.rest.repos.get({ - owner: data.owner, - repo: data.repo, - }), - context.octokit.rest.repos.listBranches({ - owner: data.owner, - repo: data.repo, - per_page: 1, - }), - context.octokit.rest.repos.listTags({ - owner: data.owner, - repo: data.repo, - per_page: 1, - }), - context.octokit.rest.repos.listCommits({ - owner: data.owner, - repo: data.repo, - per_page: 1, - }), - context.octokit.rest.pulls.list({ - owner: data.owner, - repo: data.repo, - state: "open", - per_page: 1, - }), - context.octokit.rest.issues.listForRepo({ - owner: data.owner, - repo: data.repo, - state: "open", - per_page: 1, - }), - ]); + const repoMetaKey = githubRevalidationSignalKeys.repoMeta(data); + const repoCodeKey = githubRevalidationSignalKeys.repoCode(data); - const repo = repoRes.data; - - // Extract total counts from Link headers - const branchCount = - parseLinkHeaderLastPage(branchesRes.headers.link as string | undefined) ?? - branchesRes.data.length; - const tagCount = - parseLinkHeaderLastPage(tagsRes.headers.link as string | undefined) ?? - tagsRes.data.length; - const openPullCount = - parseLinkHeaderLastPage( - openPullsRes.headers.link as string | undefined, - ) ?? openPullsRes.data.length; - // issues.listForRepo includes PRs, so subtract pull count for pure issues - const openIssueAndPrCount = - parseLinkHeaderLastPage( - openIssuesRes.headers.link as string | undefined, - ) ?? 
openIssuesRes.data.length; - const openIssueCount = Math.max(0, openIssueAndPrCount - openPullCount); - - const latestCommit = commitsRes.data[0] - ? { - sha: commitsRes.data[0].sha, - message: commitsRes.data[0].commit.message, - date: - commitsRes.data[0].commit.committer?.date ?? - commitsRes.data[0].commit.author?.date ?? - "", - author: mapActor(commitsRes.data[0].author), + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "repo.overview.v1", + params: data, + freshForMs: githubCachePolicy.repoMeta.staleTimeMs, + signalKeys: [repoMetaKey, repoCodeKey], + namespaceKeys: [repoMetaKey, repoCodeKey], + cacheMode: "split", + fetcher: async () => { + const response = + await executeGitHubGraphQL( + context, + `github repo overview ${data.owner}/${data.repo}`, + `query($owner: String!, $repo: String!) { + repository(owner: $owner, name: $repo) { + databaseId + name + nameWithOwner + description + isPrivate + isFork + defaultBranchRef { + name + target { + __typename + ... 
on Commit { + oid + message + committedDate + author { + user { + login + avatarUrl + url + } + } + } + } + } + stargazerCount + forkCount + watchers(first: 1) { + totalCount + } + primaryLanguage { + name + } + licenseInfo { + spdxId + } + repositoryTopics(first: 20) { + nodes { + topic { + name + } + } + } + url + owner { + login + avatarUrl + } + branches: refs(refPrefix: "refs/heads/", first: 1) { + totalCount + } + tags: refs(refPrefix: "refs/tags/", first: 1) { + totalCount + } + pullRequests(states: OPEN, first: 1) { + totalCount + } + issues(states: OPEN, first: 1) { + totalCount + } + hasDiscussionsEnabled + } + rateLimit { + cost + remaining + resetAt + } + }`, + { + owner: data.owner, + repo: data.repo, + }, + ); + + if (!response.repository) { + throw new Error( + `GitHub repository not found: ${data.owner}/${data.repo}`, + ); } - : null; - return { - id: repo.id, - name: repo.name, - fullName: repo.full_name, - description: repo.description, - isPrivate: repo.private, - isFork: repo.fork, - defaultBranch: repo.default_branch, - stars: repo.stargazers_count, - forks: repo.forks_count, - watchers: repo.subscribers_count, - language: repo.language, - license: repo.license?.spdx_id ?? null, - topics: repo.topics ?? 
[], - url: repo.html_url, - owner: repo.owner.login, - ownerAvatarUrl: repo.owner.avatar_url, - branchCount, - tagCount, - openPullCount, - openIssueCount, - hasDiscussions: !!(repo as Record).has_discussions, - latestCommit, - }; + return { + kind: "success", + data: mapGraphQLRepoOverview(response.repository), + metadata: createGraphQLResponseMetadata(response.rateLimit), + }; + }, + }); }); // --------------------------------------------------------------------------- @@ -4564,54 +6096,86 @@ type GraphQLDiscussionsResponse = { export const getRepoDiscussions = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await getGitHubContextForRepository(data); if (!context) return { discussions: [], totalCount: 0 }; - try { - const response = - await context.octokit.graphql( - `query($owner: String!, $repo: String!, $first: Int!) { - repository(owner: $owner, name: $repo) { - discussions(first: $first, orderBy: { field: UPDATED_AT, direction: DESC }) { - totalCount - nodes { - number - title - createdAt - updatedAt - author { login avatarUrl } - category { name emojiHTML } - comments { totalCount } - answerChosenAt - url + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "repo.discussions.v1", + params: { + owner: data.owner, + repo: data.repo, + first: data.first ?? 5, + }, + freshForMs: githubCachePolicy.list.staleTimeMs, + signalKeys: [githubRevalidationSignalKeys.repoMeta(data)], + namespaceKeys: [githubRevalidationSignalKeys.repoMeta(data)], + cacheMode: "split", + fetcher: async () => { + try { + const response = + await executeGitHubGraphQL( + context, + `github repo discussions ${data.owner}/${data.repo}`, + `query($owner: String!, $repo: String!, $first: Int!) 
{ + repository(owner: $owner, name: $repo) { + discussions(first: $first, orderBy: { field: UPDATED_AT, direction: DESC }) { + totalCount + nodes { + number + title + createdAt + updatedAt + author { login avatarUrl } + category { name emojiHTML } + comments { totalCount } + answerChosenAt + url + } + } } - } - } - }`, - { - owner: data.owner, - repo: data.repo, - first: data.first ?? 5, - }, - ); + }`, + { + owner: data.owner, + repo: data.repo, + first: data.first ?? 5, + }, + ); - return { - totalCount: response.repository.discussions.totalCount, - discussions: response.repository.discussions.nodes.map((d) => ({ - number: d.number, - title: d.title, - createdAt: d.createdAt, - updatedAt: d.updatedAt, - author: d.author, - category: d.category?.name ?? null, - comments: d.comments.totalCount, - isAnswered: d.answerChosenAt !== null, - url: d.url, - })), - }; - } catch { - return { discussions: [], totalCount: 0 }; - } + return { + kind: "success", + data: { + totalCount: response.repository.discussions.totalCount, + discussions: response.repository.discussions.nodes.map((d) => ({ + number: d.number, + title: d.title, + createdAt: d.createdAt, + updatedAt: d.updatedAt, + author: d.author, + category: d.category?.name ?? 
null, + comments: d.comments.totalCount, + isAnswered: d.answerChosenAt !== null, + url: d.url, + })), + }, + metadata: createGitHubResponseMetadata(200, {}), + }; + } catch (error) { + if ( + error instanceof RequestError && + (error.status === 403 || error.status === 429) + ) { + throw error; + } + + return { + kind: "success", + data: { discussions: [], totalCount: 0 }, + metadata: createGitHubResponseMetadata(200, {}), + }; + } + }, + }); }); function parseLinkHeaderLastPage(link: string | undefined): number | null { @@ -4632,19 +6196,35 @@ type RepoBranchesInput = { export const getRepoBranches = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await getGitHubContextForRepository(data); if (!context) return []; - const res = await context.octokit.rest.repos.listBranches({ - owner: data.owner, - repo: data.repo, - per_page: 25, + return getCachedGitHubRequest< + Awaited< + ReturnType + >["data"], + RepoBranch[] + >({ + context, + resource: "repo.branches.v1", + params: data, + freshForMs: githubCachePolicy.repoMeta.staleTimeMs, + signalKeys: [githubRevalidationSignalKeys.repoCode(data)], + namespaceKeys: [githubRevalidationSignalKeys.repoCode(data)], + cacheMode: "split", + request: (headers) => + context.octokit.rest.repos.listBranches({ + owner: data.owner, + repo: data.repo, + per_page: 25, + headers, + }), + mapData: (branches) => + branches.map((b) => ({ + name: b.name, + isProtected: b.protected, + })), }); - - return res.data.map((b) => ({ - name: b.name, - isProtected: b.protected, - })); }); // --------------------------------------------------------------------------- @@ -4661,76 +6241,54 @@ type RepoTreeInput = { export const getRepoTree = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await 
getGitHubContextForRepository(data); if (!context) return []; - const res = await context.octokit.rest.repos.getContent({ - owner: data.owner, - repo: data.repo, - path: data.path, - ref: data.ref, - }); - - if (!Array.isArray(res.data)) return []; - - const entries: RepoTreeEntry[] = res.data.map((item) => ({ - name: item.name, - type: - item.type === "dir" - ? "dir" - : item.type === "submodule" - ? "submodule" - : "file", - path: item.path, - sha: item.sha, - size: item.size ?? null, - lastCommit: null, - })); - - // Fetch last commit for each entry in parallel (capped at 15 to avoid rate limits) - const BATCH_SIZE = 15; - for (let i = 0; i < entries.length; i += BATCH_SIZE) { - const batch = entries.slice(i, i + BATCH_SIZE); - const commitResults = await Promise.all( - batch.map(async (entry) => { - try { - const commitRes = await context.octokit.rest.repos.listCommits({ - owner: data.owner, - repo: data.repo, - sha: data.ref, - path: entry.path, - per_page: 1, - }); - const commit = commitRes.data[0]; - if (commit) { - return { - message: commit.commit.message.split("\n")[0], - date: - commit.commit.committer?.date ?? - commit.commit.author?.date ?? - "", - }; - } - } catch { - // Ignore individual commit fetch failures - } - return null; + return getCachedGitHubRequest< + Awaited>["data"], + RepoTreeEntry[] + >({ + context, + resource: "repo.tree.v1", + params: data, + freshForMs: githubCachePolicy.detail.staleTimeMs, + signalKeys: [githubRevalidationSignalKeys.repoCode(data)], + namespaceKeys: [githubRevalidationSignalKeys.repoCode(data)], + cacheMode: "split", + request: (headers) => + context.octokit.rest.repos.getContent({ + owner: data.owner, + repo: data.repo, + path: data.path, + ref: data.ref, + headers, }), - ); + mapData: (content) => { + if (!Array.isArray(content)) return []; - for (let j = 0; j < batch.length; j++) { - batch[j].lastCommit = commitResults[j] ?? 
null; - } - } + const entries: RepoTreeEntry[] = content.map((item) => ({ + name: item.name, + type: + item.type === "dir" + ? "dir" + : item.type === "submodule" + ? "submodule" + : "file", + path: item.path, + sha: item.sha, + size: item.size ?? null, + lastCommit: null, + })); - // Sort: dirs first, then files, alphabetically within each group - entries.sort((a, b) => { - if (a.type === "dir" && b.type !== "dir") return -1; - if (a.type !== "dir" && b.type === "dir") return 1; - return a.name.localeCompare(b.name); - }); + entries.sort((a, b) => { + if (a.type === "dir" && b.type !== "dir") return -1; + if (a.type !== "dir" && b.type === "dir") return 1; + return a.name.localeCompare(b.name); + }); - return entries; + return entries; + }, + }); }); // --------------------------------------------------------------------------- @@ -4747,22 +6305,33 @@ type RepoFileContentInput = { export const getRepoFileContent = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await getGitHubContextForRepository(data); if (!context) return null; - try { - const res = await context.octokit.rest.repos.getContent({ - owner: data.owner, - repo: data.repo, - path: data.path, - ref: data.ref, - }); - - if (Array.isArray(res.data) || !("content" in res.data)) return null; - return Buffer.from(res.data.content, "base64").toString("utf-8"); - } catch { - return null; - } + return getCachedGitHubRequest< + Awaited>["data"], + string | null + >({ + context, + resource: "repo.fileContent.v1", + params: data, + freshForMs: githubCachePolicy.repoMeta.staleTimeMs, + signalKeys: [githubRevalidationSignalKeys.repoCode(data)], + namespaceKeys: [githubRevalidationSignalKeys.repoCode(data)], + cacheMode: "split", + request: (headers) => + context.octokit.rest.repos.getContent({ + owner: data.owner, + repo: data.repo, + path: data.path, + ref: data.ref, + headers, + }), + 
mapData: (content) => { + if (Array.isArray(content) || !("content" in content)) return null; + return Buffer.from(content.content, "base64").toString("utf-8"); + }, + }).catch(() => null); }); // --------------------------------------------------------------------------- @@ -4777,27 +6346,67 @@ type RepoContributorsInput = { export const getRepoContributors = createServerFn({ method: "GET" }) .inputValidator(identityValidator) .handler(async ({ data }): Promise => { - const context = await getGitHubContext(); + const context = await getGitHubContextForRepository(data); if (!context) return { contributors: [], totalCount: 0 }; - const res = await context.octokit.rest.repos.listContributors({ - owner: data.owner, - repo: data.repo, - per_page: 30, - anon: "false", - }); - - const totalCount = - parseLinkHeaderLastPage(res.headers.link as string | undefined) ?? - res.data.length; + return getOrRevalidateGitHubResource({ + userId: context.session.user.id, + resource: "repo.contributors.v1", + params: data, + freshForMs: githubCachePolicy.repoMeta.staleTimeMs, + signalKeys: [githubRevalidationSignalKeys.repoCode(data)], + namespaceKeys: [githubRevalidationSignalKeys.repoCode(data)], + cacheMode: "split", + fetcher: async (conditionals) => { + try { + const response = await context.octokit.rest.repos.listContributors({ + owner: data.owner, + repo: data.repo, + per_page: 30, + anon: "false", + headers: buildConditionalHeaders(conditionals), + }); + const totalCount = + parseLinkHeaderLastPage( + response.headers.link as string | undefined, + ) ?? response.data.length; - const contributors: RepoContributor[] = res.data - .filter((c): c is typeof c & { login: string } => !!c.login) - .map((c) => ({ - login: c.login, - avatarUrl: c.avatar_url ?? "", - contributions: c.contributions ?? 
0, - })); + return { + kind: "success", + data: { + contributors: response.data + .filter((c): c is typeof c & { login: string } => !!c.login) + .map((c) => ({ + login: c.login, + avatarUrl: c.avatar_url ?? "", + contributions: c.contributions ?? 0, + })), + totalCount, + }, + metadata: createGitHubResponseMetadata( + response.status, + normalizeResponseHeaders(response.headers), + ), + }; + } catch (error) { + if ( + error instanceof RequestError && + error.status === 304 && + error.response?.headers + ) { + return { + kind: "not-modified", + metadata: createGitHubResponseMetadata( + 304, + normalizeResponseHeaders( + error.response.headers as Record, + ), + ), + }; + } - return { contributors, totalCount }; + throw error; + } + }, + }); }); diff --git a/apps/dashboard/src/lib/github.server.test.ts b/apps/dashboard/src/lib/github.server.test.ts index 283936b..ded0f50 100644 --- a/apps/dashboard/src/lib/github.server.test.ts +++ b/apps/dashboard/src/lib/github.server.test.ts @@ -2,12 +2,16 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; const octokitInstances: Array<{ hookBefore: ReturnType; + hookAfter: ReturnType; + hookError: ReturnType; log: { warn: ReturnType; info: ReturnType }; options: Record; }> = []; const octokitConstructor = vi.fn((options: Record) => { const instance = { hookBefore: vi.fn(), + hookAfter: vi.fn(), + hookError: vi.fn(), log: { warn: vi.fn(), info: vi.fn(), @@ -19,7 +23,9 @@ const octokitConstructor = vi.fn((options: Record) => { return { hook: { + after: instance.hookAfter, before: instance.hookBefore, + error: instance.hookError, }, log: instance.log, }; @@ -41,6 +47,7 @@ vi.mock("./github-app.server", () => ({ })); beforeEach(() => { + vi.resetModules(); octokitInstances.length = 0; octokitConstructor.mockClear(); appConstructor.mockClear(); @@ -100,6 +107,8 @@ describe("getGitHubClient", () => { expect(options.throttle.fallbackSecondaryRateRetryAfter).toBe(60); expect(instance.hookBefore).toHaveBeenCalledTimes(1); + 
expect(instance.hookAfter).toHaveBeenCalledTimes(1); + expect(instance.hookError).toHaveBeenCalledTimes(1); const [hookEvent, hookHandler] = instance.hookBefore.mock.calls[0] as [ string, (options: { @@ -158,13 +167,23 @@ describe("getGitHubClient", () => { }); it("creates GitHub App installation clients from app credentials", async () => { - const installationOctokit = { + const appAuth = vi.fn(async () => ({ + token: "installation-token", + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + permissions: { contents: "read" }, + repositoryIds: [123], + repositorySelection: "selected", + })); + const appOctokit = { + auth: appAuth, hook: { + after: vi.fn(), before: vi.fn(), + error: vi.fn(), }, }; appConstructor.mockImplementationOnce(() => ({ - getInstallationOctokit: vi.fn(async () => installationOctokit), + octokit: appOctokit, })); const { getGitHubInstallationClient } = await import("./github.server"); @@ -177,6 +196,68 @@ describe("getGitHubClient", () => { privateKey: "private-key", Octokit: octokitConstructor, }); - expect(installationOctokit.hook.before).toHaveBeenCalledTimes(1); + expect(appAuth).toHaveBeenCalledWith({ + type: "installation", + installationId: 987, + }); + expect(octokitConstructor).toHaveBeenCalledTimes(1); + const [installationInstance] = octokitInstances; + const options = installationInstance.options as { + auth: string; + userAgent: string; + retry: { enabled: boolean }; + throttle: { + enabled: boolean; + id: string; + fallbackSecondaryRateRetryAfter: number; + }; + }; + expect(options.auth).toBe("installation-token"); + expect(options.userAgent).toBe("diffkit-dashboard"); + expect(options.retry).toEqual({ enabled: true }); + expect(options.throttle.enabled).toBe(true); + expect(options.throttle.id).toBe("github-installation:987"); + expect(options.throttle.fallbackSecondaryRateRetryAfter).toBe(60); + expect(appOctokit.hook.before).toHaveBeenCalledTimes(1); + expect(appOctokit.hook.after).toHaveBeenCalledTimes(1); + 
expect(appOctokit.hook.error).toHaveBeenCalledTimes(1); + expect(installationInstance.hookBefore).toHaveBeenCalledTimes(1); + expect(installationInstance.hookAfter).toHaveBeenCalledTimes(1); + expect(installationInstance.hookError).toHaveBeenCalledTimes(1); + }); + + it("reuses fresh installation tokens and remints after invalidation", async () => { + const appAuth = vi.fn(async () => ({ + token: `installation-token-${appAuth.mock.calls.length}`, + expiresAt: new Date(Date.now() + 60 * 60 * 1000).toISOString(), + })); + appConstructor.mockImplementation(() => ({ + octokit: { + auth: appAuth, + hook: { + after: vi.fn(), + before: vi.fn(), + error: vi.fn(), + }, + }, + })); + const { getGitHubInstallationClient, invalidateGitHubInstallationToken } = + await import("./github.server"); + + await getGitHubInstallationClient(987); + await getGitHubInstallationClient(987); + + expect(appConstructor).toHaveBeenCalledTimes(1); + expect(appAuth).toHaveBeenCalledTimes(1); + expect(octokitConstructor).toHaveBeenCalledTimes(2); + expect(octokitInstances[0].options.auth).toBe("installation-token-1"); + expect(octokitInstances[1].options.auth).toBe("installation-token-1"); + + await invalidateGitHubInstallationToken(987); + await getGitHubInstallationClient(987); + + expect(appConstructor).toHaveBeenCalledTimes(2); + expect(appAuth).toHaveBeenCalledTimes(2); + expect(octokitInstances[2].options.auth).toBe("installation-token-2"); }); }); diff --git a/apps/dashboard/src/lib/github.server.ts b/apps/dashboard/src/lib/github.server.ts index b86a9bf..9f17fee 100644 --- a/apps/dashboard/src/lib/github.server.ts +++ b/apps/dashboard/src/lib/github.server.ts @@ -9,6 +9,9 @@ import { configureGitHubRequestPolicies } from "./github-request-policy"; const GITHUB_CLIENT_USER_AGENT = "diffkit-dashboard"; const GITHUB_SECONDARY_RATE_LIMIT_FALLBACK_SECONDS = 60; +const GITHUB_INSTALLATION_TOKEN_CACHE_VERSION = "v1"; +const GITHUB_INSTALLATION_TOKEN_REFRESH_BUFFER_MS = 5 * 60 * 1000; +const 
GITHUB_INSTALLATION_TOKEN_MIN_KV_TTL_SECONDS = 60; type GitHubThrottleRequestOptions = { method?: string; @@ -17,6 +20,44 @@ type GitHubThrottleRequestOptions = { type GitHubThrottleClient = Pick; +type GitHubInstallationTokenCacheEntry = { + installationId: number; + token: string; + expiresAt: string; + permissions?: Record; + repositoryIds?: number[]; + repositorySelection?: string; + cachedAt: number; +}; + +type GitHubInstallationAuthResult = { + token?: string; + expiresAt?: string; + permissions?: Record; + repositoryIds?: number[]; + repositorySelection?: string; +}; + +type GitHubInstallationTokenCacheStore = { + get(storageKey: string): Promise; + put( + storageKey: string, + entry: GitHubInstallationTokenCacheEntry, + expirationTtlSeconds: number, + ): Promise; + delete(storageKey: string): Promise; +}; + +const githubInstallationTokenMemoryCache = new Map< + number, + GitHubInstallationTokenCacheEntry +>(); +const githubInstallationTokenInflight = new Map< + number, + Promise +>(); +const githubInstallationTokenInvalidationVersions = new Map(); + export async function getGitHubClient(userId: string): Promise { const octokit = new Octokit({ auth: await getGitHubAccessTokenByUserId(userId), @@ -54,7 +95,9 @@ export async function getGitHubClient(userId: string): Promise { }, }); - configureGitHubRequestPolicies(octokit); + configureGitHubRequestPolicies(octokit, { + tokenLabel: `oauth:user:${userId}`, + }); return octokit; } @@ -62,6 +105,137 @@ export async function getGitHubClient(userId: string): Promise { export async function getGitHubInstallationClient( installationId: number, ): Promise { + const tokenEntry = await getCachedGitHubInstallationToken(installationId); + const octokit = new Octokit({ + auth: tokenEntry.token, + userAgent: GITHUB_CLIENT_USER_AGENT, + retry: { enabled: true }, + throttle: { + enabled: true, + id: `github-installation:${installationId}`, + fallbackSecondaryRateRetryAfter: + GITHUB_SECONDARY_RATE_LIMIT_FALLBACK_SECONDS, + 
onRateLimit: ( + retryAfter: number, + options: GitHubThrottleRequestOptions, + throttledOctokit: GitHubThrottleClient, + retryCount: number, + ) => { + throttledOctokit.log.warn( + `GitHub rate limit for ${options.method} ${options.url}; retryAfter=${retryAfter}s retryCount=${retryCount}`, + ); + + return false; + }, + onSecondaryRateLimit: ( + retryAfter: number, + options: GitHubThrottleRequestOptions, + throttledOctokit: GitHubThrottleClient, + retryCount: number, + ) => { + throttledOctokit.log.warn( + `GitHub secondary rate limit for ${options.method} ${options.url}; retryAfter=${retryAfter}s retryCount=${retryCount}`, + ); + + return false; + }, + }, + }); + + configureGitHubRequestPolicies(octokit, { + tokenLabel: `installation:${installationId}`, + }); + + return octokit; +} + +export async function invalidateGitHubInstallationToken( + installationId: number, +) { + githubInstallationTokenMemoryCache.delete(installationId); + githubInstallationTokenInflight.delete(installationId); + githubInstallationTokenInvalidationVersions.set( + installationId, + getGitHubInstallationTokenInvalidationVersion(installationId) + 1, + ); + + try { + const store = await getGitHubInstallationTokenCacheStore(); + await store?.delete(getGitHubInstallationTokenStorageKey(installationId)); + } catch { + // Best effort: webhook processing should not fail if KV is unavailable. 
+ } +} + +async function getCachedGitHubInstallationToken( + installationId: number, +): Promise { + const cachedEntry = githubInstallationTokenMemoryCache.get(installationId); + if (cachedEntry && isFreshGitHubInstallationToken(cachedEntry)) { + return cachedEntry; + } + if (cachedEntry) { + githubInstallationTokenMemoryCache.delete(installationId); + } + + const inflightEntry = githubInstallationTokenInflight.get(installationId); + if (inflightEntry) { + return inflightEntry; + } + + const invalidationVersion = + getGitHubInstallationTokenInvalidationVersion(installationId); + const tokenPromise = loadGitHubInstallationToken( + installationId, + invalidationVersion, + ); + githubInstallationTokenInflight.set(installationId, tokenPromise); + + try { + return await tokenPromise; + } finally { + if (githubInstallationTokenInflight.get(installationId) === tokenPromise) { + githubInstallationTokenInflight.delete(installationId); + } + } +} + +async function loadGitHubInstallationToken( + installationId: number, + invalidationVersion: number, +) { + const storageKey = getGitHubInstallationTokenStorageKey(installationId); + const storedEntry = await readGitHubInstallationTokenFromStore(storageKey); + if ( + isGitHubInstallationTokenCacheEntry(storedEntry, installationId) && + isFreshGitHubInstallationToken(storedEntry) + ) { + cacheGitHubInstallationTokenInMemory( + storedEntry, + installationId, + invalidationVersion, + ); + return storedEntry; + } + + const tokenEntry = await mintGitHubInstallationToken(installationId); + cacheGitHubInstallationTokenInMemory( + tokenEntry, + installationId, + invalidationVersion, + ); + await writeGitHubInstallationTokenToStore( + storageKey, + tokenEntry, + installationId, + invalidationVersion, + ); + return tokenEntry; +} + +async function mintGitHubInstallationToken( + installationId: number, +): Promise { const appId = getGitHubAppId(); const privateKey = getGitHubAppPrivateKey(); if (!appId || !privateKey) { @@ -75,9 +249,164 @@ 
export async function getGitHubInstallationClient( privateKey, Octokit, }); - const octokit = await app.getInstallationOctokit(installationId); - configureGitHubRequestPolicies(octokit); + configureGitHubRequestPolicies(app.octokit, { + tokenLabel: `app-auth:installation:${installationId}`, + }); - return octokit; + const auth = (await app.octokit.auth({ + type: "installation", + installationId, + })) as GitHubInstallationAuthResult; + + if (!auth.token || !auth.expiresAt) { + throw new Error( + `GitHub App did not return an installation token for installation ${installationId}.`, + ); + } + + return { + installationId, + token: auth.token, + expiresAt: auth.expiresAt, + permissions: auth.permissions, + repositoryIds: auth.repositoryIds, + repositorySelection: auth.repositorySelection, + cachedAt: Date.now(), + }; +} + +function isFreshGitHubInstallationToken( + entry: GitHubInstallationTokenCacheEntry, +) { + const expiresAtMs = Date.parse(entry.expiresAt); + return ( + Number.isFinite(expiresAtMs) && + expiresAtMs - Date.now() > GITHUB_INSTALLATION_TOKEN_REFRESH_BUFFER_MS + ); +} + +function isGitHubInstallationTokenCacheEntry( + value: unknown, + installationId: number, +): value is GitHubInstallationTokenCacheEntry { + return ( + isRecord(value) && + value.installationId === installationId && + typeof value.token === "string" && + typeof value.expiresAt === "string" && + typeof value.cachedAt === "number" + ); +} + +function isRecord(value: unknown): value is Record { + return Boolean(value) && typeof value === "object"; +} + +function getGitHubInstallationTokenStorageTtlSeconds( + entry: GitHubInstallationTokenCacheEntry, +) { + const expiresAtMs = Date.parse(entry.expiresAt); + if (!Number.isFinite(expiresAtMs)) { + return 0; + } + + return Math.floor( + (expiresAtMs - Date.now() - GITHUB_INSTALLATION_TOKEN_REFRESH_BUFFER_MS) / + 1000, + ); +} + +function getGitHubInstallationTokenStorageKey(installationId: number) { + return 
`github-installation-token:${GITHUB_INSTALLATION_TOKEN_CACHE_VERSION}:${installationId}`; +} + +function getGitHubInstallationTokenInvalidationVersion(installationId: number) { + return githubInstallationTokenInvalidationVersions.get(installationId) ?? 0; +} + +function cacheGitHubInstallationTokenInMemory( + entry: GitHubInstallationTokenCacheEntry, + installationId: number, + invalidationVersion: number, +) { + if ( + getGitHubInstallationTokenInvalidationVersion(installationId) !== + invalidationVersion + ) { + return; + } + + githubInstallationTokenMemoryCache.set(installationId, entry); +} + +async function readGitHubInstallationTokenFromStore(storageKey: string) { + try { + const store = await getGitHubInstallationTokenCacheStore(); + return (await store?.get(storageKey)) ?? null; + } catch { + return null; + } +} + +async function writeGitHubInstallationTokenToStore( + storageKey: string, + entry: GitHubInstallationTokenCacheEntry, + installationId: number, + invalidationVersion: number, +) { + if ( + getGitHubInstallationTokenInvalidationVersion(installationId) !== + invalidationVersion + ) { + return; + } + + const ttlSeconds = getGitHubInstallationTokenStorageTtlSeconds(entry); + if (ttlSeconds < GITHUB_INSTALLATION_TOKEN_MIN_KV_TTL_SECONDS) { + return; + } + + try { + const store = await getGitHubInstallationTokenCacheStore(); + await store?.put(storageKey, entry, ttlSeconds); + } catch { + // Best effort: the in-memory cache still dedupes within this worker. + } +} + +async function getGitHubInstallationTokenCacheStore(): Promise { + try { + const { env } = await import("cloudflare:workers"); + const workerEnv = env as { + GITHUB_CACHE_KV?: KVNamespace; + }; + const kv = workerEnv.GITHUB_CACHE_KV; + + if (!kv) { + return null; + } + + return { + async get(storageKey) { + const entry = await kv.get( + storageKey, + { + type: "json", + }, + ); + return entry ?? 
null; + }, + async put(storageKey, entry, expirationTtlSeconds) { + await kv.put(storageKey, JSON.stringify(entry), { + expirationTtl: expirationTtlSeconds, + }); + }, + async delete(storageKey) { + await kv.delete(storageKey); + }, + }; + } catch { + return null; + } } diff --git a/apps/dashboard/src/lib/use-github-revalidation.ts b/apps/dashboard/src/lib/use-github-revalidation.ts deleted file mode 100644 index 15eb3fd..0000000 --- a/apps/dashboard/src/lib/use-github-revalidation.ts +++ /dev/null @@ -1,286 +0,0 @@ -import { type QueryClient, useQueryClient } from "@tanstack/react-query"; -import { useEffect, useMemo } from "react"; -import { debug } from "./debug"; -import { getGitHubRevalidationSignalRecords } from "./github.functions"; -import { type GitHubQueryScope, githubQueryKeys } from "./github.query"; -import { - getGitHubRevalidationSignalKeysForTab, - githubRevalidationSignalKeys, -} from "./github-revalidation"; -import { type Tab, useTabs } from "./tab-store"; - -function isRecord(value: unknown): value is Record { - return Boolean(value) && typeof value === "object"; -} - -const GITHUB_REVALIDATION_POLL_INTERVAL_MS = 10_000; -const GITHUB_REVALIDATION_INITIAL_DELAY_MS = 5_000; - -function getUniqueRepoKeys(tabs: Tab[]) { - const repos = new Set(); - for (const tab of tabs) { - repos.add(tab.repo); - } - return Array.from(repos); -} - -function getUniqueSignalKeys(tabs: Tab[]) { - return Array.from( - new Set([ - githubRevalidationSignalKeys.pullsMine, - githubRevalidationSignalKeys.issuesMine, - ...tabs.flatMap((tab) => getGitHubRevalidationSignalKeysForTab(tab)), - ...getUniqueRepoKeys(tabs).map((repo) => { - const [owner, name] = repo.split("/"); - return githubRevalidationSignalKeys.repoCode({ - owner, - repo: name, - }); - }), - ]), - ); -} - -function getQueryUpdatedAt( - queryClient: QueryClient, - queryKey: readonly unknown[], -) { - return queryClient.getQueryState(queryKey)?.dataUpdatedAt ?? 
0; -} - -type NumberedTab = Tab & { number: number }; - -async function invalidatePullTabQueries( - queryClient: QueryClient, - scope: GitHubQueryScope, - tab: NumberedTab, -) { - const [owner, repo] = tab.repo.split("/"); - const input = { owner, repo, pullNumber: tab.number }; - const queryKeys = [ - githubQueryKeys.pulls.page(scope, input), - githubQueryKeys.pulls.detail(scope, input), - githubQueryKeys.pulls.fileSummaries(scope, input), - githubQueryKeys.pulls.comments(scope, input), - githubQueryKeys.pulls.status(scope, input), - githubQueryKeys.pulls.files(scope, input), - githubQueryKeys.pulls.reviewComments(scope, input), - ]; - - const hasOlderQuery = queryKeys.some( - (queryKey) => getQueryUpdatedAt(queryClient, queryKey) > 0, - ); - if (!hasOlderQuery) { - return; - } - - await Promise.all( - queryKeys.map((queryKey) => - queryClient.invalidateQueries({ queryKey, refetchType: "all" }), - ), - ); -} - -async function invalidateIssueTabQueries( - queryClient: QueryClient, - scope: GitHubQueryScope, - tab: NumberedTab, -) { - const [owner, repo] = tab.repo.split("/"); - const input = { owner, repo, issueNumber: tab.number }; - const queryKeys = [ - githubQueryKeys.issues.page(scope, input), - githubQueryKeys.issues.detail(scope, input), - githubQueryKeys.issues.comments(scope, input), - ]; - - const hasOlderQuery = queryKeys.some( - (queryKey) => getQueryUpdatedAt(queryClient, queryKey) > 0, - ); - if (!hasOlderQuery) { - return; - } - - await Promise.all( - queryKeys.map((queryKey) => - queryClient.invalidateQueries({ queryKey, refetchType: "all" }), - ), - ); -} - -export function useGitHubRevalidation(userId: string) { - const queryClient = useQueryClient(); - const tabs = useTabs(); - const scope = useMemo(() => ({ userId }), [userId]); - const signalKeys = useMemo(() => getUniqueSignalKeys(tabs), [tabs]); - - useEffect(() => { - let cancelled = false; - let timeoutId: number | undefined; - - const pollSignals = async () => { - try { - const records = 
await getGitHubRevalidationSignalRecords({ - data: { signalKeys }, - }); - if (cancelled) { - return; - } - - const signalsByKey = new Map( - records.map((record) => [record.signalKey, record.updatedAt]), - ); - - // Invalidate list queries first (lightweight) - const pullsMineUpdatedAt = - signalsByKey.get(githubRevalidationSignalKeys.pullsMine) ?? 0; - if ( - pullsMineUpdatedAt > - getQueryUpdatedAt(queryClient, githubQueryKeys.pulls.mine(scope)) - ) { - debug("github-revalidation", "invalidating pull list queries", { - pullsMineUpdatedAt, - }); - await queryClient.invalidateQueries({ - queryKey: githubQueryKeys.pulls.mine(scope), - }); - } - - const issuesMineUpdatedAt = - signalsByKey.get(githubRevalidationSignalKeys.issuesMine) ?? 0; - if ( - issuesMineUpdatedAt > - getQueryUpdatedAt(queryClient, githubQueryKeys.issues.mine(scope)) - ) { - debug("github-revalidation", "invalidating issue list queries", { - issuesMineUpdatedAt, - }); - await queryClient.invalidateQueries({ - queryKey: githubQueryKeys.issues.mine(scope), - }); - } - - // Invalidate repo code queries (tree, file content) on push events - for (const repoKey of getUniqueRepoKeys(tabs)) { - if (cancelled) break; - const [owner, repo] = repoKey.split("/"); - const signalKey = githubRevalidationSignalKeys.repoCode({ - owner, - repo, - }); - const updatedAt = signalsByKey.get(signalKey) ?? 
0; - if (updatedAt === 0) continue; - - const matchesRepo = (query: { queryKey: readonly unknown[] }) => { - const key = query.queryKey; - const input = key[4]; - return ( - isRecord(input) && input.owner === owner && input.repo === repo - ); - }; - - const treeQueries = queryClient.getQueriesData({ - queryKey: ["github", scope.userId, "repo", "tree"], - predicate: matchesRepo, - }); - const fileQueries = queryClient.getQueriesData({ - queryKey: ["github", scope.userId, "repo", "fileContent"], - predicate: matchesRepo, - }); - - if (treeQueries.length > 0 || fileQueries.length > 0) { - debug("github-revalidation", "invalidating repo code queries", { - signalKey, - repoKey, - }); - await queryClient.invalidateQueries({ - queryKey: ["github", scope.userId, "repo", "tree"], - predicate: matchesRepo, - refetchType: "all", - }); - await queryClient.invalidateQueries({ - queryKey: ["github", scope.userId, "repo", "fileContent"], - predicate: matchesRepo, - refetchType: "all", - }); - } - } - - // Invalidate tab queries serially to avoid a burst of concurrent - // server function RPCs that can overwhelm the Worker. - for (const tab of tabs) { - if (cancelled) break; - - const signalKey = getGitHubRevalidationSignalKeysForTab(tab)[0]; - const updatedAt = signalsByKey.get(signalKey) ?? 
0; - if (updatedAt === 0) { - continue; - } - - // Repo tabs are revalidated via the repo code block above - if (tab.type === "repo" || tab.number == null) { - continue; - } - - const numberedTab = tab as NumberedTab; - - if (tab.type === "pull" || tab.type === "review") { - const [owner, repo] = tab.repo.split("/"); - const comparisonKey = githubQueryKeys.pulls.page(scope, { - owner, - repo, - pullNumber: numberedTab.number, - }); - if (updatedAt > getQueryUpdatedAt(queryClient, comparisonKey)) { - debug("github-revalidation", "invalidating pull tab queries", { - signalKey, - tabId: tab.id, - }); - await invalidatePullTabQueries(queryClient, scope, numberedTab); - } - continue; - } - - const [owner, repo] = tab.repo.split("/"); - const comparisonKey = githubQueryKeys.issues.page(scope, { - owner, - repo, - issueNumber: numberedTab.number, - }); - if (updatedAt > getQueryUpdatedAt(queryClient, comparisonKey)) { - debug("github-revalidation", "invalidating issue tab queries", { - signalKey, - tabId: tab.id, - }); - await invalidateIssueTabQueries(queryClient, scope, numberedTab); - } - } - } catch (error) { - debug("github-revalidation", "poll failed", { - error: error instanceof Error ? error.message : String(error), - }); - } finally { - if (!cancelled) { - timeoutId = window.setTimeout( - pollSignals, - GITHUB_REVALIDATION_POLL_INTERVAL_MS, - ); - } - } - }; - - // Delay the first poll so it doesn't collide with initial data loading - // (route loaders + preloading) which already makes server function RPCs. 
- timeoutId = window.setTimeout( - pollSignals, - GITHUB_REVALIDATION_INITIAL_DELAY_MS, - ); - - return () => { - cancelled = true; - if (typeof timeoutId !== "undefined") { - window.clearTimeout(timeoutId); - } - }; - }, [queryClient, scope, signalKeys, tabs]); -} diff --git a/apps/dashboard/src/lib/use-github-signal-refresh.ts b/apps/dashboard/src/lib/use-github-signal-refresh.ts new file mode 100644 index 0000000..a123d3d --- /dev/null +++ b/apps/dashboard/src/lib/use-github-signal-refresh.ts @@ -0,0 +1,103 @@ +import { type QueryKey, useQueryClient } from "@tanstack/react-query"; +import { useEffect, useMemo, useRef } from "react"; +import { debug } from "./debug"; +import { getGitHubRevalidationSignalRecords } from "./github.functions"; + +export type GitHubSignalRefreshTarget = { + queryKey: QueryKey; + signalKeys: readonly string[]; +}; + +export function useGitHubSignalRefresh({ + enabled, + targets, +}: { + enabled: boolean; + targets: readonly GitHubSignalRefreshTarget[]; +}) { + const queryClient = useQueryClient(); + const checkedSignatureRef = useRef(null); + const signature = useMemo( + () => + JSON.stringify( + targets.map((target) => ({ + queryKey: target.queryKey, + signalKeys: Array.from(new Set(target.signalKeys)).sort(), + })), + ), + [targets], + ); + + useEffect(() => { + if (!enabled || targets.length === 0) { + return; + } + + if (checkedSignatureRef.current === signature) { + return; + } + + const signalKeys = Array.from( + new Set(targets.flatMap((target) => target.signalKeys)), + ); + if (signalKeys.length === 0) { + return; + } + + checkedSignatureRef.current = signature; + let cancelled = false; + + void (async () => { + const records = await getGitHubRevalidationSignalRecords({ + data: { signalKeys }, + }); + if (cancelled) { + return; + } + + const updatedAtBySignalKey = new Map( + records.map((record) => [record.signalKey, record.updatedAt]), + ); + + await Promise.all( + targets.map(async (target) => { + const queryState = 
queryClient.getQueryState(target.queryKey); + const queryUpdatedAt = queryState?.dataUpdatedAt ?? 0; + if (queryUpdatedAt === 0 || queryState?.fetchStatus === "fetching") { + return; + } + + const signalUpdatedAt = target.signalKeys.reduce( + (latest, signalKey) => + Math.max(latest, updatedAtBySignalKey.get(signalKey) ?? 0), + 0, + ); + + if (signalUpdatedAt <= queryUpdatedAt) { + return; + } + + debug("github-revalidation", "refreshing query after webhook", { + queryKey: target.queryKey, + queryUpdatedAt, + signalUpdatedAt, + }); + + await queryClient.invalidateQueries({ + queryKey: target.queryKey, + exact: true, + refetchType: "active", + }); + }), + ); + })().catch((error: unknown) => { + debug("github-revalidation", "webhook signal check failed", { + error: error instanceof Error ? error.message : String(error), + }); + }); + + return () => { + cancelled = true; + }; + }, [enabled, queryClient, signature, targets]); +} diff --git a/apps/dashboard/src/routes/_protected/$owner/index.tsx b/apps/dashboard/src/routes/_protected/$owner/index.tsx index 341905d..ea529b5 100644 --- a/apps/dashboard/src/routes/_protected/$owner/index.tsx +++ b/apps/dashboard/src/routes/_protected/$owner/index.tsx @@ -32,18 +32,15 @@ import { useHasMounted } from "#/lib/use-has-mounted"; export const Route = createFileRoute("/_protected/$owner/")({ ssr: false, - loader: async ({ context, params }) => { + loader: ({ context, params }) => { const scope = { userId: context.user.id }; - await Promise.all([ - context.queryClient.ensureQueryData( - githubUserProfileQueryOptions(scope, params.owner), - ), - context.queryClient.ensureQueryData(githubViewerQueryOptions(scope)), - context.queryClient.ensureQueryData( - githubUserPinnedReposQueryOptions(scope, params.owner), - ), - ]); - // Contributions & activity load client-side + void context.queryClient.prefetchQuery( + githubUserProfileQueryOptions(scope, params.owner), + ); + void 
context.queryClient.prefetchQuery(githubViewerQueryOptions(scope)); + void context.queryClient.prefetchQuery( + githubUserPinnedReposQueryOptions(scope, params.owner), + ); void context.queryClient.prefetchQuery( githubUserContributionsQueryOptions(scope, params.owner), ); diff --git a/apps/dashboard/src/routes/_protected/index.tsx b/apps/dashboard/src/routes/_protected/index.tsx index 3f748b5..aeae1a6 100644 --- a/apps/dashboard/src/routes/_protected/index.tsx +++ b/apps/dashboard/src/routes/_protected/index.tsx @@ -1,25 +1,22 @@ import { GitPullRequestIcon, IssuesIcon, ReviewsIcon } from "@diffkit/icons"; import { useQuery } from "@tanstack/react-query"; -import { createFileRoute, Link, useRouter } from "@tanstack/react-router"; -import { type ComponentType, useEffect } from "react"; +import { createFileRoute, Link } from "@tanstack/react-router"; +import type { ComponentType } from "react"; import { DashboardContentLoading } from "#/components/layouts/dashboard-content-loading"; import { PullRequestRow } from "#/components/pulls/pull-request-row"; import { githubMyIssuesQueryOptions, githubMyPullsQueryOptions, } from "#/lib/github.query"; -import { preloadRouteOnce } from "#/lib/route-preload"; import { buildSeo, formatPageTitle } from "#/lib/seo"; import { useHasMounted } from "#/lib/use-has-mounted"; export const Route = createFileRoute("/_protected/")({ ssr: false, - loader: async ({ context }) => { + loader: ({ context }) => { const scope = { userId: context.user.id }; - await Promise.all([ - context.queryClient.ensureQueryData(githubMyPullsQueryOptions(scope)), - context.queryClient.ensureQueryData(githubMyIssuesQueryOptions(scope)), - ]); + void context.queryClient.prefetchQuery(githubMyPullsQueryOptions(scope)); + void context.queryClient.prefetchQuery(githubMyIssuesQueryOptions(scope)); }, pendingComponent: DashboardContentLoading, head: ({ match }) => @@ -37,7 +34,6 @@ function OverviewPage() { const { user } = Route.useRouteContext(); const scope = { 
userId: user.id }; const hasMounted = useHasMounted(); - const router = useRouter(); const pullsQuery = useQuery({ ...githubMyPullsQueryOptions(scope), enabled: hasMounted, @@ -47,22 +43,6 @@ function OverviewPage() { enabled: hasMounted, }); - // Prefetch detail routes for recent PRs in the background so - // navigating into a PR is near-instant even without hovering first. - const authoredPulls = pullsQuery.data?.authored; - useEffect(() => { - if (!authoredPulls) return; - const recent = authoredPulls.slice(0, 10); - void Promise.allSettled( - recent.map((pr) => - preloadRouteOnce( - router, - `/${pr.repository.owner}/${pr.repository.name}/pull/${pr.number}`, - ), - ), - ); - }, [authoredPulls, router]); - if (pullsQuery.error) throw pullsQuery.error; if (issuesQuery.error) throw issuesQuery.error; diff --git a/apps/dashboard/src/routes/_protected/issues.tsx b/apps/dashboard/src/routes/_protected/issues.tsx index 9a0ba9f..7b15bfa 100644 --- a/apps/dashboard/src/routes/_protected/issues.tsx +++ b/apps/dashboard/src/routes/_protected/issues.tsx @@ -30,10 +30,8 @@ export const Route = createFileRoute("/_protected/issues")({ ssr: false, loader: async ({ context }) => { const scope = { userId: context.user.id }; - const [, filterStore] = await Promise.all([ - context.queryClient.ensureQueryData(githubMyIssuesQueryOptions(scope)), - getFilterCookie(), - ]); + void context.queryClient.prefetchQuery(githubMyIssuesQueryOptions(scope)); + const filterStore = await getFilterCookie(); return { filterStore }; }, pendingComponent: DashboardContentLoading, diff --git a/apps/dashboard/src/routes/_protected/pulls.tsx b/apps/dashboard/src/routes/_protected/pulls.tsx index bd986c1..91f631b 100644 --- a/apps/dashboard/src/routes/_protected/pulls.tsx +++ b/apps/dashboard/src/routes/_protected/pulls.tsx @@ -36,10 +36,8 @@ export const Route = createFileRoute("/_protected/pulls")({ ssr: false, loader: async ({ context }) => { const scope = { userId: context.user.id }; - const [, 
filterStore] = await Promise.all([ - context.queryClient.ensureQueryData(githubMyPullsQueryOptions(scope)), - getFilterCookie(), - ]); + void context.queryClient.prefetchQuery(githubMyPullsQueryOptions(scope)); + const filterStore = await getFilterCookie(); return { filterStore }; }, pendingComponent: DashboardContentLoading, diff --git a/apps/dashboard/src/routes/_protected/reviews.tsx b/apps/dashboard/src/routes/_protected/reviews.tsx index 474f7f1..703fbc1 100644 --- a/apps/dashboard/src/routes/_protected/reviews.tsx +++ b/apps/dashboard/src/routes/_protected/reviews.tsx @@ -20,10 +20,8 @@ export const Route = createFileRoute("/_protected/reviews")({ ssr: false, loader: async ({ context }) => { const scope = { userId: context.user.id }; - const [, filterStore] = await Promise.all([ - context.queryClient.ensureQueryData(githubMyPullsQueryOptions(scope)), - getFilterCookie(), - ]); + void context.queryClient.prefetchQuery(githubMyPullsQueryOptions(scope)); + const filterStore = await getFilterCookie(); return { filterStore }; }, pendingComponent: DashboardContentLoading, diff --git a/apps/dashboard/src/routes/api/webhooks/github.ts b/apps/dashboard/src/routes/api/webhooks/github.ts index e196a2c..8736a3c 100644 --- a/apps/dashboard/src/routes/api/webhooks/github.ts +++ b/apps/dashboard/src/routes/api/webhooks/github.ts @@ -1,5 +1,6 @@ import { createFileRoute } from "@tanstack/react-router"; import { debug } from "#/lib/debug"; +import { invalidateGitHubInstallationToken } from "#/lib/github.server"; import { getGitHubWebhookSecret, verifyGitHubWebhookSignature, @@ -9,6 +10,30 @@ import { getGitHubWebhookRevalidationSignalKeys } from "#/lib/github-revalidatio import { getGitHubWebhookPayloadMetadata } from "#/lib/github-webhook-debug"; import { PRIVATE_ROUTE_HEADERS } from "#/lib/seo"; +const INSTALLATION_TOKEN_INVALIDATION_EVENTS = new Set([ + "installation", + "installation_repositories", + "github_app_authorization", +]); + +function 
getWebhookInstallationId(payload: unknown) { + if (!payload || typeof payload !== "object" || !("installation" in payload)) { + return null; + } + + const installation = payload.installation; + if ( + !installation || + typeof installation !== "object" || + !("id" in installation) || + typeof installation.id !== "number" + ) { + return null; + } + + return installation.id; +} + export const Route = createFileRoute("/api/webhooks/github")({ headers: () => PRIVATE_ROUTE_HEADERS, server: { @@ -87,12 +112,25 @@ export const Route = createFileRoute("/api/webhooks/github")({ event, payload, ); + const installationId = getWebhookInstallationId(payload); + let invalidatedInstallationToken = false; + + if ( + installationId !== null && + INSTALLATION_TOKEN_INVALIDATION_EVENTS.has(event) + ) { + await invalidateGitHubInstallationToken(installationId); + invalidatedInstallationToken = true; + } + const updatedSignalCount = await markGitHubRevalidationSignals(signalKeys); debug("github-webhook", "processed webhook", { deliveryId, event, + installationId, + invalidatedInstallationToken, signalKeys, updatedSignalCount, }); diff --git a/apps/dashboard/src/routes/setup.tsx b/apps/dashboard/src/routes/setup.tsx index 17d78d2..56d1bae 100644 --- a/apps/dashboard/src/routes/setup.tsx +++ b/apps/dashboard/src/routes/setup.tsx @@ -21,7 +21,7 @@ export const Route = createFileRoute("/setup")({ return { user: session.user }; }, loader: async () => { - const accessState = await getGitHubAppAccessState(); + const accessState = await getGitHubAppAccessState().catch(() => null); return { accessState }; }, headers: () => PRIVATE_ROUTE_HEADERS, diff --git a/apps/dashboard/wrangler.jsonc b/apps/dashboard/wrangler.jsonc index c2dcddc..9be66f8 100644 --- a/apps/dashboard/wrangler.jsonc +++ b/apps/dashboard/wrangler.jsonc @@ -2,7 +2,10 @@ "$schema": "node_modules/wrangler/config-schema.json", "name": "diffkit", "compatibility_date": "2025-09-02", - "compatibility_flags": ["nodejs_compat"], + 
"compatibility_flags": [ + "nodejs_compat", + "no_handle_cross_request_promise_resolution" + ], "main": "@tanstack/react-start/server-entry", "observability": { "logs": { diff --git a/docs/github-cache-architecture.md b/docs/github-cache-architecture.md index dd41aab..f3d2d9f 100644 --- a/docs/github-cache-architecture.md +++ b/docs/github-cache-architecture.md @@ -27,8 +27,8 @@ Cloudflare D1 stores durable app data and cache control state. Relevant tables: - `github_response_cache`: legacy durable response cache and migration fallback. -- `github_revalidation_signal`: timestamped invalidation signals used by - webhook and client revalidation flows. +- `github_revalidation_signal`: timestamped invalidation signals written by + webhook and mutation flows. - `github_cache_namespace`: version rows used by the split KV cache. The namespace table is created by: @@ -150,9 +150,13 @@ The first implemented split-cache coverage includes: - Authenticated viewer. - Authenticated user repo list. -- My pull request dashboard slices. -- My issue dashboard slices. +- My pull request dashboard result, fetched through installation-scoped GraphQL + searches for installed owners plus a guarded OAuth fallback. +- My issue dashboard result, fetched through installation-scoped GraphQL searches + for installed owners plus a guarded OAuth fallback. - Pull detail data. +- Pull page data, with detail, first/last comments, and commits bundled through + a repo-scoped GraphQL query plus timeline fallback reads. - Pull comments. - Pull commits. - Pull status. @@ -160,10 +164,18 @@ The first implemented split-cache coverage includes: - Pull file summaries. - Pull review comments. - Issue detail data. +- Issue page data, with detail and first/last comments bundled through a + repo-scoped GraphQL query plus timeline fallback reads. - Issue comments. - Repo collaborators. - Org teams. - Repo labels. +- Repository overview metadata. +- Repository branches. +- Repository tree listings. 
+- Repository file contents. +- Repository contributors. +- Repository discussions. Most resource opt-ins are in: @@ -171,10 +183,39 @@ Most resource opt-ins are in: apps/dashboard/src/lib/github.functions.ts ``` +Repository overview also uses a single GraphQL repository query for metadata, +counts, topics, and the latest default-branch commit. This avoids the previous +multi-REST-call cold miss for the same screen. + +The dashboard "my pulls" and "my issues" reads prefer GitHub App installation +tokens when the app is installed for an owner. The OAuth fallback excludes +organizations the viewer belongs to, so an OAuth-restricted organization cannot +make the whole aggregate search fail. Public repositories outside those +organizations are still discovered through the fallback. + +Those dashboard aggregate searches have a hard request budget. Source discovery, +installation client setup, and each owner-scoped GraphQL search are bounded so a +slow GitHub/App-token path can be skipped and the server function can still +return partial cached/search results instead of hanging the Worker request. + +Shared cached GitHub request helpers also wrap REST pagination, GraphQL calls, +and composed reads in operation deadlines. Route loaders avoid blocking +navigation on GitHub data; they prefetch and let React Query show cached data or +the normal loading state. The Worker config enables +`no_handle_cross_request_promise_resolution` to avoid cross-request promise +resolution behavior in the runtime. + ## Rate-Limit Behavior There are two layers of rate-limit protection. +During development, the Octokit request policy logs a rate-limit snapshot for +each GitHub response under the `github-rate-limit` debug scope. The token label +is non-secret and identifies the bucket being used, for example +`oauth:user:{id}`, `app-user:{id}`, or `installation:{id}`. The log includes the +method, URL, status, rate-limit resource, limit, used count, remaining count, and +reset time. 
+ ### Cache-level stale fallback `github-cache.ts` tracks GitHub response metadata, including remaining quota and @@ -217,18 +258,51 @@ It enables Octokit's retry and throttling plugins with these rules: This keeps read paths resilient while avoiding duplicated side effects from `POST`, `PATCH`, or `DELETE` requests. -## PR Status Refresh +### GitHub App installation token cache + +Installation clients do not mint a new GitHub App installation token for every +read. The server caches installation tokens per `installationId` in memory and, +when available, in `GITHUB_CACHE_KV`. Cached tokens are reused until five +minutes before GitHub's `expiresAt` timestamp, and concurrent misses for the +same installation share one in-flight token mint. + +Webhook delivery invalidates the cached token for installation access changes: -The pull detail page no longer polls PR status every 15 seconds. +- `installation` +- `installation_repositories` +- `github_app_authorization` -Status fetching is owned by the merge status section in: +This keeps normal PR and issue reads on the larger installation rate-limit +bucket without continuing to use a token after app permissions or selected +repositories change. + +## Client Refresh + +The browser does not poll GitHub revalidation signals and does not sweep open +tabs to preload or refetch their routes. Webhook handlers and mutation handlers +update the durable server-side invalidation state; subsequent navigation, +explicit hover/focus prefetch, or normal query reads observe that state through +the server cache. + +PR detail, issue detail, and PR review routes also run a one-shot client signal +check after their page query has data. This preserves the fast path where cached +React Query data renders immediately, then asks the server for the relevant D1 +webhook signal timestamp once. If that signal is newer than the active query's +cached data, only that active query is invalidated and refetched. 
+ +```text +apps/dashboard/src/lib/use-github-signal-refresh.ts +``` + +Tab prefetching is intentionally user-driven. Detail tabs call route preload on +hover, focus, or touch start only: ```text -apps/dashboard/src/components/pulls/detail/pull-detail-activity.tsx +apps/dashboard/src/components/layouts/dashboard-tabs.tsx ``` -It still refreshes on window focus, but it avoids constant background polling. -Webhook invalidation and server cache freshness now carry more of the load. +This keeps cached tab switches fast without turning every open tab into a +background GitHub refresh source. ## Operational Requirements @@ -238,6 +312,10 @@ Before this architecture is active in an environment: 2. The `github_cache_namespace` D1 migration must be applied. 3. Existing legacy D1 cache entries can remain in place. +`GITHUB_CACHE_KV` stores GitHub payload cache entries and short-lived GitHub App +installation tokens. Installation token entries expire before the token itself +enters its refresh window. + Migration commands: ```sh @@ -245,8 +323,9 @@ pnpm --filter @diffkit/dashboard migrate:local pnpm --filter @diffkit/dashboard migrate:remote ``` -If the KV binding is unavailable, the cache layer falls back to legacy D1 -behavior. +If the KV binding is unavailable, the payload cache layer falls back to legacy +D1 behavior, and installation token reuse is limited to the current Worker +isolate's memory cache. ## Testing Coverage @@ -260,6 +339,7 @@ The focused tests cover: - adaptive freshness when GitHub quota is low. - stale serving when GitHub is rate limited. - Octokit throttling and safe-method retry policy. +- GitHub App installation token reuse and webhook invalidation. Relevant test files: @@ -274,10 +354,14 @@ apps/dashboard/src/lib/github.server.test.ts - Split-cache writes still mirror payloads to legacy D1, so D1 cache churn is reduced but not eliminated yet. - In-flight deduplication is request-scoped, not cross-request or cross-worker. 
-- Realtime client updates still depend on the existing client revalidation - model rather than server push. -- KV is used for payloads only. It should not become the source of truth for - invalidation. +- Installation token in-flight deduplication is per-worker isolate. The KV entry + prevents most cross-isolate remints after the first token is stored. +- Repository tree listings intentionally avoid per-entry commit lookups because + that pattern can turn one directory view into dozens of GitHub API calls. +- Realtime client updates are not pushed to the browser yet. Detail routes do a + one-shot signal check on entry, but users who keep a page open indefinitely do + not receive webhook updates until another read is triggered. +- KV is not the source of truth for invalidation. ## Future Work @@ -288,5 +372,5 @@ Likely next steps: - Add cross-request coalescing for hot GitHub reads. - Add metrics for KV hit rate, D1 fallback rate, stale-if-rate-limited events, and GitHub quota pressure. -- Move more realtime behavior toward push-based delivery if 10 second - revalidation polling becomes too noisy. +- Add push-based client notifications if live updates are needed without + reintroducing browser polling.