-
Integration Details
-
+
+ Integration details
+
+
{teamName && (
-
- Slack Workspace: {teamName}
-
+
+ Workspace: {" "}
+ {teamName}
+
)}
-
- Installed: {" "}
- {formatDate(new Date(slackIntegration.createdAt))}
-
+
+ Installed: {" "}
+
+
+
+
-
-
-
-
- Remove Integration
-
-
-
-
- Remove Slack Integration
-
-
- This will remove the Slack integration and disable all connected alert channels.
- This action cannot be undone.
-
-
-
+
+
+
+ Connected alert channels
+ ({alertChannels.length})
+
+ {alertChannels.length === 0 ? (
+
+ No alert channels are currently connected to this Slack integration.
+
+ ) : (
+
+
+
+ Channel
+ Project
+ Status
+ Created
+
+
+
+ {alertChannels.map((channel) => (
+
+ {channel.name}
+ {channel.project.name}
+
+
+
+
+
+
+
+ ))}
+
+
+ )}
+
+
+
+
Danger zone
+
+
Remove integration
+
+ This will remove the Slack integration and disable all connected alert channels.
+ This action cannot be undone.
+
+ {actionData?.error && (
+
+ {actionData.error}
+
+ )}
+
+
- {isUninstalling ? "Removing..." : "Remove Integration"}
+ Remove integration
-
- }
- cancelButton={
-
- Cancel
-
- }
- />
-
-
- {actionData?.error && (
-
- {actionData.error}
-
- )}
+
+
+
+ Remove Slack integration
+
+
+ This will remove the Slack integration and disable all connected alert
+ channels. This action cannot be undone.
+
+
+
+
+ {isUninstalling ? "Removing…" : "Remove integration"}
+
+
+ }
+ cancelButton={
+
+ Cancel
+
+ }
+ />
+
+
+ }
+ />
+
-
-
- {/* Connected Alert Channels Section */}
-
-
- Connected Alert Channels ({alertChannels.length})
-
-
- {alertChannels.length === 0 ? (
-
-
- No alert channels are currently connected to this Slack integration.
-
-
- ) : (
-
-
-
- Channel Name
- Project
- Status
- Created
-
-
-
- {alertChannels.map((channel) => (
-
- {channel.name}
- {channel.project.name}
-
-
-
- {formatDate(new Date(channel.createdAt))}
-
- ))}
-
-
- )}
-
+
);
diff --git a/apps/webapp/app/routes/storybook.unordered-list/route.tsx b/apps/webapp/app/routes/storybook.unordered-list/route.tsx
new file mode 100644
index 00000000000..b17bb2dda11
--- /dev/null
+++ b/apps/webapp/app/routes/storybook.unordered-list/route.tsx
@@ -0,0 +1,67 @@
+import { Header2 } from "~/components/primitives/Headers";
+import { Paragraph, type ParagraphVariant } from "~/components/primitives/Paragraph";
+import { UnorderedList } from "~/components/primitives/UnorderedList";
+
+const sampleItems = [
+ "A new issue is seen for the first time",
+ "A resolved issue re-occurs",
+ "An ignored issue re-occurs depending on the settings you configured",
+];
+
+const variantGroups: { label: string; variants: ParagraphVariant[] }[] = [
+ {
+ label: "Base",
+ variants: ["base", "base/bright"],
+ },
+ {
+ label: "Small",
+ variants: ["small", "small/bright", "small/dimmed"],
+ },
+ {
+ label: "Extra small",
+ variants: [
+ "extra-small",
+ "extra-small/bright",
+ "extra-small/dimmed",
+ "extra-small/mono",
+ "extra-small/bright/mono",
+ "extra-small/dimmed/mono",
+ "extra-small/caps",
+ "extra-small/bright/caps",
+ ],
+ },
+ {
+ label: "Extra extra small",
+ variants: [
+ "extra-extra-small",
+ "extra-extra-small/bright",
+ "extra-extra-small/caps",
+ "extra-extra-small/bright/caps",
+ "extra-extra-small/dimmed/caps",
+ ],
+ },
+];
+
+export default function Story() {
+ return (
+
+ {variantGroups.map((group) => (
+
+
{group.label}
+ {group.variants.map((variant) => (
+
+
{variant}
+
This is a paragraph before the list.
+
+ {sampleItems.map((item) => (
+ {item}
+ ))}
+
+
This is a paragraph after the list.
+
+ ))}
+
+ ))}
+
+ );
+}
diff --git a/apps/webapp/app/routes/storybook/route.tsx b/apps/webapp/app/routes/storybook/route.tsx
index 83d455c2a55..bcaee62d6b0 100644
--- a/apps/webapp/app/routes/storybook/route.tsx
+++ b/apps/webapp/app/routes/storybook/route.tsx
@@ -136,6 +136,10 @@ const stories: Story[] = [
name: "Typography",
slug: "typography",
},
+ {
+ name: "Unordered list",
+ slug: "unordered-list",
+ },
{
name: "Usage",
slug: "usage",
diff --git a/apps/webapp/app/utils/pathBuilder.ts b/apps/webapp/app/utils/pathBuilder.ts
index f73f4139a01..7a151053f5a 100644
--- a/apps/webapp/app/utils/pathBuilder.ts
+++ b/apps/webapp/app/utils/pathBuilder.ts
@@ -584,6 +584,14 @@ export function v3ErrorsPath(
return `${v3EnvironmentPath(organization, project, environment)}/errors`;
}
+export function v3ErrorsConnectToSlackPath(
+ organization: OrgForPath,
+ project: ProjectForPath,
+ environment: EnvironmentForPath
+) {
+ return `${v3ErrorsPath(organization, project, environment)}/connect-to-slack`;
+}
+
export function v3ErrorPath(
organization: OrgForPath,
project: ProjectForPath,
diff --git a/apps/webapp/app/v3/alertsWorker.server.ts b/apps/webapp/app/v3/alertsWorker.server.ts
index 46670887a75..693b16b738a 100644
--- a/apps/webapp/app/v3/alertsWorker.server.ts
+++ b/apps/webapp/app/v3/alertsWorker.server.ts
@@ -1,10 +1,12 @@
import { Logger } from "@trigger.dev/core/logger";
-import { Worker as RedisWorker } from "@trigger.dev/redis-worker";
+import { CronSchema, Worker as RedisWorker } from "@trigger.dev/redis-worker";
import { z } from "zod";
import { env } from "~/env.server";
import { logger } from "~/services/logger.server";
import { singleton } from "~/utils/singleton";
import { DeliverAlertService } from "./services/alerts/deliverAlert.server";
+import { DeliverErrorGroupAlertService } from "./services/alerts/deliverErrorGroupAlert.server";
+import { ErrorAlertEvaluator } from "./services/alerts/errorAlertEvaluator.server";
import { PerformDeploymentAlertsService } from "./services/alerts/performDeploymentAlerts.server";
import { PerformTaskRunAlertsService } from "./services/alerts/performTaskRunAlerts.server";
@@ -55,6 +57,42 @@ function initializeWorker() {
},
logErrors: false,
},
+ "v3.evaluateErrorAlerts": {
+ schema: z.object({
+ projectId: z.string(),
+ scheduledAt: z.number(),
+ }),
+ visibilityTimeoutMs: 60_000 * 5,
+ retry: {
+ maxAttempts: 3,
+ },
+ logErrors: true,
+ },
+ "v3.deliverErrorGroupAlert": {
+ schema: z.object({
+ channelId: z.string(),
+ projectId: z.string(),
+ classification: z.enum(["new_issue", "regression", "unignored"]),
+ error: z.object({
+ fingerprint: z.string(),
+ environmentId: z.string(),
+ environmentSlug: z.string(),
+ environmentName: z.string(),
+ taskIdentifier: z.string(),
+ errorType: z.string(),
+ errorMessage: z.string(),
+ sampleStackTrace: z.string(),
+ firstSeen: z.string(),
+ lastSeen: z.string(),
+ occurrenceCount: z.number(),
+ }),
+ }),
+ visibilityTimeoutMs: 60_000,
+ retry: {
+ maxAttempts: 3,
+ },
+ logErrors: true,
+ },
},
concurrency: {
workers: env.ALERTS_WORKER_CONCURRENCY_WORKERS,
@@ -80,6 +118,14 @@ function initializeWorker() {
const service = new PerformTaskRunAlertsService();
await service.call(payload.runId);
},
+ "v3.evaluateErrorAlerts": async ({ payload }) => {
+ const evaluator = new ErrorAlertEvaluator();
+ await evaluator.evaluate(payload.projectId, payload.scheduledAt);
+ },
+ "v3.deliverErrorGroupAlert": async ({ payload }) => {
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+ },
},
});
diff --git a/apps/webapp/app/v3/otlpExporter.server.ts b/apps/webapp/app/v3/otlpExporter.server.ts
index 5fe2624557d..7505693e3ab 100644
--- a/apps/webapp/app/v3/otlpExporter.server.ts
+++ b/apps/webapp/app/v3/otlpExporter.server.ts
@@ -1194,4 +1194,4 @@ function initializeOTLPExporter() {
? parseInt(process.env.SERVER_OTEL_SPAN_ATTRIBUTE_VALUE_LENGTH_LIMIT, 10)
: 8192
);
-}
+}
\ No newline at end of file
diff --git a/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
index b2bbb423983..3b0a3a13360 100644
--- a/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
+++ b/apps/webapp/app/v3/services/alerts/createAlertChannel.server.ts
@@ -1,12 +1,13 @@
import {
- ProjectAlertChannel,
- ProjectAlertType,
- RuntimeEnvironmentType,
+ type ProjectAlertChannel,
+ type ProjectAlertType,
+ type RuntimeEnvironmentType,
} from "@trigger.dev/database";
import { nanoid } from "nanoid";
import { env } from "~/env.server";
import { findProjectByRef } from "~/models/project.server";
import { encryptSecret } from "~/services/secrets/secretStore.server";
+import { alertsWorker } from "~/v3/alertsWorker.server";
import { generateFriendlyId } from "~/v3/friendlyIdentifiers";
import { BaseService, ServiceValidationError } from "../baseService.server";
@@ -60,7 +61,7 @@ export class CreateAlertChannelService extends BaseService {
: undefined;
if (existingAlertChannel) {
- return await this._prisma.projectAlertChannel.update({
+ const updated = await this._prisma.projectAlertChannel.update({
where: { id: existingAlertChannel.id },
data: {
name: options.name,
@@ -68,8 +69,15 @@ export class CreateAlertChannelService extends BaseService {
type: options.channel.type,
properties: await this.#createProperties(options.channel),
environmentTypes,
+ enabled: true,
},
});
+
+ if (options.alertTypes.includes("ERROR_GROUP")) {
+ await this.#scheduleErrorAlertEvaluation(project.id);
+ }
+
+ return updated;
}
const alertChannel = await this._prisma.projectAlertChannel.create({
@@ -87,9 +95,24 @@ export class CreateAlertChannelService extends BaseService {
},
});
+ if (options.alertTypes.includes("ERROR_GROUP")) {
+ await this.#scheduleErrorAlertEvaluation(project.id);
+ }
+
return alertChannel;
}
+ async #scheduleErrorAlertEvaluation(projectId: string): Promise
{
+ await alertsWorker.enqueue({
+ id: `evaluateErrorAlerts:${projectId}`,
+ job: "v3.evaluateErrorAlerts",
+ payload: {
+ projectId,
+ scheduledAt: Date.now(),
+ },
+ });
+ }
+
async #createProperties(channel: CreateAlertChannelOptions["channel"]) {
switch (channel.type) {
case "EMAIL":
diff --git a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
index 8b922f91e9f..5ab99bf8046 100644
--- a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
+++ b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts
@@ -319,6 +319,9 @@ export class DeliverAlertService extends BaseService {
break;
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
@@ -657,6 +660,9 @@ export class DeliverAlertService extends BaseService {
break;
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
@@ -913,6 +919,9 @@ export class DeliverAlertService extends BaseService {
return;
}
}
+ case "ERROR_GROUP": {
+ break;
+ }
default: {
assertNever(alert.type);
}
diff --git a/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts
new file mode 100644
index 00000000000..422811cdee3
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/deliverErrorGroupAlert.server.ts
@@ -0,0 +1,404 @@
+import {
+ type ChatPostMessageArguments,
+ ErrorCode,
+ type WebAPIPlatformError,
+ type WebAPIRateLimitedError,
+} from "@slack/web-api";
+import { type ProjectAlertChannelType } from "@trigger.dev/database";
+import assertNever from "assert-never";
+import { prisma } from "~/db.server";
+import { env } from "~/env.server";
+import { v3ErrorPath } from "~/utils/pathBuilder";
+import {
+ isIntegrationForService,
+ type OrganizationIntegrationForService,
+ OrgIntegrationRepository,
+} from "~/models/orgIntegration.server";
+import {
+ ProjectAlertEmailProperties,
+ ProjectAlertSlackProperties,
+ ProjectAlertWebhookProperties,
+} from "~/models/projectAlert.server";
+import { sendAlertEmail } from "~/services/email.server";
+import { logger } from "~/services/logger.server";
+import { decryptSecret } from "~/services/secrets/secretStore.server";
+import { subtle } from "crypto";
+import { generateErrorGroupWebhookPayload } from "./errorGroupWebhook.server";
+
+type ErrorAlertClassification = "new_issue" | "regression" | "unignored";
+
+interface ErrorAlertPayload {
+ channelId: string;
+ projectId: string;
+ classification: ErrorAlertClassification;
+ error: {
+ fingerprint: string;
+ environmentId: string;
+ environmentSlug: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+ };
+}
+
+class SkipRetryError extends Error {}
+
+export class DeliverErrorGroupAlertService {
+ async call(payload: ErrorAlertPayload): Promise {
+ const channel = await prisma.projectAlertChannel.findFirst({
+ where: { id: payload.channelId, enabled: true },
+ include: {
+ project: {
+ include: {
+ organization: true,
+ },
+ },
+ },
+ });
+
+ if (!channel) {
+ logger.warn("[DeliverErrorGroupAlert] Channel not found or disabled", {
+ channelId: payload.channelId,
+ });
+ return;
+ }
+
+ const errorLink = this.#buildErrorLink(channel.project.organization, channel.project, payload.error);
+
+ try {
+ switch (channel.type) {
+ case "EMAIL":
+ await this.#sendEmail(channel, payload, errorLink);
+ break;
+ case "SLACK":
+ await this.#sendSlack(channel, payload, errorLink);
+ break;
+ case "WEBHOOK":
+ await this.#sendWebhook(channel, payload, errorLink);
+ break;
+ default:
+ assertNever(channel.type);
+ }
+ } catch (error) {
+ if (error instanceof SkipRetryError) {
+ logger.warn("[DeliverErrorGroupAlert] Skipping retry", { reason: (error as Error).message });
+ return;
+ }
+ throw error;
+ }
+ }
+
+ #buildErrorLink(
+ organization: { slug: string },
+ project: { slug: string },
+ error: ErrorAlertPayload["error"]
+ ): string {
+ return `${env.APP_ORIGIN}${v3ErrorPath(organization, project, { slug: error.environmentSlug }, { fingerprint: error.fingerprint })}`;
+ }
+
+ #classificationLabel(classification: ErrorAlertClassification): string {
+ switch (classification) {
+ case "new_issue":
+ return "New error";
+ case "regression":
+ return "Regression";
+ case "unignored":
+ return "Error resurfaced";
+ }
+ }
+
+ async #sendEmail(
+ channel: { type: ProjectAlertChannelType; properties: unknown; project: { name: string; organization: { title: string } } },
+ payload: ErrorAlertPayload,
+ errorLink: string
+ ): Promise {
+ const emailProperties = ProjectAlertEmailProperties.safeParse(channel.properties);
+ if (!emailProperties.success) {
+ logger.error("[DeliverErrorGroupAlert] Failed to parse email properties", {
+ issues: emailProperties.error.issues,
+ });
+ return;
+ }
+
+ await sendAlertEmail({
+ email: "alert-error-group",
+ to: emailProperties.data.email,
+ classification: payload.classification,
+ taskIdentifier: payload.error.taskIdentifier,
+ environment: payload.error.environmentName,
+ error: {
+ message: payload.error.errorMessage,
+ type: payload.error.errorType,
+ stackTrace: payload.error.sampleStackTrace || undefined,
+ },
+ occurrenceCount: payload.error.occurrenceCount,
+ errorLink,
+ organization: channel.project.organization.title,
+ project: channel.project.name,
+ });
+ }
+
+ async #sendSlack(
+ channel: {
+ type: ProjectAlertChannelType;
+ properties: unknown;
+ project: { organizationId: string; name: string; organization: { title: string } };
+ },
+ payload: ErrorAlertPayload,
+ errorLink: string
+ ): Promise {
+ const slackProperties = ProjectAlertSlackProperties.safeParse(channel.properties);
+ if (!slackProperties.success) {
+ logger.error("[DeliverErrorGroupAlert] Failed to parse slack properties", {
+ issues: slackProperties.error.issues,
+ });
+ return;
+ }
+
+ const integration = slackProperties.data.integrationId
+ ? await prisma.organizationIntegration.findFirst({
+ where: {
+ id: slackProperties.data.integrationId,
+ organizationId: channel.project.organizationId,
+ },
+ include: { tokenReference: true },
+ })
+ : await prisma.organizationIntegration.findFirst({
+ where: {
+ service: "SLACK",
+ organizationId: channel.project.organizationId,
+ },
+ orderBy: { createdAt: "desc" },
+ include: { tokenReference: true },
+ });
+
+ if (!integration || !isIntegrationForService(integration, "SLACK")) {
+ logger.error("[DeliverErrorGroupAlert] Slack integration not found");
+ return;
+ }
+
+ const message = this.#buildErrorGroupSlackMessage(
+ payload,
+ errorLink,
+ channel.project.name
+ );
+
+ await this.#postSlackMessage(integration, {
+ channel: slackProperties.data.channelId,
+ ...message,
+ } as ChatPostMessageArguments);
+ }
+
+ async #sendWebhook(
+ channel: {
+ type: ProjectAlertChannelType;
+ properties: unknown;
+ project: { id: string; externalRef: string; slug: string; name: string; organizationId: string; organization: { slug: string; title: string } };
+ },
+ payload: ErrorAlertPayload,
+ errorLink: string
+ ): Promise {
+ const webhookProperties = ProjectAlertWebhookProperties.safeParse(channel.properties);
+ if (!webhookProperties.success) {
+ logger.error("[DeliverErrorGroupAlert] Failed to parse webhook properties", {
+ issues: webhookProperties.error.issues,
+ });
+ return;
+ }
+
+ const webhookPayload = generateErrorGroupWebhookPayload({
+ classification: payload.classification,
+ error: payload.error,
+ organization: {
+ id: channel.project.organizationId,
+ slug: channel.project.organization.slug,
+ name: channel.project.organization.title,
+ },
+ project: {
+ id: channel.project.id,
+ externalRef: channel.project.externalRef,
+ slug: channel.project.slug,
+ name: channel.project.name,
+ },
+ dashboardUrl: errorLink,
+ });
+
+ const rawPayload = JSON.stringify(webhookPayload);
+ const hashPayload = Buffer.from(rawPayload, "utf-8");
+ const secret = await decryptSecret(env.ENCRYPTION_KEY, webhookProperties.data.secret);
+ const hmacSecret = Buffer.from(secret, "utf-8");
+ const key = await subtle.importKey(
+ "raw",
+ hmacSecret,
+ { name: "HMAC", hash: "SHA-256" },
+ false,
+ ["sign"]
+ );
+ const signature = await subtle.sign("HMAC", key, hashPayload);
+ const signatureHex = Buffer.from(signature).toString("hex");
+
+ const response = await fetch(webhookProperties.data.url, {
+ method: "POST",
+ headers: {
+ "content-type": "application/json",
+ "x-trigger-signature-hmacsha256": signatureHex,
+ },
+ body: rawPayload,
+ signal: AbortSignal.timeout(5000),
+ });
+
+ if (!response.ok) {
+ logger.info("[DeliverErrorGroupAlert] Failed to send webhook", {
+ status: response.status,
+ statusText: response.statusText,
+ url: webhookProperties.data.url,
+ });
+ throw new Error(`Failed to send error group alert webhook to ${webhookProperties.data.url}`);
+ }
+ }
+
+ async #postSlackMessage(
+ integration: OrganizationIntegrationForService<"SLACK">,
+ message: ChatPostMessageArguments
+ ) {
+ const client = await OrgIntegrationRepository.getAuthenticatedClientForIntegration(
+ integration,
+ { forceBotToken: true }
+ );
+
+ try {
+ return await client.chat.postMessage({
+ ...message,
+ unfurl_links: false,
+ unfurl_media: false,
+ });
+ } catch (error) {
+ if (isWebAPIRateLimitedError(error)) {
+ throw new Error("Slack rate limited");
+ }
+ if (isWebAPIPlatformError(error)) {
+ if (
+ (error as WebAPIPlatformError).data.error === "invalid_blocks" ||
+ (error as WebAPIPlatformError).data.error === "account_inactive"
+ ) {
+ throw new SkipRetryError(`Slack: ${(error as WebAPIPlatformError).data.error}`);
+ }
+ throw new Error("Slack platform error");
+ }
+ throw error;
+ }
+ }
+
+ #buildErrorGroupSlackMessage(
+ payload: ErrorAlertPayload,
+ errorLink: string,
+ projectName: string
+ ): { text: string; blocks: object[]; attachments: object[] } {
+ const label = this.#classificationLabel(payload.classification);
+ const errorType = payload.error.errorType || "Error";
+ const task = payload.error.taskIdentifier;
+ const envName = payload.error.environmentName;
+
+ return {
+ text: `${label}: ${errorType} in ${task} [${envName}]`,
+ blocks: [
+ {
+ type: "section",
+ text: {
+ type: "mrkdwn",
+ text: `*${label} in ${task} [${envName}]*`,
+ },
+ },
+ ],
+ attachments: [
+ {
+ color: "danger",
+ blocks: [
+ {
+ type: "section",
+ text: {
+ type: "mrkdwn",
+ text: this.#wrapInCodeBlock(
+ payload.error.sampleStackTrace || payload.error.errorMessage
+ ),
+ },
+ },
+ {
+ type: "section",
+ fields: [
+ {
+ type: "mrkdwn",
+ text: `*Task:*\n${task}`,
+ },
+ {
+ type: "mrkdwn",
+ text: `*Environment:*\n${envName}`,
+ },
+ {
+ type: "mrkdwn",
+ text: `*Project:*\n${projectName}`,
+ },
+ {
+ type: "mrkdwn",
+ text: `*Occurrences:*\n${payload.error.occurrenceCount}`,
+ },
+ {
+ type: "mrkdwn",
+ text: `*Last seen:*\n${this.#formatTimestamp(new Date(Number(payload.error.lastSeen)))}`,
+ },
+ ],
+ },
+ {
+ type: "actions",
+ elements: [
+ {
+ type: "button",
+ text: { type: "plain_text", text: "Investigate" },
+ url: errorLink,
+ style: "primary",
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ };
+ }
+
+ #wrapInCodeBlock(text: string, maxLength = 3000) {
+ const wrapperLength = 6; // ``` prefix + ``` suffix
+ const truncationSuffix = "\n\n...truncated — check dashboard for full error";
+ const innerMax = maxLength - wrapperLength;
+
+ const truncated =
+ text.length > innerMax
+ ? text.slice(0, innerMax - truncationSuffix.length) + truncationSuffix
+ : text;
+ return `\`\`\`${truncated}\`\`\``;
+ }
+
+ #formatTimestamp(date: Date): string {
+ return new Intl.DateTimeFormat("en-US", {
+ month: "short",
+ day: "numeric",
+ year: "numeric",
+ hour: "numeric",
+ minute: "2-digit",
+ second: "2-digit",
+ hour12: true,
+ }).format(date);
+ }
+}
+
+function isWebAPIPlatformError(error: unknown): error is WebAPIPlatformError {
+ return (error as WebAPIPlatformError).code === ErrorCode.PlatformError;
+}
+
+function isWebAPIRateLimitedError(error: unknown): error is WebAPIRateLimitedError {
+ return (error as WebAPIRateLimitedError).code === ErrorCode.RateLimitedError;
+}
diff --git a/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts b/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts
new file mode 100644
index 00000000000..b62090c35b3
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/errorAlertEvaluator.server.ts
@@ -0,0 +1,468 @@
+import { type ActiveErrorsSinceQueryResult, type ClickHouse } from "@internal/clickhouse";
+import {
+ type ErrorGroupState,
+ type PrismaClientOrTransaction,
+ type ProjectAlertChannel,
+ type RuntimeEnvironmentType,
+} from "@trigger.dev/database";
+import { $replica, prisma } from "~/db.server";
+import { ErrorAlertConfig } from "~/models/projectAlert.server";
+import { clickhouseClient } from "~/services/clickhouseInstance.server";
+import { logger } from "~/services/logger.server";
+import { alertsWorker } from "~/v3/alertsWorker.server";
+
+type ErrorClassification = "new_issue" | "regression" | "unignored";
+
+interface AlertableError {
+ classification: ErrorClassification;
+ error: ActiveErrorsSinceQueryResult;
+ environmentSlug: string;
+ environmentName: string;
+}
+
+interface ResolvedEnvironment {
+ id: string;
+ slug: string;
+ type: RuntimeEnvironmentType;
+ displayName: string;
+}
+
+const DEFAULT_INTERVAL_MS = 300_000;
+
+/**
+ * For a project evaluates whether to send error alerts
+ *
+ * Alerts are sent if an error is
+ * 1. A new issue
+ * 2. A regression (was resolved and now back)
+ * 3. Unignored (was ignored and is no longer)
+ *
+ * Unignored happens in 3 situations
+ * 1. It was ignored with a future date, and that's now in the past
+ * 2. It was ignored until reaching an error rate (e.g. 10/minute) and that has been exceeded
+ * 3. It was ignored until reaching a total occurrence count (e.g. 1,000) and that has been exceeded
+ */
+export class ErrorAlertEvaluator {
+ constructor(
+ protected readonly _prisma: PrismaClientOrTransaction = prisma,
+ protected readonly _replica: PrismaClientOrTransaction = $replica,
+ protected readonly _clickhouse: ClickHouse = clickhouseClient
+ ) {}
+
+ async evaluate(projectId: string, scheduledAt: number): Promise {
+ const nextScheduledAt = Date.now();
+
+ const channels = await this.resolveChannels(projectId);
+ if (channels.length === 0) {
+ logger.info("[ErrorAlertEvaluator] No active ERROR_GROUP channels, self-terminating", {
+ projectId,
+ });
+ return;
+ }
+
+ const minIntervalMs = this.computeMinInterval(channels);
+ const windowMs = nextScheduledAt - scheduledAt;
+
+ if (windowMs > minIntervalMs * 2) {
+ logger.info("[ErrorAlertEvaluator] Large evaluation window (gap detected)", {
+ projectId,
+ scheduledAt,
+ nextScheduledAt,
+ windowMs,
+ minIntervalMs,
+ });
+ }
+
+ const allEnvTypes = this.collectEnvironmentTypes(channels);
+
+ try {
+ const [project, environments] = await Promise.all([
+ this._replica.project.findFirst({
+ where: { id: projectId },
+ select: { organizationId: true },
+ }),
+ this.resolveEnvironments(projectId, allEnvTypes),
+ ]);
+
+ if (!project) {
+ logger.error("[ErrorAlertEvaluator] Project not found", { projectId });
+ return;
+ }
+
+ if (environments.length === 0) {
+ return;
+ }
+
+ const envIds = environments.map((e) => e.id);
+ const envMap = new Map(environments.map((e) => [e.id, e]));
+ const channelsByEnvId = this.buildChannelsByEnvId(channels, environments);
+
+ const activeErrors = await this.getActiveErrors(
+ project.organizationId,
+ projectId,
+ envIds,
+ scheduledAt
+ );
+
+ if (activeErrors.length === 0) {
+ return;
+ }
+
+ const states = await this.getErrorGroupStates(activeErrors);
+ const stateMap = this.buildStateMap(states);
+
+ const occurrenceCounts = await this.getOccurrenceCountsSince(
+ project.organizationId,
+ projectId,
+ envIds,
+ scheduledAt
+ );
+ const occurrenceMap = this.buildOccurrenceMap(occurrenceCounts);
+
+ const alertableErrors: AlertableError[] = [];
+
+ for (const error of activeErrors) {
+ const key = `${error.environment_id}:${error.task_identifier}:${error.error_fingerprint}`;
+ const state = stateMap.get(key);
+ const env = envMap.get(error.environment_id);
+ const firstSeenMs = Number(error.first_seen);
+
+ const classification = this.classifyError(error, state, firstSeenMs, scheduledAt, {
+ occurrencesSince: occurrenceMap.get(key) ?? 0,
+ windowMs,
+ totalOccurrenceCount: error.occurrence_count,
+ });
+
+ if (classification) {
+ alertableErrors.push({
+ classification,
+ error,
+ environmentSlug: env?.slug ?? "",
+ environmentName: env?.displayName ?? error.environment_id,
+ });
+ }
+ }
+
+ for (const alertable of alertableErrors) {
+ const envChannels = channelsByEnvId.get(alertable.error.environment_id) ?? [];
+ for (const channel of envChannels) {
+ await alertsWorker.enqueue({
+ id: `deliverErrorGroupAlert:${channel.id}:${alertable.error.error_fingerprint}:${scheduledAt}`,
+ job: "v3.deliverErrorGroupAlert",
+ payload: {
+ channelId: channel.id,
+ projectId,
+ classification: alertable.classification,
+ error: {
+ fingerprint: alertable.error.error_fingerprint,
+ environmentId: alertable.error.environment_id,
+ environmentSlug: alertable.environmentSlug,
+ environmentName: alertable.environmentName,
+ taskIdentifier: alertable.error.task_identifier,
+ errorType: alertable.error.error_type,
+ errorMessage: alertable.error.error_message,
+ sampleStackTrace: alertable.error.sample_stack_trace,
+ firstSeen: alertable.error.first_seen,
+ lastSeen: alertable.error.last_seen,
+ occurrenceCount: alertable.error.occurrence_count,
+ },
+ },
+ });
+ }
+ }
+
+ const stateUpdates = alertableErrors.filter(
+ (a) => a.classification === "regression" || a.classification === "unignored"
+ );
+ await this.updateErrorGroupStates(stateUpdates, stateMap);
+
+ logger.info("[ErrorAlertEvaluator] Evaluation complete", {
+ projectId,
+ activeErrors: activeErrors.length,
+ alertableErrors: alertableErrors.length,
+ deliveryJobsEnqueued: alertableErrors.reduce(
+ (sum, a) => sum + (channelsByEnvId.get(a.error.environment_id)?.length ?? 0),
+ 0
+ ),
+ });
+ } catch (error) {
+ logger.error("[ErrorAlertEvaluator] Evaluation failed, will retry on next cycle", {
+ projectId,
+ error,
+ });
+ } finally {
+ await this.selfChain(projectId, nextScheduledAt, minIntervalMs);
+ }
+ }
+
+ private classifyError(
+ error: ActiveErrorsSinceQueryResult,
+ state: ErrorGroupState | undefined,
+ firstSeenMs: number,
+ scheduledAt: number,
+ thresholdContext: { occurrencesSince: number; windowMs: number; totalOccurrenceCount: number }
+ ): ErrorClassification | null {
+ if (!state) {
+ return firstSeenMs > scheduledAt ? "new_issue" : null;
+ }
+
+ switch (state.status) {
+ case "UNRESOLVED":
+ return null;
+
+ case "RESOLVED": {
+ if (!state.resolvedAt) return null;
+ const lastSeenMs = Number(error.last_seen);
+ return lastSeenMs > state.resolvedAt.getTime() ? "regression" : null;
+ }
+
+ case "IGNORED":
+ return this.isIgnoreBreached(state, thresholdContext) ? "unignored" : null;
+
+ default:
+ return null;
+ }
+ }
+
+ private isIgnoreBreached(
+ state: ErrorGroupState,
+ context: { occurrencesSince: number; windowMs: number; totalOccurrenceCount: number }
+ ): boolean {
+ if (state.ignoredUntil && state.ignoredUntil.getTime() <= Date.now()) {
+ return true;
+ }
+
+ if (
+ state.ignoredUntilOccurrenceRate !== null &&
+ state.ignoredUntilOccurrenceRate !== undefined
+ ) {
+ const windowMinutes = Math.max(context.windowMs / 60_000, 1);
+ const rate = context.occurrencesSince / windowMinutes;
+ if (rate > state.ignoredUntilOccurrenceRate) {
+ return true;
+ }
+ }
+
+ if (
+ state.ignoredUntilTotalOccurrences != null &&
+ state.ignoredAtOccurrenceCount != null
+ ) {
+ const occurrencesSinceIgnored =
+ context.totalOccurrenceCount - Number(state.ignoredAtOccurrenceCount);
+ if (occurrencesSinceIgnored >= state.ignoredUntilTotalOccurrences) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private async resolveChannels(projectId: string): Promise {
+ return this._replica.projectAlertChannel.findMany({
+ where: {
+ projectId,
+ alertTypes: { has: "ERROR_GROUP" },
+ enabled: true,
+ },
+ });
+ }
+
+ private computeMinInterval(channels: ProjectAlertChannel[]): number {
+ let min = DEFAULT_INTERVAL_MS;
+ for (const ch of channels) {
+ const config = ErrorAlertConfig.safeParse(ch.errorAlertConfig);
+ if (config.success) {
+ min = Math.min(min, config.data.evaluationIntervalMs);
+ }
+ }
+ return min;
+ }
+
+ private collectEnvironmentTypes(channels: ProjectAlertChannel[]): RuntimeEnvironmentType[] {
+ const types = new Set();
+ for (const ch of channels) {
+ for (const t of ch.environmentTypes) {
+ types.add(t);
+ }
+ }
+ return Array.from(types);
+ }
+
+ private async resolveEnvironments(
+ projectId: string,
+ types: RuntimeEnvironmentType[]
+ ): Promise {
+ const envs = await this._replica.runtimeEnvironment.findMany({
+ where: {
+ projectId,
+ type: { in: types },
+ },
+ select: {
+ id: true,
+ type: true,
+ slug: true,
+ branchName: true,
+ },
+ });
+
+ return envs.map((e) => ({
+ id: e.id,
+ slug: e.slug,
+ type: e.type,
+ displayName: e.branchName ?? e.slug,
+ }));
+ }
+
+ private buildChannelsByEnvId(
+ channels: ProjectAlertChannel[],
+ environments: ResolvedEnvironment[]
+ ): Map {
+ const result = new Map();
+ for (const env of environments) {
+ const matching = channels.filter((ch) => ch.environmentTypes.includes(env.type));
+ if (matching.length > 0) {
+ result.set(env.id, matching);
+ }
+ }
+ return result;
+ }
+
+ private async getActiveErrors(
+ organizationId: string,
+ projectId: string,
+ envIds: string[],
+ scheduledAt: number
+ ): Promise {
+ const qb = this._clickhouse.errors.activeErrorsSinceQueryBuilder();
+ qb.where("organization_id = {organizationId: String}", { organizationId });
+ qb.where("project_id = {projectId: String}", { projectId });
+ qb.where("environment_id IN {envIds: Array(String)}", { envIds });
+ qb.groupBy("environment_id, task_identifier, error_fingerprint");
+ qb.having("toInt64(last_seen) > {scheduledAt: Int64}", {
+ scheduledAt,
+ });
+
+ const [err, results] = await qb.execute();
+ if (err) {
+ logger.error("[ErrorAlertEvaluator] Failed to query active errors", { error: err });
+ return [];
+ }
+ return results ?? [];
+ }
+
+ private async getErrorGroupStates(
+ activeErrors: ActiveErrorsSinceQueryResult[]
+ ): Promise {
+ if (activeErrors.length === 0) return [];
+
+ return this._replica.errorGroupState.findMany({
+ where: {
+ OR: activeErrors.map((e) => ({
+ environmentId: e.environment_id,
+ taskIdentifier: e.task_identifier,
+ errorFingerprint: e.error_fingerprint,
+ })),
+ },
+ });
+ }
+
+ private buildStateMap(states: ErrorGroupState[]): Map {
+ const map = new Map();
+ for (const s of states) {
+ map.set(`${s.environmentId}:${s.taskIdentifier}:${s.errorFingerprint}`, s);
+ }
+ return map;
+ }
+
+ private async getOccurrenceCountsSince(
+ organizationId: string,
+ projectId: string,
+ envIds: string[],
+ scheduledAt: number
+ ): Promise<
+ Array<{
+ environment_id: string;
+ task_identifier: string;
+ error_fingerprint: string;
+ occurrences_since: number;
+ }>
+ > {
+ const qb = this._clickhouse.errors.occurrenceCountsSinceQueryBuilder();
+ qb.where("organization_id = {organizationId: String}", { organizationId });
+ qb.where("project_id = {projectId: String}", { projectId });
+ qb.where("environment_id IN {envIds: Array(String)}", { envIds });
+ qb.where("minute >= toStartOfMinute(fromUnixTimestamp64Milli({scheduledAt: Int64}))", {
+ scheduledAt,
+ });
+ qb.groupBy("environment_id, task_identifier, error_fingerprint");
+
+ const [err, results] = await qb.execute();
+ if (err) {
+ logger.error("[ErrorAlertEvaluator] Failed to query occurrence counts", { error: err });
+ return [];
+ }
+ return results ?? [];
+ }
+
+ private buildOccurrenceMap(
+ counts: Array<{
+ environment_id: string;
+ task_identifier: string;
+ error_fingerprint: string;
+ occurrences_since: number;
+ }>
+ ): Map {
+ const map = new Map();
+ for (const c of counts) {
+ map.set(
+ `${c.environment_id}:${c.task_identifier}:${c.error_fingerprint}`,
+ c.occurrences_since
+ );
+ }
+ return map;
+ }
+
+ private async updateErrorGroupStates(
+ alertableErrors: AlertableError[],
+ stateMap: Map
+ ): Promise {
+ for (const alertable of alertableErrors) {
+ const key = `${alertable.error.environment_id}:${alertable.error.task_identifier}:${alertable.error.error_fingerprint}`;
+ const state = stateMap.get(key);
+ if (!state) continue;
+
+ await this._prisma.errorGroupState.update({
+ where: { id: state.id },
+ data: {
+ status: "UNRESOLVED",
+ ignoredUntil: null,
+ ignoredUntilOccurrenceRate: null,
+ ignoredUntilTotalOccurrences: null,
+ ignoredAtOccurrenceCount: null,
+ ignoredAt: null,
+ ignoredReason: null,
+ ignoredByUserId: null,
+ resolvedAt: null,
+ resolvedInVersion: null,
+ resolvedBy: null,
+ },
+ });
+ }
+ }
+
+ private async selfChain(
+ projectId: string,
+ nextScheduledAt: number,
+ intervalMs: number
+ ): Promise {
+ await alertsWorker.enqueue({
+ id: `evaluateErrorAlerts:${projectId}`,
+ job: "v3.evaluateErrorAlerts",
+ payload: {
+ projectId,
+ scheduledAt: nextScheduledAt,
+ },
+ availableAt: new Date(nextScheduledAt + intervalMs),
+ });
+ }
+}
diff --git a/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts b/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts
new file mode 100644
index 00000000000..1c0f939862c
--- /dev/null
+++ b/apps/webapp/app/v3/services/alerts/errorGroupWebhook.server.ts
@@ -0,0 +1,74 @@
+import { nanoid } from "nanoid";
+import type { ErrorWebhook } from "@trigger.dev/core/v3/schemas";
+
+export type ErrorAlertClassification = "new_issue" | "regression" | "unignored";
+
+export type ErrorGroupAlertData = {
+ classification: ErrorAlertClassification;
+ error: {
+ fingerprint: string;
+ environmentId: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+ };
+ organization: {
+ id: string;
+ slug: string;
+ name: string;
+ };
+ project: {
+ id: string;
+ externalRef: string;
+ slug: string;
+ name: string;
+ };
+ dashboardUrl: string;
+};
+
+/**
+ * Generates a webhook payload for an error group alert that conforms to the
+ * ErrorWebhook schema from @trigger.dev/core/v3/schemas
+ */
+export function generateErrorGroupWebhookPayload(data: ErrorGroupAlertData): ErrorWebhook {
+ return {
+ id: nanoid(),
+ created: new Date(),
+ webhookVersion: "2025-01-01",
+ type: "alert.error" as const,
+ object: {
+ classification: data.classification,
+ error: {
+ fingerprint: data.error.fingerprint,
+ type: data.error.errorType,
+ message: data.error.errorMessage,
+ stackTrace: data.error.sampleStackTrace || undefined,
+ firstSeen: new Date(Number(data.error.firstSeen)),
+ lastSeen: new Date(Number(data.error.lastSeen)),
+ occurrenceCount: data.error.occurrenceCount,
+ taskIdentifier: data.error.taskIdentifier,
+ },
+ environment: {
+ id: data.error.environmentId,
+ name: data.error.environmentName,
+ },
+ organization: {
+ id: data.organization.id,
+ slug: data.organization.slug,
+ name: data.organization.name,
+ },
+ project: {
+ id: data.project.id,
+ ref: data.project.externalRef,
+ slug: data.project.slug,
+ name: data.project.name,
+ },
+ dashboardUrl: data.dashboardUrl,
+ },
+ };
+}
diff --git a/apps/webapp/app/v3/services/errorGroupActions.server.ts b/apps/webapp/app/v3/services/errorGroupActions.server.ts
new file mode 100644
index 00000000000..c026efe2aba
--- /dev/null
+++ b/apps/webapp/app/v3/services/errorGroupActions.server.ts
@@ -0,0 +1,144 @@
+import { type PrismaClientOrTransaction, prisma } from "~/db.server";
+
+type ErrorGroupIdentifier = {
+ organizationId: string;
+ projectId: string;
+ environmentId: string;
+ taskIdentifier: string;
+ errorFingerprint: string;
+};
+
+export class ErrorGroupActions {
+ constructor(private readonly _prisma: PrismaClientOrTransaction = prisma) {}
+
+ async resolveError(
+ identifier: ErrorGroupIdentifier,
+ params: {
+ userId: string;
+ resolvedInVersion?: string;
+ }
+ ) {
+ const where = {
+ environmentId_taskIdentifier_errorFingerprint: {
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ },
+ };
+
+ const now = new Date();
+
+ return this._prisma.errorGroupState.upsert({
+ where,
+ update: {
+ status: "RESOLVED",
+ resolvedAt: now,
+ resolvedInVersion: params.resolvedInVersion ?? null,
+ resolvedBy: params.userId,
+ ignoredUntil: null,
+ ignoredUntilOccurrenceRate: null,
+ ignoredUntilTotalOccurrences: null,
+ ignoredAtOccurrenceCount: null,
+ ignoredAt: null,
+ ignoredReason: null,
+ ignoredByUserId: null,
+ },
+ create: {
+ organizationId: identifier.organizationId,
+ projectId: identifier.projectId,
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ status: "RESOLVED",
+ resolvedAt: now,
+ resolvedInVersion: params.resolvedInVersion ?? null,
+ resolvedBy: params.userId,
+ },
+ });
+ }
+
+ async ignoreError(
+ identifier: ErrorGroupIdentifier,
+ params: {
+ userId: string;
+ duration?: number;
+ occurrenceRateThreshold?: number;
+ totalOccurrencesThreshold?: number;
+ occurrenceCountAtIgnoreTime?: number;
+ reason?: string;
+ }
+ ) {
+ const where = {
+ environmentId_taskIdentifier_errorFingerprint: {
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ },
+ };
+
+ const now = new Date();
+ const ignoredUntil = params.duration ? new Date(now.getTime() + params.duration) : null;
+
+ const data = {
+ status: "IGNORED" as const,
+ ignoredAt: now,
+ ignoredUntil,
+ ignoredUntilOccurrenceRate: params.occurrenceRateThreshold ?? null,
+ ignoredUntilTotalOccurrences: params.totalOccurrencesThreshold ?? null,
+ ignoredAtOccurrenceCount: params.occurrenceCountAtIgnoreTime ?? null,
+ ignoredReason: params.reason ?? null,
+ ignoredByUserId: params.userId,
+ resolvedAt: null,
+ resolvedInVersion: null,
+ resolvedBy: null,
+ };
+
+ return this._prisma.errorGroupState.upsert({
+ where,
+ update: data,
+ create: {
+ organizationId: identifier.organizationId,
+ projectId: identifier.projectId,
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ ...data,
+ },
+ });
+ }
+
+ async unresolveError(identifier: ErrorGroupIdentifier) {
+ const where = {
+ environmentId_taskIdentifier_errorFingerprint: {
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ },
+ };
+
+ return this._prisma.errorGroupState.upsert({
+ where,
+ update: {
+ status: "UNRESOLVED",
+ resolvedAt: null,
+ resolvedInVersion: null,
+ resolvedBy: null,
+ ignoredUntil: null,
+ ignoredUntilOccurrenceRate: null,
+ ignoredUntilTotalOccurrences: null,
+ ignoredAtOccurrenceCount: null,
+ ignoredAt: null,
+ ignoredReason: null,
+ ignoredByUserId: null,
+ },
+ create: {
+ organizationId: identifier.organizationId,
+ projectId: identifier.projectId,
+ environmentId: identifier.environmentId,
+ taskIdentifier: identifier.taskIdentifier,
+ errorFingerprint: identifier.errorFingerprint,
+ status: "UNRESOLVED",
+ },
+ });
+ }
+}
diff --git a/apps/webapp/tailwind.config.js b/apps/webapp/tailwind.config.js
index dd053b0ac7f..d598ae83d20 100644
--- a/apps/webapp/tailwind.config.js
+++ b/apps/webapp/tailwind.config.js
@@ -177,6 +177,7 @@ const docs = colors.blue[500];
const bulkActions = colors.emerald[500];
const aiPrompts = colors.blue[500];
const aiMetrics = colors.green[500];
+const errors = colors.amber[500];
/** Other variables */
const radius = "0.5rem";
@@ -262,6 +263,7 @@ module.exports = {
customDashboards,
aiPrompts,
aiMetrics,
+ errors,
},
focusStyles: {
outline: "1px solid",
diff --git a/apps/webapp/test/errorGroupWebhook.test.ts b/apps/webapp/test/errorGroupWebhook.test.ts
new file mode 100644
index 00000000000..a7e797685ae
--- /dev/null
+++ b/apps/webapp/test/errorGroupWebhook.test.ts
@@ -0,0 +1,248 @@
+import { describe, test, expect } from "vitest";
+import { Webhook } from "@trigger.dev/core/v3/schemas";
+import {
+ generateErrorGroupWebhookPayload,
+ type ErrorGroupAlertData,
+} from "~/v3/services/alerts/errorGroupWebhook.server";
+
+function createMockAlertData(overrides: Partial<ErrorGroupAlertData> = {}): ErrorGroupAlertData {
+  const now = Date.now();
+  const earlier = now - 3600000; // 1 hour ago
+
+  return {
+    classification: "new_issue",
+    error: {
+      fingerprint: "fp_test_12345",
+      environmentId: "env_abc123",
+      environmentName: "Production",
+      taskIdentifier: "process-payment",
+      errorType: "TypeError",
+      errorMessage: "Cannot read property 'id' of undefined",
+      sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)`,
+      firstSeen: String(earlier),
+      lastSeen: String(now),
+      occurrenceCount: 5,
+    },
+    organization: {
+      id: "org_xyz789",
+      slug: "acme-corp",
+      name: "Acme Corp",
+    },
+    project: {
+      id: "proj_123",
+      externalRef: "proj_abc",
+      slug: "my-project",
+      name: "My Project",
+    },
+    dashboardUrl:
+      "https://cloud.trigger.dev/orgs/acme-corp/projects/my-project/errors/fp_test_12345",
+    ...overrides,
+  };
+}
+
+describe("generateErrorGroupWebhookPayload", () => {
+ test("generates a valid webhook payload", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ expect(payload).toMatchObject({
+ type: "alert.error",
+ object: {
+ classification: "new_issue",
+ error: {
+ fingerprint: "fp_test_12345",
+ type: "TypeError",
+ message: "Cannot read property 'id' of undefined",
+ taskIdentifier: "process-payment",
+ occurrenceCount: 5,
+ },
+ environment: {
+ id: "env_abc123",
+ name: "Production",
+ },
+ organization: {
+ id: "org_xyz789",
+ slug: "acme-corp",
+ name: "Acme Corp",
+ },
+ project: {
+ id: "proj_123",
+ ref: "proj_abc",
+ slug: "my-project",
+ name: "My Project",
+ },
+ dashboardUrl:
+ "https://cloud.trigger.dev/orgs/acme-corp/projects/my-project/errors/fp_test_12345",
+ },
+ });
+
+ expect(payload.id).toBeDefined();
+ expect(payload.created).toBeInstanceOf(Date);
+ expect(payload.webhookVersion).toBe("2025-01-01");
+ });
+
+ test("payload is valid according to Webhook schema", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ const parsed = Webhook.parse(payload);
+ expect(parsed.type).toBe("alert.error");
+ });
+
+ test("payload can be serialized and deserialized", () => {
+ const alertData = createMockAlertData();
+ const payload = generateErrorGroupWebhookPayload(alertData);
+
+ // Serialize to JSON (simulating sending over HTTP)
+ const serialized = JSON.stringify(payload);
+ const deserialized = JSON.parse(serialized);
+
+ // Verify it can still be parsed by the schema
+ const parsed = Webhook.parse(deserialized);
+ expect(parsed.type).toBe("alert.error");
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ expect(parsed.object.error.fingerprint).toBe("fp_test_12345");
+ }
+ });
+
+ test("handles new_issue classification", () => {
+ const alertData = createMockAlertData({ classification: "new_issue" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ }
+ });
+
+ test("handles regression classification", () => {
+ const alertData = createMockAlertData({ classification: "regression" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("regression");
+ }
+ });
+
+ test("handles unignored classification", () => {
+ const alertData = createMockAlertData({ classification: "unignored" });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("unignored");
+ }
+ });
+
+ test("handles empty stack trace", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ sampleStackTrace: "",
+ },
+ });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBeUndefined();
+ }
+ });
+
+ test("includes stack trace when present", () => {
+ const stackTrace = "Error at line 42";
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ sampleStackTrace: stackTrace,
+ },
+ });
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBe(stackTrace);
+ }
+ });
+
+ test("preserves date fields correctly", () => {
+ const firstSeen = new Date("2024-01-01T00:00:00Z");
+ const lastSeen = new Date("2024-01-02T12:00:00Z");
+
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ firstSeen: String(firstSeen.getTime()),
+ lastSeen: String(lastSeen.getTime()),
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.firstSeen).toEqual(firstSeen);
+ expect(parsed.object.error.lastSeen).toEqual(lastSeen);
+ }
+ });
+
+  test("handles special characters in error messages", () => {
+    const alertData = createMockAlertData({
+      error: {
+        ...createMockAlertData().error,
+        errorMessage: "Unexpected token `<` in JSON at position 0",
+        sampleStackTrace: `SyntaxError: Unexpected token \`<\` in JSON
+    at JSON.parse (<anonymous>)
+    at fetch("https://api.example.com/data?query=test&limit=10")`,
+      },
+    });
+
+    const payload = generateErrorGroupWebhookPayload(alertData);
+    const serialized = JSON.stringify(payload);
+    const deserialized = JSON.parse(serialized);
+    const parsed = Webhook.parse(deserialized);
+
+    if (parsed.type === "alert.error") {
+      expect(parsed.object.error.message).toBe("Unexpected token `<` in JSON at position 0");
+    }
+  });
+
+ test("handles unicode and emoji in error messages", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ errorMessage: "Failed to process emoji 🔥 in message: Hello 世界",
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const serialized = JSON.stringify(payload);
+ const deserialized = JSON.parse(serialized);
+ const parsed = Webhook.parse(deserialized);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.message).toBe("Failed to process emoji 🔥 in message: Hello 世界");
+ }
+ });
+
+ test("handles large occurrence counts", () => {
+ const alertData = createMockAlertData({
+ error: {
+ ...createMockAlertData().error,
+ occurrenceCount: 999999,
+ },
+ });
+
+ const payload = generateErrorGroupWebhookPayload(alertData);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.occurrenceCount).toBe(999999);
+ }
+ });
+});
diff --git a/apps/webapp/test/slackErrorAlerts.test.ts b/apps/webapp/test/slackErrorAlerts.test.ts
new file mode 100644
index 00000000000..b86856adc4c
--- /dev/null
+++ b/apps/webapp/test/slackErrorAlerts.test.ts
@@ -0,0 +1,403 @@
+import { describe, test, expect, beforeAll, afterAll } from "vitest";
+import type { PrismaClient } from "@trigger.dev/database";
+
+let DeliverErrorGroupAlertService: typeof import("../app/v3/services/alerts/deliverErrorGroupAlert.server.js").DeliverErrorGroupAlertService;
+let prisma: PrismaClient;
+let getSecretStore: typeof import("../app/services/secrets/secretStore.server.js").getSecretStore;
+
+type ErrorAlertPayload = {
+ channelId: string;
+ projectId: string;
+ classification: "new_issue" | "regression" | "unignored";
+ error: {
+ fingerprint: string;
+ environmentId: string;
+ environmentSlug: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+ };
+};
+
+let testChannelId: string;
+let testProjectId: string;
+let testOrganizationId: string;
+let testSecretKey: string;
+let testSecretReferenceId: string;
+
+// Helper to create mock error payloads
+function createMockErrorPayload(
+  overrides: Partial<Omit<ErrorAlertPayload, "error">> & {
+    error?: Partial<ErrorAlertPayload["error"]>;
+  } = {}
+): ErrorAlertPayload {
+  const { error: errorOverrides, ...payloadOverrides } = overrides;
+
+  const defaultError: ErrorAlertPayload["error"] = {
+    fingerprint: "fp_test_" + Date.now(),
+    environmentId: "env_test_dev",
+    environmentSlug: "dev",
+    environmentName: "Development",
+    taskIdentifier: "process-payment",
+    errorType: "TypeError",
+    errorMessage: "Cannot read property 'id' of undefined",
+    sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)
+    at TaskExecutor.execute (node_modules/@trigger.dev/core/dist/index.js:234:18)`,
+    firstSeen: Date.now().toString(),
+    lastSeen: Date.now().toString(),
+    occurrenceCount: 42,
+    ...errorOverrides,
+  };
+
+  return {
+    channelId: testChannelId,
+    projectId: testProjectId,
+    classification: "new_issue",
+    ...payloadOverrides,
+    error: defaultError,
+  };
+}
+
+// Skip tests if Slack credentials not configured
+const hasSlackCredentials =
+ !!process.env.TEST_SLACK_CHANNEL_ID &&
+ !!process.env.TEST_SLACK_BOT_TOKEN;
+
+describe.skipIf(!hasSlackCredentials)("Slack Error Alert Visual Tests", () => {
+ beforeAll(async () => {
+ const dbModule = await import("../app/db.server.js");
+ prisma = dbModule.prisma;
+ const secretModule = await import("../app/services/secrets/secretStore.server.js");
+ getSecretStore = secretModule.getSecretStore;
+ const alertModule = await import(
+ "../app/v3/services/alerts/deliverErrorGroupAlert.server.js"
+ );
+ DeliverErrorGroupAlertService = alertModule.DeliverErrorGroupAlertService;
+
+ const organization = await prisma.organization.create({
+ data: {
+ title: "Slack Test Org",
+ slug: "slack-test-org-" + Date.now(),
+ },
+ });
+ testOrganizationId = organization.id;
+
+ // Create test project
+ const project = await prisma.project.create({
+ data: {
+ name: "Slack Test Project",
+ slug: "slack-test-project-" + Date.now(),
+ externalRef: "proj_slack_test_" + Date.now(),
+ organizationId: organization.id,
+ },
+ });
+ testProjectId = project.id;
+
+ const secretStore = getSecretStore("DATABASE");
+ testSecretKey = `slack-test-token-${Date.now()}`;
+
+ await secretStore.setSecret(testSecretKey, {
+ botAccessToken: process.env.TEST_SLACK_BOT_TOKEN!,
+ });
+
+ const secretReference = await prisma.secretReference.create({
+ data: {
+ key: testSecretKey,
+ provider: "DATABASE",
+ },
+ });
+ testSecretReferenceId = secretReference.id;
+
+ // Create Slack organization integration
+ const integration = await prisma.organizationIntegration.create({
+ data: {
+ friendlyId: "integration_test_" + Date.now(),
+ organizationId: organization.id,
+ service: "SLACK",
+ integrationData: {},
+ tokenReferenceId: secretReference.id,
+ },
+ });
+
+ // Create alert channel
+ const channel = await prisma.projectAlertChannel.create({
+ data: {
+ friendlyId: "channel_test_" + Date.now(),
+ name: "Test Slack Channel",
+ type: "SLACK",
+ enabled: true,
+ projectId: project.id,
+ integrationId: integration.id,
+ properties: {
+ channelId: process.env.TEST_SLACK_CHANNEL_ID!,
+ channelName: "test-slack-alerts",
+ integrationId: integration.id,
+ },
+ },
+ });
+ testChannelId = channel.id;
+ });
+
+ afterAll(async () => {
+ if (testChannelId) {
+ await prisma.projectAlertChannel.deleteMany({
+ where: { id: testChannelId },
+ });
+ }
+ if (testOrganizationId) {
+ await prisma.organizationIntegration.deleteMany({
+ where: { organizationId: testOrganizationId },
+ });
+ }
+ if (testSecretReferenceId) {
+ await prisma.secretReference.deleteMany({
+ where: { id: testSecretReferenceId },
+ });
+ }
+ if (testSecretKey) {
+ const secretStore = getSecretStore("DATABASE");
+ await secretStore.deleteSecret(testSecretKey);
+ }
+ if (testProjectId) {
+ await prisma.project.deleteMany({
+ where: { id: testProjectId },
+ });
+ }
+ if (testOrganizationId) {
+ await prisma.organization.deleteMany({
+ where: { id: testOrganizationId },
+ });
+ }
+ });
+
+ test("new_issue classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "new_issue",
+ error: {
+ taskIdentifier: "process-order",
+ errorMessage: "Failed to process order due to invalid payment method",
+ errorType: "PaymentError",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ // Message sent - check Slack channel visually
+ expect(true).toBe(true);
+ });
+
+ test("regression classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "regression",
+ error: {
+ taskIdentifier: "send-email",
+ errorMessage: "SMTP connection timeout after 30 seconds",
+ errorType: "TimeoutError",
+ occurrenceCount: 156,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("unignored (resurfaced) classification", async () => {
+ const payload = createMockErrorPayload({
+ classification: "unignored",
+ error: {
+ taskIdentifier: "sync-database",
+ errorMessage: "Connection pool exhausted",
+ errorType: "DatabaseError",
+ occurrenceCount: 99,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("short error message", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "Not found",
+ errorType: "NotFoundError",
+ sampleStackTrace: "NotFoundError: Not found\n at findUser (src/db.ts:10:5)",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+  test("long stack trace", async () => {
+    const longStackTrace = `ReferenceError: processData is not defined
+    at handler (src/tasks/data-processor.ts:125:15)
+    at async TaskRunner.execute (node_modules/@trigger.dev/sdk/dist/runner.js:89:12)
+    at async WorkerThread.processTask (node_modules/@trigger.dev/sdk/dist/worker.js:234:18)
+    at async WorkerPool.run (src/lib/worker-pool.ts:56:10)
+    at async TaskQueue.dequeue (src/lib/queue.ts:142:8)
+    at async Orchestrator.processNextTask (src/orchestrator.ts:98:5)
+    at async Orchestrator.start (src/orchestrator.ts:45:7)
+    at async main (src/index.ts:12:3)
+    at Object.<anonymous> (src/index.ts:20:1)
+    at Module._compile (node:internal/modules/cjs/loader:1376:14)
+    at Module._extensions..js (node:internal/modules/cjs/loader:1435:10)
+    at Module.load (node:internal/modules/cjs/loader:1207:32)
+    at Module._load (node:internal/modules/cjs/loader:1023:12)
+    at Function.executeUserEntryPoint [as runMain] (node:internal/modules/run_main:135:12)
+    at node:internal/main/run_main_module:28:49`;
+
+    const payload = createMockErrorPayload({
+      error: {
+        errorType: "ReferenceError",
+        errorMessage: "processData is not defined",
+        sampleStackTrace: longStackTrace,
+        taskIdentifier: "data-processor",
+      },
+    });
+
+    const service = new DeliverErrorGroupAlertService();
+    await service.call(payload);
+
+    expect(true).toBe(true);
+  });
+
+ test("very long error message (triggers truncation)", async () => {
+ // Create a message that's over 3000 characters
+ const longMessage = "x".repeat(3500);
+ const longStackTrace = `Error: ${longMessage}
+ at verylongfunctionname (src/tasks/long-task.ts:1:1)
+ ${" at stackframe (file.ts:1:1)\n".repeat(100)}`;
+
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: longMessage,
+ sampleStackTrace: longStackTrace,
+ taskIdentifier: "long-error-task",
+ errorType: "Error",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ // Should see truncation message in Slack
+ expect(true).toBe(true);
+ });
+
+  test("special characters in error", async () => {
+    const payload = createMockErrorPayload({
+      error: {
+        errorMessage: "Unexpected token `<` in JSON at position 0",
+        errorType: "SyntaxError",
+        sampleStackTrace: `SyntaxError: Unexpected token \`<\` in JSON at position 0
+    at JSON.parse (<anonymous>)
+    at parseResponse (src/api/client.ts:42:15)
+    at fetch("https://api.example.com/data?query=test&limit=10")`,
+        taskIdentifier: "api-fetch-task",
+      },
+    });
+
+    const service = new DeliverErrorGroupAlertService();
+    await service.call(payload);
+
+    expect(true).toBe(true);
+  });
+
+ test("unicode and emoji in error", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "Failed to process emoji 🔥 in message: Hello 世界",
+ errorType: "EncodingError",
+ sampleStackTrace: `EncodingError: Failed to process emoji 🔥 in message: Hello 世界
+ at encodeMessage (src/utils/encoding.ts:15:10)
+ at sendMessage (src/tasks/messaging.ts:42:8)`,
+ taskIdentifier: "messaging-task",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - TypeError", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "TypeError",
+ errorMessage: "Cannot call method 'map' on undefined",
+ sampleStackTrace: `TypeError: Cannot call method 'map' on undefined
+ at transformData (src/transformers/data.ts:18:25)`,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - ReferenceError", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "ReferenceError",
+ errorMessage: "userConfig is not defined",
+ sampleStackTrace: `ReferenceError: userConfig is not defined
+ at initializeApp (src/app.ts:32:10)`,
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("different error types - Custom Error", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorType: "InvalidConfigurationError",
+ errorMessage: "API key is missing or invalid",
+ sampleStackTrace: `InvalidConfigurationError: API key is missing or invalid
+ at validateConfig (src/config/validator.ts:45:11)
+ at loadConfig (src/config/loader.ts:23:5)`,
+ taskIdentifier: "config-loader",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+
+ test("error with no stack trace", async () => {
+ const payload = createMockErrorPayload({
+ error: {
+ errorMessage: "An unknown error occurred",
+ errorType: "Error",
+ sampleStackTrace: "",
+ },
+ });
+
+ const service = new DeliverErrorGroupAlertService();
+ await service.call(payload);
+
+ expect(true).toBe(true);
+ });
+});
diff --git a/apps/webapp/test/webhookErrorAlerts.test.ts b/apps/webapp/test/webhookErrorAlerts.test.ts
new file mode 100644
index 00000000000..d0e3e9e1a89
--- /dev/null
+++ b/apps/webapp/test/webhookErrorAlerts.test.ts
@@ -0,0 +1,128 @@
+import { describe, test, expect } from "vitest";
+import { Webhook } from "@trigger.dev/core/v3/schemas";
+import { generateErrorGroupWebhookPayload } from "~/v3/services/alerts/errorGroupWebhook.server";
+
+type ErrorData = {
+ fingerprint: string;
+ environmentId: string;
+ environmentName: string;
+ taskIdentifier: string;
+ errorType: string;
+ errorMessage: string;
+ sampleStackTrace: string;
+ firstSeen: string;
+ lastSeen: string;
+ occurrenceCount: number;
+};
+
+const TEST_ORG = { id: "org_test_123", slug: "webhook-test-org", name: "Webhook Test Org" };
+const TEST_PROJECT = {
+ id: "proj_test_456",
+ externalRef: "proj_webhook_test",
+ slug: "webhook-test-project",
+ name: "Webhook Test Project",
+};
+const DASHBOARD_URL = "https://cloud.trigger.dev/test";
+
+function createMockError(overrides: Partial<ErrorData> = {}): ErrorData {
+  return {
+    fingerprint: "fp_test_default",
+    environmentId: "env_test_dev",
+    environmentName: "Development",
+    taskIdentifier: "process-payment",
+    errorType: "TypeError",
+    errorMessage: "Cannot read property 'id' of undefined",
+    sampleStackTrace: `TypeError: Cannot read property 'id' of undefined
+    at processPayment (src/tasks/payment.ts:42:15)
+    at Object.run (src/tasks/payment.ts:15:20)
+    at TaskExecutor.execute (node_modules/@trigger.dev/core/dist/index.js:234:18)`,
+    firstSeen: Date.now().toString(),
+    lastSeen: Date.now().toString(),
+    occurrenceCount: 42,
+    ...overrides,
+  };
+}
+
+function buildPayload(classification: "new_issue" | "regression" | "unignored", error: ErrorData) {
+ return generateErrorGroupWebhookPayload({
+ classification,
+ error,
+ organization: TEST_ORG,
+ project: TEST_PROJECT,
+ dashboardUrl: DASHBOARD_URL,
+ });
+}
+
+describe("Webhook Error Alert Payload", () => {
+ test("payload structure is valid and parseable", () => {
+ const payload = buildPayload("new_issue", createMockError());
+ const parsed = Webhook.parse(payload);
+
+ expect(parsed.type).toBe("alert.error");
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("new_issue");
+ expect(parsed.object.error.type).toBe("TypeError");
+ expect(parsed.object.organization.slug).toBe("webhook-test-org");
+ expect(parsed.object.project.ref).toBe("proj_webhook_test");
+ }
+ });
+
+ test("payload survives JSON round-trip", () => {
+ const error = createMockError();
+ const payload = buildPayload("regression", error);
+
+ const deserialized = JSON.parse(JSON.stringify(payload));
+ const parsed = Webhook.parse(deserialized);
+
+ expect(parsed.type).toBe("alert.error");
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe("regression");
+ expect(parsed.object.error.fingerprint).toBe(error.fingerprint);
+ }
+ });
+
+ test("all classifications are valid", () => {
+ const classifications = ["new_issue", "regression", "unignored"] as const;
+
+ for (const classification of classifications) {
+ const payload = buildPayload(classification, createMockError());
+ const parsed = Webhook.parse(payload);
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.classification).toBe(classification);
+ }
+ }
+ });
+
+ test("error details are preserved", () => {
+ const error = createMockError({
+ fingerprint: "fp_custom_123",
+ errorType: "CustomError",
+ errorMessage: "Custom error message",
+ sampleStackTrace: "CustomError: at line 42",
+ taskIdentifier: "my-custom-task",
+ occurrenceCount: 999,
+ });
+
+ const payload = buildPayload("new_issue", error);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.fingerprint).toBe("fp_custom_123");
+ expect(parsed.object.error.type).toBe("CustomError");
+ expect(parsed.object.error.message).toBe("Custom error message");
+ expect(parsed.object.error.stackTrace).toBe("CustomError: at line 42");
+ expect(parsed.object.error.taskIdentifier).toBe("my-custom-task");
+ expect(parsed.object.error.occurrenceCount).toBe(999);
+ }
+ });
+
+ test("empty stack trace becomes undefined", () => {
+ const error = createMockError({ sampleStackTrace: "" });
+ const payload = buildPayload("new_issue", error);
+ const parsed = Webhook.parse(payload);
+
+ if (parsed.type === "alert.error") {
+ expect(parsed.object.error.stackTrace).toBeUndefined();
+ }
+ });
+});
diff --git a/internal-packages/clickhouse/src/errors.ts b/internal-packages/clickhouse/src/errors.ts
index c93efbcaf1f..4b13ce18c80 100644
--- a/internal-packages/clickhouse/src/errors.ts
+++ b/internal-packages/clickhouse/src/errors.ts
@@ -94,8 +94,8 @@ export function getErrorGroups(ch: ClickhouseReader, settings?: ClickHouseSettin
AND project_id = {projectId: String}
AND environment_id = {environmentId: String}
GROUP BY error_fingerprint, task_identifier
- HAVING max(last_seen) >= now() - INTERVAL {days: Int64} DAY
- ORDER BY last_seen DESC
+ HAVING toInt64(last_seen) >= toInt64(toUnixTimestamp(now() - INTERVAL {days: Int64} DAY)) * 1000
+ ORDER BY toInt64(last_seen) DESC
LIMIT {limit: Int64}
OFFSET {offset: Int64}
`,
@@ -314,3 +314,148 @@ export function createErrorOccurrencesQueryBuilder(
settings
);
}
+
+export const ErrorOccurrencesByVersionQueryResult = z.object({
+ error_fingerprint: z.string(),
+ task_version: z.string(),
+ bucket_epoch: z.number(),
+ count: z.number(),
+});
+
+export type ErrorOccurrencesByVersionQueryResult = z.infer<
+ typeof ErrorOccurrencesByVersionQueryResult
+>;
+
+/**
+ * Creates a query builder for bucketed error occurrence counts grouped by task_version.
+ * Used for stacked-by-version activity charts on the error detail page.
+ */
+export function createErrorOccurrencesByVersionQueryBuilder(
+ ch: ClickhouseReader,
+ intervalExpr: string,
+ settings?: ClickHouseSettings
+): ClickhouseQueryBuilder<typeof ErrorOccurrencesByVersionQueryResult> {
+ return new ClickhouseQueryBuilder(
+ "getErrorOccurrencesByVersion",
+ `
+ SELECT
+ error_fingerprint,
+ task_version,
+ toUnixTimestamp(toStartOfInterval(minute, ${intervalExpr})) as bucket_epoch,
+ sum(count) as count
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ ch,
+ ErrorOccurrencesByVersionQueryResult,
+ settings
+ );
+}
+
+// ---------------------------------------------------------------------------
+// Alert evaluator – active errors since a timestamp
+// ---------------------------------------------------------------------------
+
+export const ActiveErrorsSinceQueryResult = z.object({
+ environment_id: z.string(),
+ task_identifier: z.string(),
+ error_fingerprint: z.string(),
+ error_type: z.string(),
+ error_message: z.string(),
+ sample_stack_trace: z.string(),
+ first_seen: z.string(),
+ last_seen: z.string(),
+ occurrence_count: z.number(),
+});
+
+export type ActiveErrorsSinceQueryResult = z.infer<typeof ActiveErrorsSinceQueryResult>;
+
+/**
+ * Query builder for fetching all errors active since a given timestamp.
+ * Returns errors with last_seen > scheduledAt, grouped by env/task/fingerprint.
+ * Used by the error alert evaluator to find new issues, regressions, and un-ignored errors.
+ */
+export function getActiveErrorsSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getActiveErrorsSince",
+ baseQuery: `
+ SELECT
+ environment_id,
+ task_identifier,
+ error_fingerprint,
+ any(error_type) as error_type,
+ any(error_message) as error_message,
+ any(sample_stack_trace) as sample_stack_trace,
+ toString(toUnixTimestamp64Milli(min(first_seen))) as first_seen,
+ toString(toUnixTimestamp64Milli(max(last_seen))) as last_seen,
+ toUInt64(sumMerge(occurrence_count)) as occurrence_count
+ FROM trigger_dev.errors_v1
+ `,
+ schema: ActiveErrorsSinceQueryResult,
+ settings,
+ });
+}
+
+export const OccurrenceCountsSinceQueryResult = z.object({
+ environment_id: z.string(),
+ task_identifier: z.string(),
+ error_fingerprint: z.string(),
+ occurrences_since: z.number(),
+});
+
+export type OccurrenceCountsSinceQueryResult = z.infer<typeof OccurrenceCountsSinceQueryResult>;
+
+/**
+ * Query builder for occurrence counts since a given timestamp, grouped by error.
+ * Used by the alert evaluator to check ignore thresholds.
+ */
+export function getOccurrenceCountsSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getOccurrenceCountsSince",
+ baseQuery: `
+ SELECT
+ environment_id,
+ task_identifier,
+ error_fingerprint,
+ sum(count) as occurrences_since
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ schema: OccurrenceCountsSinceQueryResult,
+ settings,
+ });
+}
+
+// ---------------------------------------------------------------------------
+// Alert evaluator helpers – occurrence rate & count since timestamp
+// ---------------------------------------------------------------------------
+
+export const ErrorOccurrenceTotalCountResult = z.object({
+ total_count: z.number(),
+});
+
+export type ErrorOccurrenceTotalCountResult = z.infer<typeof ErrorOccurrenceTotalCountResult>;
+
+/**
+ * Query builder for summing occurrences since a given timestamp.
+ * Used by the alert evaluator to check total-count-based ignore thresholds.
+ */
+export function getOccurrenceCountSinceQueryBuilder(
+ ch: ClickhouseReader,
+ settings?: ClickHouseSettings
+) {
+ return ch.queryBuilder({
+ name: "getOccurrenceCountSince",
+ baseQuery: `
+ SELECT
+ sum(count) as total_count
+ FROM trigger_dev.error_occurrences_v1
+ `,
+ schema: ErrorOccurrenceTotalCountResult,
+ settings,
+ });
+}
diff --git a/internal-packages/clickhouse/src/index.ts b/internal-packages/clickhouse/src/index.ts
index 99d22a5a18e..c6b8858fa9c 100644
--- a/internal-packages/clickhouse/src/index.ts
+++ b/internal-packages/clickhouse/src/index.ts
@@ -40,7 +40,11 @@ import {
getErrorHourlyOccurrences,
getErrorOccurrencesListQueryBuilder,
createErrorOccurrencesQueryBuilder,
+ createErrorOccurrencesByVersionQueryBuilder,
getErrorAffectedVersionsQueryBuilder,
+ getOccurrenceCountSinceQueryBuilder,
+ getActiveErrorsSinceQueryBuilder,
+ getOccurrenceCountsSinceQueryBuilder,
} from "./errors.js";
export { msToClickHouseInterval } from "./intervals.js";
import { Logger, type LogLevel } from "@trigger.dev/core/logger";
@@ -273,6 +277,11 @@ export class ClickHouse {
occurrencesListQueryBuilder: getErrorOccurrencesListQueryBuilder(this.reader),
createOccurrencesQueryBuilder: (intervalExpr: string) =>
createErrorOccurrencesQueryBuilder(this.reader, intervalExpr),
+ createOccurrencesByVersionQueryBuilder: (intervalExpr: string) =>
+ createErrorOccurrencesByVersionQueryBuilder(this.reader, intervalExpr),
+ occurrenceCountSinceQueryBuilder: getOccurrenceCountSinceQueryBuilder(this.reader),
+ activeErrorsSinceQueryBuilder: getActiveErrorsSinceQueryBuilder(this.reader),
+ occurrenceCountsSinceQueryBuilder: getOccurrenceCountsSinceQueryBuilder(this.reader),
};
}
}
diff --git a/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql b/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql
new file mode 100644
index 00000000000..0510505b6ae
--- /dev/null
+++ b/internal-packages/database/prisma/migrations/20260306102053_error_group_state/migration.sql
@@ -0,0 +1,53 @@
+-- CreateEnum
+CREATE TYPE "public"."ErrorGroupStatus" AS ENUM ('UNRESOLVED', 'RESOLVED', 'IGNORED');
+
+-- AlterEnum
+ALTER TYPE "public"."ProjectAlertType" ADD VALUE IF NOT EXISTS 'ERROR_GROUP';
+
+-- CreateTable
+CREATE TABLE
+ "public"."ErrorGroupState" (
+ "id" TEXT NOT NULL,
+ "organizationId" TEXT NOT NULL,
+ "projectId" TEXT NOT NULL,
+ "environmentId" TEXT,
+ "taskIdentifier" TEXT NOT NULL,
+ "errorFingerprint" TEXT NOT NULL,
+ "status" "public"."ErrorGroupStatus" NOT NULL DEFAULT 'UNRESOLVED',
+ "ignoredUntil" TIMESTAMP(3),
+ "ignoredUntilOccurrenceRate" INTEGER,
+ "ignoredUntilTotalOccurrences" INTEGER,
+ "ignoredAtOccurrenceCount" BIGINT,
+ "ignoredAt" TIMESTAMP(3),
+ "ignoredReason" TEXT,
+ "ignoredByUserId" TEXT,
+ "resolvedAt" TIMESTAMP(3),
+ "resolvedInVersion" TEXT,
+ "resolvedBy" TEXT,
+ "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "updatedAt" TIMESTAMP(3) NOT NULL,
+ CONSTRAINT "ErrorGroupState_pkey" PRIMARY KEY ("id")
+ );
+
+-- CreateIndex
+CREATE UNIQUE INDEX "ErrorGroupState_environmentId_taskIdentifier_errorFingerpri_key" ON "public"."ErrorGroupState" (
+ "environmentId",
+ "taskIdentifier",
+ "errorFingerprint"
+);
+
+-- CreateIndex
+CREATE INDEX "ErrorGroupState_environmentId_status_idx" ON "public"."ErrorGroupState" ("environmentId", "status");
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_organizationId_fkey" FOREIGN KEY ("organizationId") REFERENCES "public"."Organization" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "public"."Project" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "public"."ErrorGroupState" ADD CONSTRAINT "ErrorGroupState_environmentId_fkey" FOREIGN KEY ("environmentId") REFERENCES "public"."RuntimeEnvironment" ("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AlterTable
+ALTER TABLE "public"."ProjectAlertChannel"
+ADD COLUMN "errorAlertConfig" JSONB;
\ No newline at end of file
diff --git a/internal-packages/database/prisma/schema.prisma b/internal-packages/database/prisma/schema.prisma
index a0ff9aee690..1de0aaf1ddf 100644
--- a/internal-packages/database/prisma/schema.prisma
+++ b/internal-packages/database/prisma/schema.prisma
@@ -61,11 +61,10 @@ model User {
backupCodes MfaBackupCode[]
bulkActions BulkActionGroup[]
- impersonationsPerformed ImpersonationAuditLog[] @relation("ImpersonationAdmin")
- impersonationsReceived ImpersonationAuditLog[] @relation("ImpersonationTarget")
- customerQueries CustomerQuery[]
- metricsDashboards MetricsDashboard[]
-
+ impersonationsPerformed ImpersonationAuditLog[] @relation("ImpersonationAdmin")
+ impersonationsReceived ImpersonationAuditLog[] @relation("ImpersonationTarget")
+ customerQueries CustomerQuery[]
+ metricsDashboards MetricsDashboard[]
platformNotifications PlatformNotification[]
platformNotificationInteractions PlatformNotificationInteraction[]
}
@@ -233,7 +232,8 @@ model Organization {
metricsDashboards MetricsDashboard[]
prompts Prompt[]
- platformNotifications PlatformNotification[]
+ platformNotifications PlatformNotification[]
+ errorGroupStates ErrorGroupState[]
}
model OrgMember {
@@ -353,6 +353,7 @@ model RuntimeEnvironment {
BulkActionGroup BulkActionGroup[]
customerQueries CustomerQuery[]
prompts Prompt[]
+ errorGroupStates ErrorGroupState[]
@@unique([projectId, slug, orgMemberId])
@@unique([projectId, shortcode])
@@ -426,8 +427,8 @@ model Project {
metricsDashboards MetricsDashboard[]
llmModels LlmModel[]
prompts Prompt[]
-
platformNotifications PlatformNotification[]
+ errorGroupStates ErrorGroupState[]
}
enum ProjectVersion {
@@ -2116,6 +2117,8 @@ model ProjectAlertChannel {
alertTypes ProjectAlertType[]
environmentTypes RuntimeEnvironmentType[] @default([STAGING, PRODUCTION])
+ errorAlertConfig Json?
+
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade)
projectId String
@@ -2170,6 +2173,7 @@ enum ProjectAlertType {
TASK_RUN_ATTEMPT
DEPLOYMENT_FAILURE
DEPLOYMENT_SUCCESS
+ ERROR_GROUP
}
enum ProjectAlertStatus {
@@ -2839,3 +2843,83 @@ model PlatformNotificationInteraction {
@@unique([notificationId, userId])
}
+
+enum ErrorGroupStatus {
+ UNRESOLVED
+ RESOLVED
+ IGNORED
+}
+
+/**
+ * Error group state is used to track when a user has interacted with an error (ignored/resolved)
+ * The actual error data is in ClickHouse.
+ */
+model ErrorGroupState {
+ id String @id @default(cuid())
+
+ organization Organization @relation(fields: [organizationId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+ organizationId String
+
+ project Project @relation(fields: [projectId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+ projectId String
+
+ /**
+ * You can ignore/resolve an error across all environments, or specific ones
+ */
+ environment RuntimeEnvironment? @relation(fields: [environmentId], references: [id], onDelete: Cascade, onUpdate: Cascade)
+ environmentId String?
+
+ taskIdentifier String
+ errorFingerprint String
+
+ status ErrorGroupStatus @default(UNRESOLVED)
+
+ /**
+ * Error is ignored until this date
+ */
+ ignoredUntil DateTime?
+ /**
+ * Error is ignored until this occurrence rate
+ */
+ ignoredUntilOccurrenceRate Int?
+ /**
+ * Error is ignored until this total occurrences
+ */
+ ignoredUntilTotalOccurrences Int?
+
+ /// Total occurrence count at the time the error was ignored (from ClickHouse).
+ /// Used with ignoredUntilTotalOccurrences to compute occurrences since ignoring.
+ ignoredAtOccurrenceCount BigInt?
+
+ /**
+ * Error was ignored at this date
+ */
+ ignoredAt DateTime?
+ /**
+ * Reason for ignoring the error
+ */
+ ignoredReason String?
+ /**
+ * User who ignored the error
+ */
+ ignoredByUserId String?
+
+ /**
+ * Error was resolved at this date
+ */
+ resolvedAt DateTime?
+ /**
+ * Error was resolved in this version
+ */
+ resolvedInVersion String?
+ /**
+ * User who resolved the error
+ */
+ resolvedBy String?
+
+ createdAt DateTime @default(now())
+ updatedAt DateTime @updatedAt
+
+ @@unique([environmentId, taskIdentifier, errorFingerprint])
+ @@index([environmentId, status])
+}
diff --git a/internal-packages/emails/emails/alert-error-group.tsx b/internal-packages/emails/emails/alert-error-group.tsx
new file mode 100644
index 00000000000..f584f06edba
--- /dev/null
+++ b/internal-packages/emails/emails/alert-error-group.tsx
@@ -0,0 +1,114 @@
+import {
+ Body,
+ CodeBlock,
+ Container,
+ Head,
+ Html,
+ Link,
+ Preview,
+ Text,
+ dracula,
+} from "@react-email/components";
+import { z } from "zod";
+import { Footer } from "./components/Footer";
+import { Image } from "./components/Image";
+import { anchor, container, h1, main, paragraphLight, paragraphTight } from "./components/styles";
+import React from "react";
+
+export const AlertErrorGroupEmailSchema = z.object({
+ email: z.literal("alert-error-group"),
+ classification: z.enum(["new_issue", "regression", "unignored"]),
+ taskIdentifier: z.string(),
+ environment: z.string(),
+ error: z.object({
+ message: z.string(),
+ type: z.string().optional(),
+ stackTrace: z.string().optional(),
+ }),
+ occurrenceCount: z.number(),
+ errorLink: z.string().url(),
+ organization: z.string(),
+ project: z.string(),
+});
+
+type AlertErrorGroupEmailProps = z.infer<typeof AlertErrorGroupEmailSchema>;
+
+const classificationLabels: Record<AlertErrorGroupEmailProps["classification"], string> = {
+ new_issue: "New error",
+ regression: "Regression",
+ unignored: "Error resurfaced",
+};
+
+const previewDefaults: AlertErrorGroupEmailProps = {
+ email: "alert-error-group",
+ classification: "new_issue",
+ taskIdentifier: "my-task",
+ environment: "Production",
+ error: {
+ message: "Cannot read property 'foo' of undefined",
+ type: "TypeError",
+ stackTrace: "TypeError: Cannot read property 'foo' of undefined\n at Object.",
+ },
+ occurrenceCount: 42,
+ errorLink: "https://trigger.dev",
+ organization: "my-organization",
+ project: "my-project",
+};
+
+export default function Email(props: AlertErrorGroupEmailProps) {
+ const {
+ classification,
+ taskIdentifier,
+ environment,
+ error,
+ occurrenceCount,
+ errorLink,
+ organization,
+ project,
+ } = {
+ ...previewDefaults,
+ ...props,
+ };
+
+ const label = classificationLabels[classification] ?? "Error alert";
+
+ return (
+
+
+
+ {`${organization}: [${label}] ${error.type ?? "Error"} in ${taskIdentifier} (${environment})`}
+
+
+
+
+ {label}: {error.type ?? "Error"} in {taskIdentifier}
+
+ Organization: {organization}
+ Project: {project}
+ Task: {taskIdentifier}
+ Environment: {environment}
+ Occurrences: {occurrenceCount}
+
+ {error.message}
+ {error.stackTrace && (
+ <CodeBlock code={error.stackTrace} theme={dracula} lineNumbers={false} language="log" />
+ )}
+
+ Investigate this error
+
+
+
+
+
+
+
+ );
+}
diff --git a/internal-packages/emails/src/index.tsx b/internal-packages/emails/src/index.tsx
index e43e60f3f4c..a1bd00d03cf 100644
--- a/internal-packages/emails/src/index.tsx
+++ b/internal-packages/emails/src/index.tsx
@@ -2,6 +2,9 @@ import { ReactElement } from "react";
import { z } from "zod";
import AlertAttemptFailureEmail, { AlertAttemptEmailSchema } from "../emails/alert-attempt-failure";
+import AlertErrorGroupEmail, {
+ AlertErrorGroupEmailSchema,
+} from "../emails/alert-error-group";
import AlertRunFailureEmail, { AlertRunEmailSchema } from "../emails/alert-run-failure";
import { setGlobalBasePath } from "../emails/components/BasePath";
import AlertDeploymentFailureEmail, {
@@ -31,6 +34,7 @@ export const DeliverEmailSchema = z
InviteEmailSchema,
AlertRunEmailSchema,
AlertAttemptEmailSchema,
+ AlertErrorGroupEmailSchema,
AlertDeploymentFailureEmailSchema,
AlertDeploymentSuccessEmailSchema,
MfaEnabledEmailSchema,
@@ -114,6 +118,18 @@ export class EmailClient {
component: ,
};
}
+ case "alert-error-group": {
+ const classLabel =
+ data.classification === "new_issue"
+ ? "New error"
+ : data.classification === "regression"
+ ? "Regression"
+ : "Error resurfaced";
+ return {
+ subject: `[${data.organization}] ${classLabel}: ${data.error.type ?? "Error"} in ${data.taskIdentifier} [${data.environment}]`,
+ component: <AlertErrorGroupEmail {...data} />,
+ };
+ }
case "alert-deployment-failure": {
return {
subject: `[${data.organization}] Deployment ${data.version} [${data.environment}] failed: ${data.error.name}`,
diff --git a/packages/core/src/v3/schemas/webhooks.ts b/packages/core/src/v3/schemas/webhooks.ts
index 047ea98c4b3..b5ed927602e 100644
--- a/packages/core/src/v3/schemas/webhooks.ts
+++ b/packages/core/src/v3/schemas/webhooks.ts
@@ -190,6 +190,62 @@ export type AlertWebhookDeploymentSuccessObject = z.infer<
>;
export type AlertWebhookDeploymentFailedObject = z.infer;
+/** Represents an error group alert webhook payload */
+export const AlertWebhookErrorGroupObject = z.object({
+ /** Classification of the error alert */
+ classification: z.enum(["new_issue", "regression", "unignored"]),
+ /** Error information */
+ error: z.object({
+ /** Error fingerprint identifier */
+ fingerprint: z.string(),
+ /** Error type */
+ type: z.string(),
+ /** Error message */
+ message: z.string(),
+ /** Sample stack trace */
+ stackTrace: z.string().optional(),
+ /** When the error was first seen */
+ firstSeen: z.coerce.date(),
+ /** When the error was last seen */
+ lastSeen: z.coerce.date(),
+ /** Number of occurrences */
+ occurrenceCount: z.number(),
+ /** Task identifier where the error occurred */
+ taskIdentifier: z.string(),
+ }),
+ /** Environment information */
+ environment: z.object({
+ /** Environment ID */
+ id: z.string(),
+ /** Environment name */
+ name: z.string(),
+ }),
+ /** Organization information */
+ organization: z.object({
+ /** Organization ID */
+ id: z.string(),
+ /** Organization slug */
+ slug: z.string(),
+ /** Organization name */
+ name: z.string(),
+ }),
+ /** Project information */
+ project: z.object({
+ /** Project ID */
+ id: z.string(),
+ /** Project reference */
+ ref: z.string(),
+ /** Project slug */
+ slug: z.string(),
+ /** Project name */
+ name: z.string(),
+ }),
+ /** URL to view the error in the dashboard */
+ dashboardUrl: z.string(),
+});
+
+export type AlertWebhookErrorGroupObject = z.infer<typeof AlertWebhookErrorGroupObject>;
+
/** Common properties for all webhooks */
const commonProperties = {
/** Webhook ID */
@@ -220,9 +276,16 @@ export const Webhook = z.discriminatedUnion("type", [
type: z.literal("alert.deployment.failed"),
object: AlertWebhookDeploymentFailedObject,
}),
+ /** Error group alert webhook */
+ z.object({
+ ...commonProperties,
+ type: z.literal("alert.error"),
+ object: AlertWebhookErrorGroupObject,
+ }),
]);
export type Webhook = z.infer<typeof Webhook>;
export type RunFailedWebhook = Extract<Webhook, { type: "alert.run.failed" }>;
export type DeploymentSuccessWebhook = Extract<Webhook, { type: "alert.deployment.success" }>;
export type DeploymentFailedWebhook = Extract<Webhook, { type: "alert.deployment.failed" }>;
+export type ErrorWebhook = Extract<Webhook, { type: "alert.error" }>;