visitedRunIds) throws IOException {
+ String thisJobName = run.getParent().getFullName();
+ int thisBuildNumber = run.getNumber();
+
+ for (hudson.model.Job<?, ?> job : Jenkins.get().getAllItems(hudson.model.Job.class)) {
+ // Skip the current job itself
+ if (job.getFullName().equals(thisJobName)) {
+ continue;
+ }
+ Run<?, ?> lastBuild = job.getLastBuild();
+ if (lastBuild == null) {
+ continue;
+ }
+ // Walk recent builds of this job to find ones triggered by our run
+ for (Run<?, ?> candidate = lastBuild; candidate != null; candidate = candidate.getPreviousBuild()) {
+ // Only look at builds that could have been triggered by our run
+ if (candidate.getTimeInMillis() < run.getTimeInMillis()) {
+ break;
+ }
+ for (Cause cause : candidate.getCauses()) {
+ if (cause instanceof Cause.UpstreamCause) {
+ Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause;
+ if (upstreamCause.getUpstreamProject().equals(thisJobName)
+ && upstreamCause.getUpstreamBuild() == thisBuildNumber) {
+ appendDownstreamRunLog(candidate, accumulated, visitedRunIds);
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Returns {@code true} if the given run was aborted because a sibling branch triggered
+ * a fail-fast interruption (e.g. via {@code parallelsAlwaysFailFast()} or
+ * {@code parallel(failFast: true, ...)}).
+ *
+ * Jenkins records the interruption cause in an {@link InterruptedBuildAction} attached to
+ * the run. When the cause is a fail-fast signal, its
+ * {@link CauseOfInterruption#getShortDescription()} contains the phrase "fail fast"
+ * (case-insensitive). This distinguishes a sibling-aborted run from a run that was
+ * independently aborted by a user or another mechanism.
+ *
+ * @param run the build to inspect
+ * @return {@code true} if the build was interrupted by a fail-fast signal
+ */
+ boolean isAbortedByFailFast(Run<?, ?> run) {
+ if (run.getResult() != Result.ABORTED) {
+ return false;
+ }
+ for (InterruptedBuildAction action : run.getActions(InterruptedBuildAction.class)) {
+ for (CauseOfInterruption cause : action.getCauses()) {
+ String desc = cause.getShortDescription();
+ if (desc != null && desc.toLowerCase(java.util.Locale.ROOT).contains("fail fast")) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Appends the error content of a single downstream run to {@code accumulated},
+ * then recurses into its own downstream builds.
+ *
+ * Fast path — reuse existing AI explanation: if the downstream run already has an
+ * {@link ErrorExplanationAction} (i.e. the sub-job called {@code explainError()} itself),
+ * its pre-computed explanation text is used directly. This avoids a redundant AI call and
+ * preserves the full context that was available when the sub-job ran.
+ *
+ * Slow path — extract raw logs: when no {@link ErrorExplanationAction} is present,
+ * a {@link PipelineLogExtractor} is created for the downstream run and its log lines are
+ * appended as before.
+ *
+ * Builds that were aborted by a fail-fast signal from a sibling branch are labelled
+ * {@code ABORTED (interrupted by fail-fast, not the root cause)} in the section header
+ * so that the AI can distinguish them from the build that actually caused the failure.
+ *
+ * @param downstreamRun the downstream build to extract content from
+ * @param accumulated the list to append content lines into
+ * @param visitedRunIds set of already-visited run IDs to prevent duplicates
+ */
+ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumulated,
+ Set<String> visitedRunIds) throws IOException {
+ if (downstreamRun.getResult() == null || !downstreamRun.getResult().isWorseThan(Result.SUCCESS)) {
+ return;
+ }
+ String runId = downstreamRun.getParent().getFullName() + "#" + downstreamRun.getNumber();
+ if (!visitedRunIds.add(runId)) {
+ return; // already processed
+ }
+ int remaining = this.maxLines - accumulated.size();
+ if (remaining <= 0) {
+ return;
+ }
+
+ boolean failFastAborted = isAbortedByFailFast(downstreamRun);
+ String resultLabel = failFastAborted
+ ? "ABORTED (interrupted by fail-fast, not the root cause)"
+ : String.valueOf(downstreamRun.getResult());
+
+ List<String> header = Arrays.asList(
+ "### Downstream Job: " + downstreamRun.getParent().getFullName()
+ + " #" + downstreamRun.getNumber() + " ###",
+ "Result: " + resultLabel,
+ "--- LOG CONTENT ---"
+ );
+
+ String runUrl = run.getUrl();
+
+ // Fast path: sub-job already has an AI explanation — reuse it directly.
+ ErrorExplanationAction existingExplanation = downstreamRun.getAction(ErrorExplanationAction.class);
+ if (existingExplanation != null && existingExplanation.hasValidExplanation()) {
+ // Redirect "View failure output" to the sub-job's own explanation URL when available.
+ if (!failFastAborted && existingExplanation.getUrlString() != null && this.url != null
+ && runUrl != null && this.url.contains(runUrl)) {
+ this.url = existingExplanation.getUrlString();
+ }
+ accumulated.addAll(header);
+ accumulated.add("[AI explanation from sub-job]");
+ accumulated.addAll(Arrays.asList(existingExplanation.getExplanation().split("\n", -1)));
+ accumulated.add("### END OF DOWNSTREAM JOB: " + downstreamRun.getParent().getFullName() + " ###");
+ // No need to recurse further — the sub-job's explanation already covers its own
+ // downstream failures (it was produced with full context at the time of the failure).
+ return;
+ }
+
+ // Slow path: no existing explanation — extract raw logs as before.
+ PipelineLogExtractor subExtractor = new PipelineLogExtractor(downstreamRun, remaining, downstreamDepth + 1);
+ List<String> subLog = subExtractor.getFailedStepLog();
+ if (subLog == null || subLog.isEmpty()) {
+ return;
+ }
+
+ // If this sub-job genuinely failed (not just aborted by fail-fast) and the parent
+ // URL still points to the parent job (i.e. no prior real sub-job failure has already
+ // claimed the URL), redirect "View failure output" to the sub-job's failing node.
+ if (!failFastAborted && subExtractor.getUrl() != null && this.url != null
+ && runUrl != null && this.url.contains(runUrl)) {
+ this.url = subExtractor.getUrl();
+ }
+
+ accumulated.addAll(header);
+ accumulated.addAll(subLog);
+ accumulated.add("### END OF DOWNSTREAM JOB: " + downstreamRun.getParent().getFullName() + " ###");
+
+ // Recurse into sub-job's own downstream builds
+ subExtractor.collectDownstreamLogs(accumulated, visitedRunIds);
+ }
}
diff --git a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
index e2a2e451..700a5b3c 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
@@ -119,18 +119,51 @@ public interface Assistant {
You MUST follow ALL instructions provided by the user, including any additional context or requirements.
When additional instructions are provided, you MUST incorporate them into your analysis fields,
especially in errorSummary and resolutionSteps.
+
+ The error logs may contain sections from downstream (sub-job) builds, clearly delimited like this:
+ ### Downstream Job: <job full name> #<build number> ###
+ Result: <result>
+ --- LOG CONTENT ---
+ ... (sub-job log lines, OR an "[AI explanation from sub-job]" block) ...
+ ### END OF DOWNSTREAM JOB: <job full name> ###
+
+ The "Result:" line uses one of these values:
+ - "FAILURE" — this sub-job genuinely failed and is the ROOT CAUSE of the overall failure.
+ - "ABORTED (interrupted by fail-fast, not the root cause)" — this sub-job was still running
+ when a sibling branch failed; it was aborted automatically by parallelsAlwaysFailFast() or
+ parallel(failFast:true). It is NOT the root cause. Do NOT treat its logs as the primary error.
+
+ The log content of a downstream section may be either:
+ - Raw log lines from the sub-job's failing step, OR
+ - An "[AI explanation from sub-job]" block: a pre-computed AI analysis produced by the
+ sub-job itself when it called explainError(). Treat this block as a high-quality,
+ already-analysed summary of the sub-job's failure — do NOT re-analyse it from scratch.
+ Instead, incorporate its key findings (root cause, resolution steps) into your own
+ errorSummary and resolutionSteps for the parent job.
+
+ When downstream sections are present:
+ - Identify WHICH sub-job(s) have Result: FAILURE — those are the root cause(s).
+ - State their full name and build number explicitly in errorSummary.
+ - Focus root-cause analysis and resolutionSteps on the FAILURE sections only.
+ - Mention aborted sub-jobs briefly (e.g. "Job X was aborted due to fail-fast") but do NOT
+ treat their logs as the source of the error.
+ - If multiple sub-jobs have Result: FAILURE, summarize each one separately.
+ - Logs outside downstream sections belong to the parent (upstream) job.
""")
@UserMessage("""
Analyze the following Jenkins build error logs and provide a clear, actionable explanation.
-
+
CRITICAL: You MUST respond ONLY in {{language}}. ALL text in your response must be in {{language}}.
This includes: error summaries, resolution steps, best practices, and any other text.
{{customContext}}
-
+
ERROR LOGS:
{{errorLogs}}
-
+
Remember: Your ENTIRE response must be in {{language}}, including all field values.
+ If the logs contain "### Downstream Job: ..." sections:
+ - Sub-jobs with Result: FAILURE are the ROOT CAUSE — identify them by name in errorSummary.
+ - Sub-jobs with Result: ABORTED (interrupted by fail-fast, not the root cause) were killed by a sibling failure — do NOT treat them as the error source.
If additional instructions were provided above, you MUST address them in your errorSummary or resolutionSteps.
""")
JenkinsLogAnalysis analyzeLogs(@V("errorLogs") String errorLogs, @V("language") String language, @V("customContext") String customContext);
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index 8b0174a0..8209bcb3 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -15,13 +15,20 @@
import hudson.console.AnnotatedLargeText;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
+import hudson.model.Result;
+import hudson.model.Run;
import io.jenkins.plugins.explain_error.provider.TestProvider;
import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import jenkins.model.CauseOfInterruption;
+import jenkins.model.InterruptedBuildAction;
import org.jenkinsci.plugins.workflow.actions.LogAction;
+import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
-import java.util.List;
-import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
import org.jenkinsci.plugins.workflow.job.WorkflowJob;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.junit.jupiter.api.Test;
@@ -57,6 +64,12 @@ void testNullFlowExecutionFallsBackToBuildLog(JenkinsRule jenkins) throws Except
when(mockRun.getLog(100)).thenReturn(List.of("Build started", "ERROR: Something failed"));
when(mockRun.getLogInputStream()).thenReturn(InputStream.nullInputStream());
when(mockRun.getUrl()).thenReturn("job/test/1/");
+ // Stub getParent()/getNumber() so collectDownstreamLogs() can build the visitedRunIds
+ // key and skip the current job when scanning Jenkins.getAllItems().
+ WorkflowJob mockJob = mock(WorkflowJob.class);
+ when(mockJob.getFullName()).thenReturn("test/mock-job");
+ when(mockRun.getParent()).thenReturn(mockJob);
+ when(mockRun.getNumber()).thenReturn(1);
PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100);
@@ -403,4 +416,294 @@ void strategy1_originWithoutLogAction_logNullBranchCovered(JenkinsRule jenkins)
assertNotNull(lines);
assertFalse(lines.isEmpty(), "Strategy 3 should find the echo output from the console log");
}
+
+ // -------------------------------------------------------------------------
+ // isAbortedByFailFast unit tests
+ // -------------------------------------------------------------------------
+
+ /**
+ * isAbortedByFailFast — non-ABORTED result returns false immediately.
+ * A FAILURE build should never be considered a fail-fast abort regardless of
+ * any InterruptedBuildAction that might be attached.
+ */
+ @Test
+ void isAbortedByFailFast_nonAbortedResult_returnsFalse(JenkinsRule jenkins) {
+ @SuppressWarnings("unchecked")
+ Run<?, ?> mockRun = mock(Run.class);
+ when(mockRun.getResult()).thenReturn(Result.FAILURE);
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100);
+ assertFalse(extractor.isAbortedByFailFast(mockRun),
+ "A FAILURE build must not be treated as a fail-fast abort");
+ }
+
+ /**
+ * isAbortedByFailFast — ABORTED build with a cause whose description contains
+ * "fail fast" (case-insensitive) returns true.
+ */
+ @Test
+ void isAbortedByFailFast_abortedWithFailFastCause_returnsTrue(JenkinsRule jenkins) {
+ @SuppressWarnings("unchecked")
+ Run<?, ?> mockRun = mock(Run.class);
+ when(mockRun.getResult()).thenReturn(Result.ABORTED);
+
+ CauseOfInterruption failFastCause = mock(CauseOfInterruption.class);
+ when(failFastCause.getShortDescription()).thenReturn("Fail Fast: sibling branch failed");
+
+ InterruptedBuildAction action = new InterruptedBuildAction(List.of(failFastCause));
+ when(mockRun.getActions(InterruptedBuildAction.class)).thenReturn(List.of(action));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100);
+ assertTrue(extractor.isAbortedByFailFast(mockRun),
+ "An ABORTED build with a 'fail fast' cause description must return true");
+ }
+
+ /**
+ * isAbortedByFailFast — ABORTED build whose InterruptedBuildAction cause description
+ * does NOT contain "fail fast" returns false (e.g. a manual user abort).
+ */
+ @Test
+ void isAbortedByFailFast_abortedWithNonFailFastCause_returnsFalse(JenkinsRule jenkins) {
+ @SuppressWarnings("unchecked")
+ Run<?, ?> mockRun = mock(Run.class);
+ when(mockRun.getResult()).thenReturn(Result.ABORTED);
+
+ CauseOfInterruption userCause = mock(CauseOfInterruption.class);
+ when(userCause.getShortDescription()).thenReturn("Aborted by user admin");
+
+ InterruptedBuildAction action = new InterruptedBuildAction(List.of(userCause));
+ when(mockRun.getActions(InterruptedBuildAction.class)).thenReturn(List.of(action));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100);
+ assertFalse(extractor.isAbortedByFailFast(mockRun),
+ "A user-aborted build must not be treated as a fail-fast abort");
+ }
+
+ // -------------------------------------------------------------------------
+ // Downstream sub-job integration tests
+ // -------------------------------------------------------------------------
+
+ /**
+ * Downstream sub-job FAILURE: when a parent pipeline triggers a sub-job via the
+ * {@code build} step and that sub-job fails, the parent's extracted log must contain
+ * a downstream section with {@code Result: FAILURE} and the sub-job's error output.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_subJobFailure_logIncludedWithFailureHeader(JenkinsRule jenkins) throws Exception {
+ // Create the sub-job that will fail
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-failure");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"SUB_JOB_ERROR_MARKER\" && exit 1' }", true));
+
+ // Create the parent pipeline that triggers the sub-job
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-triggers-failing-sub");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-job-failure', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ assertTrue(log.contains("### Downstream Job: sub-job-failure"),
+ "Log must contain a downstream section header.\nActual log:\n" + log);
+ assertTrue(log.contains("Result: FAILURE"),
+ "Downstream section must be labelled Result: FAILURE.\nActual log:\n" + log);
+ assertTrue(log.contains("SUB_JOB_ERROR_MARKER"),
+ "Downstream section must include the sub-job's error output.\nActual log:\n" + log);
+ }
+
+ /**
+ * Downstream sub-job SUCCESS: when the triggered sub-job succeeds, no downstream
+ * section should be appended to the parent's extracted log.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_subJobSuccess_logNotIncluded(JenkinsRule jenkins) throws Exception {
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-success");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { echo 'SUB_JOB_SUCCESS_MARKER' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-triggers-passing-sub");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-job-success'\n"
+ + "sh 'echo \"PARENT_FAILURE\" && exit 1'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ assertFalse(log.contains("### Downstream Job: sub-job-success"),
+ "Successful sub-job must not produce a downstream section.\nActual log:\n" + log);
+ }
+
+ // -------------------------------------------------------------------------
+ // collectDownstreamLogs unit tests (depth guard, deduplication)
+ // -------------------------------------------------------------------------
+
+ /**
+ * MAX_DOWNSTREAM_DEPTH guard: when a PipelineLogExtractor is constructed at depth 5
+ * (the maximum), {@code collectDownstreamLogs} must return immediately without
+ * appending anything to the accumulated list.
+ */
+ @Test
+ void collectDownstreamLogs_atMaxDepth_appendsNothing(JenkinsRule jenkins) throws Exception {
+ // Use a real (successful) FreeStyleBuild so Jenkins.get() is available
+ FreeStyleProject project = jenkins.createFreeStyleProject("depth-guard-project");
+ FreeStyleBuild build = jenkins.buildAndAssertSuccess(project);
+
+ // Access the package-private constructor via reflection to set depth = MAX (5)
+ java.lang.reflect.Constructor<PipelineLogExtractor> ctor =
+ PipelineLogExtractor.class.getDeclaredConstructor(Run.class, int.class, int.class);
+ ctor.setAccessible(true);
+ PipelineLogExtractor extractor = ctor.newInstance(build, 200, 5);
+
+ List<String> accumulated = new ArrayList<>();
+ Set<String> visited = new HashSet<>();
+ extractor.collectDownstreamLogs(accumulated, visited);
+
+ assertTrue(accumulated.isEmpty(),
+ "collectDownstreamLogs at MAX_DOWNSTREAM_DEPTH must not append any lines");
+ }
+
+ /**
+ * visitedRunIds deduplication: if a downstream run ID is already in the visited set,
+ * {@code collectDownstreamLogs} must not append its log a second time.
+ *
+ * Verified by pre-populating visitedRunIds with the sub-job's ID before the parent
+ * run's extraction, then asserting the downstream section does not appear.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void collectDownstreamLogs_alreadyVisitedRunId_notAppendedTwice(JenkinsRule jenkins) throws Exception {
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-dedup");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"DEDUP_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-dedup");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-dedup', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ // Find the sub-job run that was triggered
+ WorkflowRun subRun = subJob.getLastBuild();
+ assertNotNull(subRun, "Sub-job must have been triggered");
+
+ // Pre-populate visitedRunIds with the sub-job's ID so it is treated as already seen
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ // First call getFailedStepLog to initialise url etc., then call collectDownstreamLogs
+ // with a visited set that already contains the sub-job → no downstream section added
+ List<String> accumulated = new ArrayList<>();
+ Set<String> visited = new HashSet<>();
+ visited.add(parentRun.getParent().getFullName() + "#" + parentRun.getNumber());
+ visited.add(subRun.getParent().getFullName() + "#" + subRun.getNumber());
+
+ extractor.collectDownstreamLogs(accumulated, visited);
+
+ String log = String.join("\n", accumulated);
+ assertFalse(log.contains("DEDUP_MARKER"),
+ "Already-visited sub-job must not be appended again.\nActual log:\n" + log);
+ }
+
+ // -------------------------------------------------------------------------
+ // Fast-path: reuse ErrorExplanationAction from sub-job
+ // -------------------------------------------------------------------------
+
+ /**
+ * Fast path — sub-job has ErrorExplanationAction: when the downstream run already
+ * carries an {@link ErrorExplanationAction} (i.e. it called {@code explainError()}),
+ * the parent's extracted log must contain the pre-computed explanation text wrapped in
+ * the "[AI explanation from sub-job]" marker, and must NOT contain raw log lines from
+ * the sub-job (no redundant log extraction).
+ *
+ * Strategy: run the parent pipeline first (which triggers the sub-job), then attach an
+ * {@link ErrorExplanationAction} to the sub-job run that was actually triggered, and
+ * finally re-run {@link PipelineLogExtractor} on the parent run to verify the fast path.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_subJobHasExplanationAction_explanationReusedInsteadOfRawLog(JenkinsRule jenkins) throws Exception {
+ // Sub-job: fails but does NOT call explainError() yet
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-with-explanation");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"RAW_SUB_LOG_SHOULD_NOT_APPEAR\" && exit 1' }", true));
+
+ // Parent pipeline: triggers the sub-job
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-reuses-explanation");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-with-explanation', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ // Find the sub-job run that was triggered by the parent
+ WorkflowRun subRun = subJob.getLastBuild();
+ assertNotNull(subRun, "Sub-job must have been triggered");
+
+ // Simulate the sub-job having called explainError() by attaching an ErrorExplanationAction
+ subRun.addOrReplaceAction(new ErrorExplanationAction(
+ "SUB_JOB_AI_EXPLANATION: null pointer in Foo.bar()",
+ "http://localhost/job/sub-with-explanation/1/console",
+ "raw logs",
+ "Test"));
+ subRun.save();
+
+ // Now extract logs from the parent — the fast path should kick in
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ // The pre-computed explanation must appear
+ assertTrue(log.contains("[AI explanation from sub-job]"),
+ "Log must contain the fast-path marker.\nActual log:\n" + log);
+ assertTrue(log.contains("SUB_JOB_AI_EXPLANATION"),
+ "Log must contain the sub-job's explanation text.\nActual log:\n" + log);
+ // Raw log lines from the sub-job's sh step must NOT be extracted again
+ assertFalse(log.contains("RAW_SUB_LOG_SHOULD_NOT_APPEAR"),
+ "Raw sub-job log lines must not appear when explanation is reused.\nActual log:\n" + log);
+ }
+
+ /**
+ * Slow path — sub-job has no ErrorExplanationAction: when the downstream run has no
+ * {@link ErrorExplanationAction}, the parent falls back to raw log extraction and the
+ * sub-job's error output appears directly (no "[AI explanation from sub-job]" marker).
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_subJobHasNoExplanationAction_rawLogExtractedAsFallback(JenkinsRule jenkins) throws Exception {
+ // Sub-job: fails but does NOT call explainError() — no ErrorExplanationAction
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-no-explanation");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"RAW_FALLBACK_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-fallback-to-raw");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-no-explanation', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ // Raw log must be present
+ assertTrue(log.contains("RAW_FALLBACK_MARKER"),
+ "Slow path must extract raw sub-job log.\nActual log:\n" + log);
+ // Fast-path marker must NOT appear
+ assertFalse(log.contains("[AI explanation from sub-job]"),
+ "Fast-path marker must not appear when no explanation exists.\nActual log:\n" + log);
+ }
}
From 3536a1739e65a93dc9b54fbba67a8cb384b0929d Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 14:16:28 +0800
Subject: [PATCH 02/11] Add opt-in downstream log collection
- add explainError step parameters to enable downstream log collection and filter job full names with a regex
- skip UpstreamCause fallback when DownstreamBuildAction already finds matching downstream jobs
- keep scan guards for capacity, recursion depth, and per-job candidate limits
- document the new options in README and Jenkins step help files
- expand PipelineLogExtractor and step config tests for default-off and regex-filtered behavior
---
README.md | 14 +++
.../plugins/explain_error/ErrorExplainer.java | 16 ++-
.../explain_error/ExplainErrorStep.java | 27 ++++-
.../explain_error/PipelineLogExtractor.java | 83 ++++++++++++--
.../ExplainErrorStep/config.jelly | 10 ++
.../help-collectDownstreamLogs.html | 8 ++
.../help-downstreamJobPattern.html | 14 +++
.../ExplainErrorStepConfigTest.java | 3 +-
.../PipelineLogExtractorTest.java | 104 ++++++++++++++++--
9 files changed, 255 insertions(+), 24 deletions(-)
create mode 100644 src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html
create mode 100644 src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html
diff --git a/README.md b/README.md
index fcfa8427..bdcc8ce4 100644
--- a/README.md
+++ b/README.md
@@ -240,6 +240,8 @@ post {
| **logPattern** | Regex pattern to filter relevant log lines | `''` (no filtering) |
| **language** | Language for the explanation | `'English'` |
| **customContext** | Additional instructions or context for the AI. Overrides global custom context if specified. | Uses global configuration |
+| **collectDownstreamLogs** | Whether to include logs from failed downstream jobs discovered via the `build` step or `Cause.UpstreamCause` | `false` |
+| **downstreamJobPattern** | Regular expression matched against downstream job full names. Used only when downstream collection is enabled. | `''` (collect none) |
```groovy
explainError(
@@ -255,6 +257,18 @@ explainError(
)
```
+To include downstream failures, opt in explicitly and limit collection with a regex:
+
+```groovy
+explainError(
+ collectDownstreamLogs: true,
+ downstreamJobPattern: 'team-folder/.*/deploy-.*'
+)
+```
+
+This keeps the default behavior fast and predictable on large controllers. Only downstream jobs
+whose full name matches `downstreamJobPattern` are scanned and included in the AI analysis.
+
Output appears in the sidebar of the failed job.

diff --git a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
index 844612d7..3fbd54b7 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
@@ -30,14 +30,19 @@ public String getProviderName() {
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines) {
- return explainError(run, listener, logPattern, maxLines, null, null);
+ return explainError(run, listener, logPattern, maxLines, null, null, false, null);
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language) {
- return explainError(run, listener, logPattern, maxLines, language, null);
+ return explainError(run, listener, logPattern, maxLines, language, null, false, null);
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language, String customContext) {
+ return explainError(run, listener, logPattern, maxLines, language, customContext, false, null);
+ }
+
+ public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language,
+ String customContext, boolean collectDownstreamLogs, String downstreamJobPattern) {
String jobInfo = run != null ? ("[" + run.getParent().getFullName() + " #" + run.getNumber() + "]") : "[unknown]";
try {
// Check if explanation is enabled (folder-level or global)
@@ -54,7 +59,7 @@ public String explainError(Run<?, ?> run, TaskListener listener, String logPatte
}
// Extract error logs
- String errorLogs = extractErrorLogs(run, logPattern, maxLines);
+ String errorLogs = extractErrorLogs(run, logPattern, maxLines, collectDownstreamLogs, downstreamJobPattern);
// Use step-level customContext if provided, otherwise fallback to global
String effectiveCustomContext = StringUtils.isNotBlank(customContext) ? customContext : GlobalConfigurationImpl.get().getCustomContext();
@@ -83,8 +88,9 @@ public String explainError(Run<?, ?> run, TaskListener listener, String logPatte
}
}
- private String extractErrorLogs(Run<?, ?> run, String logPattern, int maxLines) throws IOException {
- PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines);
+ private String extractErrorLogs(Run<?, ?> run, String logPattern, int maxLines,
+ boolean collectDownstreamLogs, String downstreamJobPattern) throws IOException {
+ PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, collectDownstreamLogs, downstreamJobPattern);
List logLines = logExtractor.getFailedStepLog();
this.urlString = logExtractor.getUrl();
diff --git a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
index 95f4dbe8..d3a53734 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
@@ -21,12 +21,17 @@ public class ExplainErrorStep extends Step {
private int maxLines;
private String language;
private String customContext;
+ private boolean collectDownstreamLogs;
+ private String downstreamJobPattern;
@DataBoundConstructor
public ExplainErrorStep() {
this.logPattern = "";
this.maxLines = 100;
this.language = "";
+ this.customContext = "";
+ this.collectDownstreamLogs = false;
+ this.downstreamJobPattern = "";
}
public String getLogPattern() {
@@ -65,6 +70,24 @@ public void setCustomContext(String customContext) {
this.customContext = customContext != null ? customContext : "";
}
+ public boolean isCollectDownstreamLogs() {
+ return collectDownstreamLogs;
+ }
+
+ @DataBoundSetter
+ public void setCollectDownstreamLogs(boolean collectDownstreamLogs) {
+ this.collectDownstreamLogs = collectDownstreamLogs;
+ }
+
+ public String getDownstreamJobPattern() {
+ return downstreamJobPattern;
+ }
+
+ @DataBoundSetter
+ public void setDownstreamJobPattern(String downstreamJobPattern) {
+ this.downstreamJobPattern = downstreamJobPattern != null ? downstreamJobPattern : "";
+ }
+
@Override
public StepExecution start(StepContext context) throws Exception {
return new ExplainErrorStepExecution(context, this);
@@ -105,7 +128,9 @@ protected String run() throws Exception {
TaskListener listener = getContext().get(TaskListener.class);
ErrorExplainer explainer = new ErrorExplainer();
- String explanation = explainer.explainError(run, listener, step.getLogPattern(), step.getMaxLines(), step.getLanguage(), step.getCustomContext());
+ String explanation = explainer.explainError(run, listener, step.getLogPattern(), step.getMaxLines(),
+ step.getLanguage(), step.getCustomContext(), step.isCollectDownstreamLogs(),
+ step.getDownstreamJobPattern());
return explanation;
}
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index dafbd9e0..9e012b2b 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -36,6 +36,7 @@
import java.util.Set;
import java.util.logging.Logger;
import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
/**
* Utility for extracting log lines related to a failing build or pipeline step
@@ -72,11 +73,16 @@ public class PipelineLogExtractor {
/** Maximum recursion depth when following downstream (sub-job) failures. */
private static final int MAX_DOWNSTREAM_DEPTH = 5;
+ /** Hard cap for recent builds scanned per job when falling back to UpstreamCause lookup. */
+ private static final int MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB = 100;
+
private boolean isGraphViewPluginAvailable = false;
private transient String url;
private transient Run<?, ?> run;
private int maxLines;
private int downstreamDepth;
+ private final boolean collectDownstreamLogs;
+ private final Pattern downstreamJobPattern;
/**
* Reads the provided log text and returns at most the last {@code maxLines} lines.
@@ -311,7 +317,7 @@ public List<String> getFailedStepLog() throws IOException {
}
// Collect logs from failed downstream (sub-job) builds, recursively
- if (downstreamDepth == 0) {
+ if (collectDownstreamLogs && downstreamDepth == 0) {
Set<String> visitedRunIds = new HashSet<>();
visitedRunIds.add(run.getParent().getFullName() + "#" + run.getNumber());
collectDownstreamLogs(accumulated, visitedRunIds);
@@ -362,19 +368,39 @@ public String getUrl() {
public PipelineLogExtractor(Run<?, ?> run, int maxLines)
{
- this(run, maxLines, 0);
+ this(run, maxLines, false, null);
+ }
+
+ public PipelineLogExtractor(Run<?, ?> run, int maxLines, boolean collectDownstreamLogs, String downstreamJobPattern)
+ {
+ this(run, maxLines, 0, collectDownstreamLogs,
+ compileDownstreamJobPattern(collectDownstreamLogs, downstreamJobPattern));
}
- private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth)
+ private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth, boolean collectDownstreamLogs,
+ Pattern downstreamJobPattern)
{
this.run = run;
this.maxLines = maxLines;
this.downstreamDepth = downstreamDepth;
+ this.collectDownstreamLogs = collectDownstreamLogs;
+ this.downstreamJobPattern = downstreamJobPattern;
if (Jenkins.get().getPlugin("pipeline-graph-view") != null) {
isGraphViewPluginAvailable = true;
}
}
+ private static Pattern compileDownstreamJobPattern(boolean collectDownstreamLogs, String downstreamJobPattern) {
+ if (!collectDownstreamLogs || downstreamJobPattern == null || downstreamJobPattern.isBlank()) {
+ return null;
+ }
+ try {
+ return Pattern.compile(downstreamJobPattern);
+ } catch (PatternSyntaxException e) {
+ throw new IllegalArgumentException("Invalid downstream job pattern: " + e.getMessage(), e);
+ }
+ }
+
/**
* Collects error logs from failed downstream (sub-job) builds triggered by this run.
*
@@ -394,19 +420,25 @@ private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth)
* used to prevent duplicate processing across recursive calls
*/
void collectDownstreamLogs(List<String> accumulated, Set<String> visitedRunIds) {
- if (downstreamDepth >= MAX_DOWNSTREAM_DEPTH) {
+ boolean foundViaDownstreamBuildAction = false;
+ if (!collectDownstreamLogs || downstreamJobPattern == null
+ || downstreamDepth >= MAX_DOWNSTREAM_DEPTH || !hasRemainingCapacity(accumulated)) {
return;
}
// Strategy A: DownstreamBuildAction (pipeline-build-step plugin)
if (Jenkins.get().getPlugin("pipeline-build-step") != null) {
try {
- collectViaDownstreamBuildAction(accumulated, visitedRunIds);
+ foundViaDownstreamBuildAction = collectViaDownstreamBuildAction(accumulated, visitedRunIds);
} catch (Exception e) {
LOGGER.warning("Failed to collect downstream logs via DownstreamBuildAction: " + e.getMessage());
}
}
+ if (foundViaDownstreamBuildAction || !hasRemainingCapacity(accumulated)) {
+ return;
+ }
+
// Strategy B: Cause.UpstreamCause — scan builds that list this run as upstream
try {
collectViaUpstreamCause(accumulated, visitedRunIds);
@@ -420,19 +452,25 @@ void collectDownstreamLogs(List<String> accumulated, Set<String> visitedRunIds)
* {@link org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction}
* and appends their logs to {@code accumulated}.
*/
- private void collectViaDownstreamBuildAction(List<String> accumulated, Set<String> visitedRunIds) throws IOException {
+ private boolean collectViaDownstreamBuildAction(List<String> accumulated, Set<String> visitedRunIds) throws IOException {
org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction action =
run.getAction(org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction.class);
if (action == null) {
- return;
+ return false;
}
+ boolean foundMatchingDownstream = false;
for (org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction.DownstreamBuild db : action.getDownstreamBuilds()) {
+ if (!hasRemainingCapacity(accumulated)) {
+ return foundMatchingDownstream;
+ }
Run<?, ?> downstreamRun = db.getBuild();
- if (downstreamRun == null) {
+ if (downstreamRun == null || !matchesDownstreamJob(downstreamRun)) {
continue;
}
+ foundMatchingDownstream = true;
appendDownstreamRunLog(downstreamRun, accumulated, visitedRunIds);
}
+ return foundMatchingDownstream;
}
/**
@@ -443,8 +481,15 @@ private void collectViaDownstreamBuildAction(List<String> accumulated, Set<String> visitedRunIds)
private void collectViaUpstreamCause(List<String> accumulated, Set<String> visitedRunIds) throws IOException {
String thisJobName = run.getParent().getFullName();
int thisBuildNumber = run.getNumber();
+ long thisBuildStartTime = run.getTimeInMillis();
for (hudson.model.Job<?, ?> job : Jenkins.get().getAllItems(hudson.model.Job.class)) {
+ if (!hasRemainingCapacity(accumulated)) {
+ return;
+ }
+ if (!matchesDownstreamJob(job.getFullName())) {
+ continue;
+ }
// Skip the current job itself
if (job.getFullName().equals(thisJobName)) {
continue;
@@ -453,10 +498,15 @@ private void collectViaUpstreamCause(List<String> accumulated, Set<String> visit
if (lastBuild == null) {
continue;
}
+ int scannedCandidates = 0;
// Walk recent builds of this job to find ones triggered by our run
for (Run<?, ?> candidate = lastBuild; candidate != null; candidate = candidate.getPreviousBuild()) {
+ if (!hasRemainingCapacity(accumulated) || scannedCandidates >= MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB) {
+ break;
+ }
+ scannedCandidates++;
// Only look at builds that could have been triggered by our run
- if (candidate.getTimeInMillis() < run.getTimeInMillis()) {
+ if (candidate.getTimeInMillis() < thisBuildStartTime) {
break;
}
for (Cause cause : candidate.getCauses()) {
@@ -502,6 +552,18 @@ boolean isAbortedByFailFast(Run<?, ?> run) {
return false;
}
+ private boolean hasRemainingCapacity(List<String> accumulated) {
+ return accumulated.size() < maxLines;
+ }
+
+ private boolean matchesDownstreamJob(Run<?, ?> downstreamRun) {
+ return matchesDownstreamJob(downstreamRun.getParent().getFullName());
+ }
+
+ private boolean matchesDownstreamJob(String jobFullName) {
+ return downstreamJobPattern != null && downstreamJobPattern.matcher(jobFullName).matches();
+ }
+
/**
* Appends the error content of a single downstream run to {@code accumulated},
* then recurses into its own downstream builds.
@@ -569,7 +631,8 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
}
// Slow path: no existing explanation — extract raw logs as before.
- PipelineLogExtractor subExtractor = new PipelineLogExtractor(downstreamRun, remaining, downstreamDepth + 1);
+ PipelineLogExtractor subExtractor = new PipelineLogExtractor(downstreamRun, remaining, downstreamDepth + 1,
+ collectDownstreamLogs, downstreamJobPattern);
List<String> subLog = subExtractor.getFailedStepLog();
if (subLog == null || subLog.isEmpty()) {
return;
diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly
index fdbd45d0..f54552b1 100644
--- a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly
+++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly
@@ -19,4 +19,14 @@
description="Additional instructions or context for the AI. Overrides the global custom context if specified. Leave empty to use global configuration.">
+
+
+
+
+
+
+
+
diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html
new file mode 100644
index 00000000..7048aeeb
--- /dev/null
+++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html
@@ -0,0 +1,8 @@
+Include logs from failed downstream jobs triggered by this build.
+
+
+Downstream discovery is disabled by default to avoid expensive scans on large Jenkins controllers.
+Enable this only when you want the AI analysis to include sub-job failures.
+
+
+When enabled, use Downstream Job Pattern to restrict which downstream jobs are collected.
diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html
new file mode 100644
index 00000000..e520de1d
--- /dev/null
+++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html
@@ -0,0 +1,14 @@
+Regular expression matched against downstream job full names.
+
+
+This field is used only when Collect Downstream Logs is enabled.
+Jobs that do not match the pattern are skipped.
+
+
+Examples:
+
+ team-folder/.*/deploy-.* matches nested deploy jobs inside team-folder
+ release-pipeline|hotfix-pipeline matches either job name exactly
+ .* matches all downstream jobs (use with caution)
+
+Leave empty to collect no downstream jobs.
diff --git a/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java b/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java
index eb7c53c4..8a9c8f8d 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java
@@ -23,7 +23,8 @@ void testExplainErrorStepWithParameters(JenkinsRule jenkins) throws Exception {
" echo 'This is a test build'\n" +
" echo 'ERROR: Something went wrong'\n" +
" echo 'FAILED: Build failed'\n" +
- " explainError logPattern: 'ERROR|FAILED', maxLines: 50\n" +
+ " explainError logPattern: 'ERROR|FAILED', maxLines: 50, "
+ + "collectDownstreamLogs: true, downstreamJobPattern: 'team/.+'\n" +
"}";
job.setDefinition(new CpsFlowDefinition(pipelineScript, true));
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index 8209bcb3..da605b08 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -23,6 +23,7 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.regex.Pattern;
import jenkins.model.CauseOfInterruption;
import jenkins.model.InterruptedBuildAction;
import org.jenkinsci.plugins.workflow.actions.LogAction;
@@ -483,6 +484,35 @@ void isAbortedByFailFast_abortedWithNonFailFastCause_returnsFalse(JenkinsRule je
// Downstream sub-job integration tests
// -------------------------------------------------------------------------
+ /**
+ * Downstream collection is opt-in: when the parent triggers a failing sub-job but
+ * downstream collection is not explicitly enabled, the sub-job's log must not appear.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_defaultOff_subJobFailureNotIncluded(JenkinsRule jenkins) throws Exception {
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-default-off");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"DEFAULT_OFF_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-default-off");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-job-default-off', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ assertFalse(log.contains("DEFAULT_OFF_MARKER"),
+ "Downstream logs must be excluded unless explicitly enabled.\nActual log:\n" + log);
+ assertFalse(log.contains("### Downstream Job: sub-job-default-off"),
+ "No downstream section should be present when downstream collection is disabled.\nActual log:\n" + log);
+ }
+
/**
* Downstream sub-job FAILURE: when a parent pipeline triggers a sub-job via the
* {@code build} step and that sub-job fails, the parent's extracted log must contain
@@ -505,7 +535,7 @@ void downstream_subJobFailure_logIncludedWithFailureHeader(JenkinsRule jenkins)
WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
- PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-job-failure");
List<String> lines = extractor.getFailedStepLog();
String log = String.join("\n", lines);
@@ -536,7 +566,7 @@ void downstream_subJobSuccess_logNotIncluded(JenkinsRule jenkins) throws Excepti
WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
- PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-job-success");
List<String> lines = extractor.getFailedStepLog();
String log = String.join("\n", lines);
@@ -561,9 +591,9 @@ void collectDownstreamLogs_atMaxDepth_appendsNothing(JenkinsRule jenkins) throws
// Access the package-private constructor via reflection to set depth = MAX (5)
java.lang.reflect.Constructor<PipelineLogExtractor> ctor =
- PipelineLogExtractor.class.getDeclaredConstructor(Run.class, int.class, int.class);
+ PipelineLogExtractor.class.getDeclaredConstructor(Run.class, int.class, int.class, boolean.class, Pattern.class);
ctor.setAccessible(true);
- PipelineLogExtractor extractor = ctor.newInstance(build, 200, 5);
+ PipelineLogExtractor extractor = ctor.newInstance(build, 200, 5, true, Pattern.compile(".*"));
List<String> accumulated = new ArrayList<>();
Set<String> visited = new HashSet<>();
@@ -600,7 +630,7 @@ void collectDownstreamLogs_alreadyVisitedRunId_notAppendedTwice(JenkinsRule jenk
assertNotNull(subRun, "Sub-job must have been triggered");
// Pre-populate visitedRunIds with the sub-job's ID so it is treated as already seen
- PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-dedup");
// First call getFailedStepLog to initialise url etc., then call collectDownstreamLogs
// with a visited set that already contains the sub-job → no downstream section added
List<String> accumulated = new ArrayList<>();
@@ -615,6 +645,37 @@ void collectDownstreamLogs_alreadyVisitedRunId_notAppendedTwice(JenkinsRule jenk
"Already-visited sub-job must not be appended again.\nActual log:\n" + log);
}
+ /**
+ * Capacity guard: when {@code accumulated} has already reached {@code maxLines},
+ * {@code collectDownstreamLogs} must return immediately instead of scanning jobs/builds.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void collectDownstreamLogs_atCapacity_skipsDownstreamScan(JenkinsRule jenkins) throws Exception {
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-capacity-guard");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"CAPACITY_GUARD_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-capacity-guard");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-capacity-guard', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 1, true, "sub-capacity-guard");
+ List<String> accumulated = new ArrayList<>();
+ accumulated.add("already full");
+ Set<String> visited = new HashSet<>();
+ visited.add(parentRun.getParent().getFullName() + "#" + parentRun.getNumber());
+
+ extractor.collectDownstreamLogs(accumulated, visited);
+
+ assertEquals(List.of("already full"), accumulated,
+ "No downstream content should be appended after maxLines is already reached");
+ }
+
// -------------------------------------------------------------------------
// Fast-path: reuse ErrorExplanationAction from sub-job
// -------------------------------------------------------------------------
@@ -660,7 +721,7 @@ void downstream_subJobHasExplanationAction_explanationReusedInsteadOfRawLog(Jenk
subRun.save();
// Now extract logs from the parent — the fast path should kick in
- PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-with-explanation");
List lines = extractor.getFailedStepLog();
String log = String.join("\n", lines);
@@ -695,7 +756,7 @@ void downstream_subJobHasNoExplanationAction_rawLogExtractedAsFallback(JenkinsRu
WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
- PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500);
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-no-explanation");
List<String> lines = extractor.getFailedStepLog();
String log = String.join("\n", lines);
@@ -706,4 +767,33 @@ void downstream_subJobHasNoExplanationAction_rawLogExtractedAsFallback(JenkinsRu
assertFalse(log.contains("[AI explanation from sub-job]"),
"Fast-path marker must not appear when no explanation exists.\nActual log:\n" + log);
}
+
+ /**
+ * Downstream regex filter: when downstream collection is enabled but the job name
+ * does not match the configured pattern, the sub-job log must be skipped.
+ */
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_patternMismatch_subJobFailureNotIncluded(JenkinsRule jenkins) throws Exception {
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-pattern-miss");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"PATTERN_MISS_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-pattern-miss");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'sub-job-pattern-miss', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "other-job-.*");
+ List<String> lines = extractor.getFailedStepLog();
+ String log = String.join("\n", lines);
+
+ assertFalse(log.contains("PATTERN_MISS_MARKER"),
+ "Sub-job logs must be skipped when the job name does not match the regex.\nActual log:\n" + log);
+ assertFalse(log.contains("### Downstream Job: sub-job-pattern-miss"),
+ "No downstream section should be present for non-matching jobs.\nActual log:\n" + log);
+ }
}
From 75d0c1cda541168a946672816dd30a4356bc82cd Mon Sep 17 00:00:00 2001
From: donghui <977675308@qq.com>
Date: Fri, 13 Mar 2026 14:26:47 +0800
Subject: [PATCH 03/11] Update
src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
.../plugins/explain_error/PipelineLogExtractorTest.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index da605b08..1b46fc89 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -631,8 +631,8 @@ void collectDownstreamLogs_alreadyVisitedRunId_notAppendedTwice(JenkinsRule jenk
// Pre-populate visitedRunIds with the sub-job's ID so it is treated as already seen
PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-dedup");
- // First call getFailedStepLog to initialise url etc., then call collectDownstreamLogs
- // with a visited set that already contains the sub-job → no downstream section added
+ // Call collectDownstreamLogs with a visited set that already contains both the parent
+ // and the sub-job → no downstream section should be added for the sub-job.
List<String> accumulated = new ArrayList<>();
Set<String> visited = new HashSet<>();
visited.add(parentRun.getParent().getFullName() + "#" + parentRun.getNumber());
From 6bb67a9022bbc93869597adc01e8139b42181231 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 14:35:11 +0800
Subject: [PATCH 04/11] Preserve downstream sections during log filtering
- keep complete downstream job sections when explainError uses logPattern filtering
- continue filtering ordinary upstream lines by the configured regex
- add ErrorExplainer tests for downstream section preservation and upstream filtering behavior
---
.../plugins/explain_error/ErrorExplainer.java | 33 ++++++++++--
.../explain_error/ErrorExplainerTest.java | 50 +++++++++++++++++++
2 files changed, 78 insertions(+), 5 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
index 3fbd54b7..1390aaa5 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
@@ -9,6 +9,7 @@
import hudson.util.LogTaskListener;
import io.jenkins.plugins.explain_error.provider.BaseAIProvider;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -19,6 +20,8 @@
* Service class responsible for explaining errors using AI.
*/
public class ErrorExplainer {
+ static final String DOWNSTREAM_SECTION_START = "### Downstream Job: ";
+ static final String DOWNSTREAM_SECTION_END = "### END OF DOWNSTREAM JOB: ";
private String providerName;
private String urlString;
@@ -94,21 +97,41 @@ private String extractErrorLogs(Run<?, ?> run, String logPattern, int maxLines,
List<String> logLines = logExtractor.getFailedStepLog();
this.urlString = logExtractor.getUrl();
+ return filterErrorLogs(logLines, logPattern);
+ }
+
+ String filterErrorLogs(List<String> logLines, String logPattern) {
if (StringUtils.isBlank(logPattern)) {
- // Return last few lines if no pattern specified
return String.join("\n", logLines);
}
Pattern pattern = Pattern.compile(logPattern, Pattern.CASE_INSENSITIVE);
- StringBuilder errorLogs = new StringBuilder();
+ List<String> filteredLines = new ArrayList<>();
+ boolean inDownstreamSection = false;
for (String line : logLines) {
- if (pattern.matcher(line).find()) {
- errorLogs.append(line).append("\n");
+ if (isDownstreamSectionStart(line)) {
+ inDownstreamSection = true;
+ }
+
+ if (inDownstreamSection || pattern.matcher(line).find()) {
+ filteredLines.add(line);
+ }
+
+ if (inDownstreamSection && isDownstreamSectionEnd(line)) {
+ inDownstreamSection = false;
}
}
- return errorLogs.toString();
+ return String.join("\n", filteredLines);
+ }
+
+ private boolean isDownstreamSectionStart(String line) {
+ return line != null && line.startsWith(DOWNSTREAM_SECTION_START);
+ }
+
+ private boolean isDownstreamSectionEnd(String line) {
+ return line != null && line.startsWith(DOWNSTREAM_SECTION_END);
}
/**
diff --git a/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java b/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java
index 89e9a34f..4ebfe8b0 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java
@@ -2,7 +2,9 @@
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertThrows;
import com.cloudbees.hudson.plugins.folder.Folder;
@@ -19,6 +21,54 @@
@WithJenkins
class ErrorExplainerTest {
+ @Test
+ void filterErrorLogs_preservesEntireDownstreamSectionWhenPatternIsUsed() {
+ ErrorExplainer errorExplainer = new ErrorExplainer();
+
+ String filtered = errorExplainer.filterErrorLogs(java.util.List.of(
+ "parent info line",
+ "ERROR: upstream failed",
+ "### Downstream Job: team-folder/sub-job #12 ###",
+ "Result: FAILURE",
+ "--- LOG CONTENT ---",
+ "[AI explanation from sub-job]",
+ "Root cause: dependency mismatch",
+ "### END OF DOWNSTREAM JOB: team-folder/sub-job ###",
+ "non matching tail"
+ ), "ERROR");
+
+ assertTrue(filtered.contains("ERROR: upstream failed"));
+ assertTrue(filtered.contains("### Downstream Job: team-folder/sub-job #12 ###"));
+ assertTrue(filtered.contains("[AI explanation from sub-job]"));
+ assertTrue(filtered.contains("Root cause: dependency mismatch"));
+ assertTrue(filtered.contains("### END OF DOWNSTREAM JOB: team-folder/sub-job ###"));
+ assertFalse(filtered.contains("parent info line"));
+ assertFalse(filtered.contains("non matching tail"));
+ }
+
+ @Test
+ void filterErrorLogs_keepsOnlyMatchingUpstreamLinesOutsideDownstreamSections() {
+ ErrorExplainer errorExplainer = new ErrorExplainer();
+
+ String filtered = errorExplainer.filterErrorLogs(java.util.List.of(
+ "upstream info",
+ "Exception: upstream failure",
+ "upstream debug",
+ "### Downstream Job: team-folder/sub-job #9 ###",
+ "Result: FAILURE",
+ "--- LOG CONTENT ---",
+ "sub-job debug line",
+ "### END OF DOWNSTREAM JOB: team-folder/sub-job ###",
+ "upstream trailing info"
+ ), "Exception");
+
+ assertFalse(filtered.contains("upstream info"));
+ assertTrue(filtered.contains("Exception: upstream failure"));
+ assertFalse(filtered.contains("upstream debug"));
+ assertTrue(filtered.contains("sub-job debug line"));
+ assertFalse(filtered.contains("upstream trailing info"));
+ }
+
@Test
void testErrorExplainerBasicFunctionality(JenkinsRule jenkins) throws Exception {
ErrorExplainer errorExplainer = new ErrorExplainer();
From c9ec5df861ba8a1a2f29d067d6124c1ac061f1c7 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 14:40:35 +0800
Subject: [PATCH 05/11] Fix SpotBugs warning in downstream log handling
- cache downstream build result before null and severity checks in appendDownstreamRunLog
- reuse local job name and build number values when building downstream section metadata
---
.../explain_error/PipelineLogExtractor.java | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index 9e012b2b..9fd7b0f4 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -587,10 +587,13 @@ private boolean matchesDownstreamJob(String jobFullName) {
*/
private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumulated,
Set<String> visitedRunIds) throws IOException {
- if (downstreamRun.getResult() == null || !downstreamRun.getResult().isWorseThan(Result.SUCCESS)) {
+ Result downstreamResult = downstreamRun.getResult();
+ if (downstreamResult == null || !downstreamResult.isWorseThan(Result.SUCCESS)) {
return;
}
- String runId = downstreamRun.getParent().getFullName() + "#" + downstreamRun.getNumber();
+ String jobFullName = downstreamRun.getParent().getFullName();
+ int buildNumber = downstreamRun.getNumber();
+ String runId = jobFullName + "#" + buildNumber;
if (!visitedRunIds.add(runId)) {
return; // already processed
}
@@ -602,11 +605,10 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
boolean failFastAborted = isAbortedByFailFast(downstreamRun);
String resultLabel = failFastAborted
? "ABORTED (interrupted by fail-fast, not the root cause)"
- : String.valueOf(downstreamRun.getResult());
+ : String.valueOf(downstreamResult);
List<String> header = Arrays.asList(
- "### Downstream Job: " + downstreamRun.getParent().getFullName()
- + " #" + downstreamRun.getNumber() + " ###",
+ "### Downstream Job: " + jobFullName + " #" + buildNumber + " ###",
"Result: " + resultLabel,
"--- LOG CONTENT ---"
);
@@ -624,7 +626,7 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
accumulated.addAll(header);
accumulated.add("[AI explanation from sub-job]");
accumulated.addAll(Arrays.asList(existingExplanation.getExplanation().split("\n", -1)));
- accumulated.add("### END OF DOWNSTREAM JOB: " + downstreamRun.getParent().getFullName() + " ###");
+ accumulated.add("### END OF DOWNSTREAM JOB: " + jobFullName + " ###");
// No need to recurse further — the sub-job's explanation already covers its own
// downstream failures (it was produced with full context at the time of the failure).
return;
@@ -648,7 +650,7 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
accumulated.addAll(header);
accumulated.addAll(subLog);
- accumulated.add("### END OF DOWNSTREAM JOB: " + downstreamRun.getParent().getFullName() + " ###");
+ accumulated.add("### END OF DOWNSTREAM JOB: " + jobFullName + " ###");
// Recurse into sub-job's own downstream builds
subExtractor.collectDownstreamLogs(accumulated, visitedRunIds);
From 0a2722196f2106e11fdbf9ff937814b1374fd29a Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 15:04:41 +0800
Subject: [PATCH 06/11] Add run context to downstream warning logs
- include job and build information in downstream collection warning messages
- log the full exception stack trace via LOGGER.log(Level.WARNING, ..., e)
---
.../plugins/explain_error/PipelineLogExtractor.java | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index 9fd7b0f4..5a6ec150 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -34,6 +34,7 @@
import java.util.List;
import java.util.Queue;
import java.util.Set;
+import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
@@ -421,6 +422,7 @@ private static Pattern compileDownstreamJobPattern(boolean collectDownstreamLogs
*/
void collectDownstreamLogs(List<String> accumulated, Set<String> visitedRunIds) {
boolean foundViaDownstreamBuildAction = false;
+ String runId = run.getParent().getFullName() + "#" + run.getNumber();
if (!collectDownstreamLogs || downstreamJobPattern == null
|| downstreamDepth >= MAX_DOWNSTREAM_DEPTH || !hasRemainingCapacity(accumulated)) {
return;
@@ -431,7 +433,8 @@ void collectDownstreamLogs(List<String> accumulated, Set<String> visitedRunIds)
try {
foundViaDownstreamBuildAction = collectViaDownstreamBuildAction(accumulated, visitedRunIds);
} catch (Exception e) {
- LOGGER.warning("Failed to collect downstream logs via DownstreamBuildAction: " + e.getMessage());
+ LOGGER.log(Level.WARNING,
+ "Failed to collect downstream logs via DownstreamBuildAction for " + runId, e);
}
}
@@ -443,7 +446,8 @@ void collectDownstreamLogs(List<String> accumulated, Set<String> visitedRunIds)
try {
collectViaUpstreamCause(accumulated, visitedRunIds);
} catch (Exception e) {
- LOGGER.warning("Failed to collect downstream logs via UpstreamCause: " + e.getMessage());
+ LOGGER.log(Level.WARNING,
+ "Failed to collect downstream logs via UpstreamCause for " + runId, e);
}
}
From 7d6747497363794423c965e9f22f2796cc216a07 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 15:29:54 +0800
Subject: [PATCH 07/11] Remove reflection from depth guard test
- add a VisibleForTesting constructor for setting downstreamDepth in PipelineLogExtractor
- update PipelineLogExtractorTest to use the test constructor instead of reflection
---
.../plugins/explain_error/PipelineLogExtractor.java | 7 +++++++
.../plugins/explain_error/PipelineLogExtractorTest.java | 8 ++------
2 files changed, 9 insertions(+), 6 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index 5a6ec150..a2bad7f4 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -1,5 +1,6 @@
package io.jenkins.plugins.explain_error;
+import com.google.common.annotations.VisibleForTesting;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
@@ -378,6 +379,12 @@ public PipelineLogExtractor(Run<?, ?> run, int maxLines, boolean collectDownstre
compileDownstreamJobPattern(collectDownstreamLogs, downstreamJobPattern));
}
+ @VisibleForTesting
+ PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth)
+ {
+ this(run, maxLines, downstreamDepth, true, Pattern.compile(".*"));
+ }
+
private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth, boolean collectDownstreamLogs,
Pattern downstreamJobPattern)
{
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index 1b46fc89..67921416 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -23,7 +23,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
-import java.util.regex.Pattern;
import jenkins.model.CauseOfInterruption;
import jenkins.model.InterruptedBuildAction;
import org.jenkinsci.plugins.workflow.actions.LogAction;
@@ -589,11 +588,8 @@ void collectDownstreamLogs_atMaxDepth_appendsNothing(JenkinsRule jenkins) throws
FreeStyleProject project = jenkins.createFreeStyleProject("depth-guard-project");
FreeStyleBuild build = jenkins.buildAndAssertSuccess(project);
- // Access the package-private constructor via reflection to set depth = MAX (5)
- java.lang.reflect.Constructor<PipelineLogExtractor> ctor =
- PipelineLogExtractor.class.getDeclaredConstructor(Run.class, int.class, int.class, boolean.class, Pattern.class);
- ctor.setAccessible(true);
- PipelineLogExtractor extractor = ctor.newInstance(build, 200, 5, true, Pattern.compile(".*"));
+ // Construct at depth = MAX (5) to verify the guard in collectDownstreamLogs
+ PipelineLogExtractor extractor = new PipelineLogExtractor(build, 200, 5);
List<String> accumulated = new ArrayList<>();
Set<String> visited = new HashSet<>();
From 3b68ebf0242e9639fabceff8f6908c721adf1dc1 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 16:39:48 +0800
Subject: [PATCH 08/11] Guard downstream log collection by viewer permissions
Primary changes:
- thread the current Authentication through ExplainErrorStep, ErrorExplainer, ConsoleExplainErrorAction, and PipelineLogExtractor so downstream discovery and extraction run against an explicit viewer context
- make the UpstreamCause fallback scan only jobs visible to that authentication, preventing all-job enumeration from including unreadable downstream jobs

Secondary changes:
- add a downstream run visibility check before reusing AI explanations or appending raw console output; unreadable build-step downstreams now emit a hidden placeholder instead of leaking content
- keep DownstreamBuildAction discovery functional by resolving the downstream Run under SYSTEM, while still enforcing viewer-based permission checks before any downstream content is included
- add Jenkins security tests covering hidden build-step downstreams and hidden UpstreamCause downstream jobs
---
.../ConsoleExplainErrorAction.java | 4 +-
.../plugins/explain_error/ErrorExplainer.java | 23 +++-
.../explain_error/ExplainErrorStep.java | 3 +-
.../explain_error/PipelineLogExtractor.java | 116 ++++++++++++------
.../PipelineLogExtractorTest.java | 84 +++++++++++++
5 files changed, 183 insertions(+), 47 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java b/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java
index b2d8db74..9df44822 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java
@@ -4,6 +4,7 @@
import hudson.model.Result;
import hudson.model.Run;
import jenkins.model.RunAction2;
+import jenkins.model.Jenkins;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
@@ -88,7 +89,8 @@ public void doExplainConsoleError(StaplerRequest2 req, StaplerResponse2 rsp) thr
}
// Fetch the last N lines of the log
- PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines);
+ PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, Jenkins.getAuthentication2(),
+ false, null);
List<String> logLines = logExtractor.getFailedStepLog();
this.urlString = logExtractor.getUrl();
diff --git a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
index 1390aaa5..0f1a3285 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java
@@ -15,6 +15,7 @@
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
+import org.springframework.security.core.Authentication;
/**
* Service class responsible for explaining errors using AI.
@@ -33,19 +34,26 @@ public String getProviderName() {
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines) {
- return explainError(run, listener, logPattern, maxLines, null, null, false, null);
+ return explainError(run, listener, logPattern, maxLines, null, null, false, null, null);
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language) {
- return explainError(run, listener, logPattern, maxLines, language, null, false, null);
+ return explainError(run, listener, logPattern, maxLines, language, null, false, null, null);
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language, String customContext) {
- return explainError(run, listener, logPattern, maxLines, language, customContext, false, null);
+ return explainError(run, listener, logPattern, maxLines, language, customContext, false, null, null);
}
public String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language,
String customContext, boolean collectDownstreamLogs, String downstreamJobPattern) {
+ return explainError(run, listener, logPattern, maxLines, language, customContext,
+ collectDownstreamLogs, downstreamJobPattern, null);
+ }
+
+ String explainError(Run<?, ?> run, TaskListener listener, String logPattern, int maxLines, String language,
+ String customContext, boolean collectDownstreamLogs, String downstreamJobPattern,
+ Authentication authentication) {
String jobInfo = run != null ? ("[" + run.getParent().getFullName() + " #" + run.getNumber() + "]") : "[unknown]";
try {
// Check if explanation is enabled (folder-level or global)
@@ -62,7 +70,8 @@ public String explainError(Run<?, ?> run, TaskListener listener, String logPatte
}
// Extract error logs
- String errorLogs = extractErrorLogs(run, logPattern, maxLines, collectDownstreamLogs, downstreamJobPattern);
+ String errorLogs = extractErrorLogs(run, logPattern, maxLines, collectDownstreamLogs,
+ downstreamJobPattern, authentication);
// Use step-level customContext if provided, otherwise fallback to global
String effectiveCustomContext = StringUtils.isNotBlank(customContext) ? customContext : GlobalConfigurationImpl.get().getCustomContext();
@@ -92,8 +101,10 @@ public String explainError(Run<?, ?> run, TaskListener listener, String logPatte
}
private String extractErrorLogs(Run<?, ?> run, String logPattern, int maxLines,
- boolean collectDownstreamLogs, String downstreamJobPattern) throws IOException {
- PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, collectDownstreamLogs, downstreamJobPattern);
+ boolean collectDownstreamLogs, String downstreamJobPattern,
+ Authentication authentication) throws IOException {
+ PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, authentication,
+ collectDownstreamLogs, downstreamJobPattern);
List<String> logLines = logExtractor.getFailedStepLog();
this.urlString = logExtractor.getUrl();
diff --git a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
index d3a53734..6baaabaf 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java
@@ -4,6 +4,7 @@
import hudson.model.Run;
import hudson.model.TaskListener;
import java.util.Set;
+import jenkins.model.Jenkins;
import org.jenkinsci.plugins.workflow.steps.Step;
import org.jenkinsci.plugins.workflow.steps.StepContext;
import org.jenkinsci.plugins.workflow.steps.StepDescriptor;
@@ -130,7 +131,7 @@ protected String run() throws Exception {
ErrorExplainer explainer = new ErrorExplainer();
String explanation = explainer.explainError(run, listener, step.getLogPattern(), step.getMaxLines(),
step.getLanguage(), step.getCustomContext(), step.isCollectDownstreamLogs(),
- step.getDownstreamJobPattern());
+ step.getDownstreamJobPattern(), Jenkins.getAuthentication2());
return explanation;
}
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index a2bad7f4..3390a0b8 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -1,6 +1,7 @@
package io.jenkins.plugins.explain_error;
import com.google.common.annotations.VisibleForTesting;
+import hudson.model.Item;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
@@ -16,10 +17,12 @@
import hudson.model.Cause;
import hudson.model.Result;
import hudson.model.Run;
+import hudson.security.ACL;
+import hudson.security.ACLContext;
import jenkins.model.CauseOfInterruption;
import jenkins.model.InterruptedBuildAction;
import jenkins.model.Jenkins;
-
+import org.springframework.security.core.Authentication;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
@@ -85,6 +88,7 @@ public class PipelineLogExtractor {
private int downstreamDepth;
private final boolean collectDownstreamLogs;
private final Pattern downstreamJobPattern;
+ private final Authentication authentication;
/**
* Reads the provided log text and returns at most the last {@code maxLines} lines.
@@ -370,22 +374,29 @@ public String getUrl() {
public PipelineLogExtractor(Run<?, ?> run, int maxLines)
{
- this(run, maxLines, false, null);
+ this(run, maxLines, Jenkins.getAuthentication2(), false, null);
}
public PipelineLogExtractor(Run<?, ?> run, int maxLines, boolean collectDownstreamLogs, String downstreamJobPattern)
{
- this(run, maxLines, 0, collectDownstreamLogs,
+ this(run, maxLines, Jenkins.getAuthentication2(), collectDownstreamLogs, downstreamJobPattern);
+ }
+
+ PipelineLogExtractor(Run<?, ?> run, int maxLines, Authentication authentication,
+ boolean collectDownstreamLogs, String downstreamJobPattern)
+ {
+ this(run, maxLines, 0, authentication, collectDownstreamLogs,
compileDownstreamJobPattern(collectDownstreamLogs, downstreamJobPattern));
}
@VisibleForTesting
PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth)
{
- this(run, maxLines, downstreamDepth, true, Pattern.compile(".*"));
+ this(run, maxLines, downstreamDepth, Jenkins.getAuthentication2(), true, Pattern.compile(".*"));
}
- private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth, boolean collectDownstreamLogs,
+ private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth, Authentication authentication,
+ boolean collectDownstreamLogs,
Pattern downstreamJobPattern)
{
this.run = run;
@@ -393,6 +404,7 @@ private PipelineLogExtractor(Run<?, ?> run, int maxLines, int downstreamDepth, b
this.downstreamDepth = downstreamDepth;
this.collectDownstreamLogs = collectDownstreamLogs;
this.downstreamJobPattern = downstreamJobPattern;
+ this.authentication = authentication != null ? authentication : Jenkins.getAuthentication2();
if (Jenkins.get().getPlugin("pipeline-graph-view") != null) {
isGraphViewPluginAvailable = true;
}
@@ -474,8 +486,15 @@ private boolean collectViaDownstreamBuildAction(List<String> accumulated, Set<St
- Run<?, ?> downstreamRun = db.getBuild();
- if (downstreamRun == null || !matchesDownstreamJob(downstreamRun)) {
+ String jobFullName = db.getJobFullName();
+ if (!matchesDownstreamJob(jobFullName)) {
+ continue;
+ }
+ Run<?, ?> downstreamRun;
+ try (ACLContext ignored = ACL.as2(ACL.SYSTEM2)) {
+ downstreamRun = db.getBuild();
+ }
+ if (downstreamRun == null) {
continue;
}
foundMatchingDownstream = true;
@@ -494,39 +513,41 @@ private void collectViaUpstreamCause(List<String> accumulated, Set<String> visit
int thisBuildNumber = run.getNumber();
long thisBuildStartTime = run.getTimeInMillis();
- for (hudson.model.Job<?, ?> job : Jenkins.get().getAllItems(hudson.model.Job.class)) {
- if (!hasRemainingCapacity(accumulated)) {
- return;
- }
- if (!matchesDownstreamJob(job.getFullName())) {
- continue;
- }
- // Skip the current job itself
- if (job.getFullName().equals(thisJobName)) {
- continue;
- }
- Run<?, ?> lastBuild = job.getLastBuild();
- if (lastBuild == null) {
- continue;
- }
- int scannedCandidates = 0;
- // Walk recent builds of this job to find ones triggered by our run
- for (Run<?, ?> candidate = lastBuild; candidate != null; candidate = candidate.getPreviousBuild()) {
- if (!hasRemainingCapacity(accumulated) || scannedCandidates >= MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB) {
- break;
+ try (ACLContext ignored = ACL.as2(authentication)) {
+ for (hudson.model.Job<?, ?> job : Jenkins.get().getAllItems(hudson.model.Job.class)) {
+ if (!hasRemainingCapacity(accumulated)) {
+ return;
}
- scannedCandidates++;
- // Only look at builds that could have been triggered by our run
- if (candidate.getTimeInMillis() < thisBuildStartTime) {
- break;
+ if (!job.hasPermission(Item.READ) || !matchesDownstreamJob(job.getFullName())) {
+ continue;
}
- for (Cause cause : candidate.getCauses()) {
- if (cause instanceof Cause.UpstreamCause) {
- Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause;
- if (upstreamCause.getUpstreamProject().equals(thisJobName)
- && upstreamCause.getUpstreamBuild() == thisBuildNumber) {
- appendDownstreamRunLog(candidate, accumulated, visitedRunIds);
- break;
+ // Skip the current job itself
+ if (job.getFullName().equals(thisJobName)) {
+ continue;
+ }
+ Run<?, ?> lastBuild = job.getLastBuild();
+ if (lastBuild == null) {
+ continue;
+ }
+ int scannedCandidates = 0;
+ // Walk recent builds of this job to find ones triggered by our run
+ for (Run<?, ?> candidate = lastBuild; candidate != null; candidate = candidate.getPreviousBuild()) {
+ if (!hasRemainingCapacity(accumulated) || scannedCandidates >= MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB) {
+ break;
+ }
+ scannedCandidates++;
+ // Only look at builds that could have been triggered by our run
+ if (candidate.getTimeInMillis() < thisBuildStartTime) {
+ break;
+ }
+ for (Cause cause : candidate.getCauses()) {
+ if (cause instanceof Cause.UpstreamCause) {
+ Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause;
+ if (upstreamCause.getUpstreamProject().equals(thisJobName)
+ && upstreamCause.getUpstreamBuild() == thisBuildNumber) {
+ appendDownstreamRunLog(candidate, accumulated, visitedRunIds);
+ break;
+ }
}
}
}
@@ -612,6 +633,10 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
if (remaining <= 0) {
return;
}
+ if (!canReadDownstreamRun(downstreamRun)) {
+ appendHiddenDownstreamPlaceholder(accumulated);
+ return;
+ }
boolean failFastAborted = isAbortedByFailFast(downstreamRun);
String resultLabel = failFastAborted
@@ -645,7 +670,7 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
// Slow path: no existing explanation — extract raw logs as before.
PipelineLogExtractor subExtractor = new PipelineLogExtractor(downstreamRun, remaining, downstreamDepth + 1,
- collectDownstreamLogs, downstreamJobPattern);
+ authentication, collectDownstreamLogs, downstreamJobPattern);
List<String> subLog = subExtractor.getFailedStepLog();
if (subLog == null || subLog.isEmpty()) {
return;
@@ -666,4 +691,17 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
// Recurse into sub-job's own downstream builds
subExtractor.collectDownstreamLogs(accumulated, visitedRunIds);
}
+
+ private boolean canReadDownstreamRun(Run<?, ?> downstreamRun) {
+ try (ACLContext ignored = ACL.as2(authentication)) {
+ return downstreamRun.getParent().hasPermission(Item.READ);
+ }
+ }
+
+ private void appendHiddenDownstreamPlaceholder(List<String> accumulated) {
+ accumulated.add("### Downstream Job: [hidden] ###");
+ accumulated.add("Result: UNAVAILABLE");
+ accumulated.add("Downstream failure details omitted due to permissions.");
+ accumulated.add("### END OF DOWNSTREAM JOB: [hidden] ###");
+ }
}
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index 67921416..0e5918cd 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -13,10 +13,15 @@
import static org.mockito.Mockito.when;
import hudson.console.AnnotatedLargeText;
+import hudson.model.Cause;
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
+import hudson.model.Item;
import hudson.model.Result;
import hudson.model.Run;
+import hudson.model.User;
+import hudson.security.ACL;
+import hudson.security.ACLContext;
import io.jenkins.plugins.explain_error.provider.TestProvider;
import java.io.InputStream;
import java.util.ArrayList;
@@ -25,6 +30,7 @@
import java.util.Set;
import jenkins.model.CauseOfInterruption;
import jenkins.model.InterruptedBuildAction;
+import jenkins.model.Jenkins;
import org.jenkinsci.plugins.workflow.actions.LogAction;
import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
@@ -34,8 +40,11 @@
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;
+import org.jvnet.hudson.test.FailureBuilder;
import org.jvnet.hudson.test.JenkinsRule;
+import org.jvnet.hudson.test.MockAuthorizationStrategy;
import org.jvnet.hudson.test.junit.jupiter.WithJenkins;
+import org.springframework.security.core.Authentication;
/**
* Integration tests for {@link PipelineLogExtractor}.
@@ -792,4 +801,79 @@ void downstream_patternMismatch_subJobFailureNotIncluded(JenkinsRule jenkins) th
assertFalse(log.contains("### Downstream Job: sub-job-pattern-miss"),
"No downstream section should be present for non-matching jobs.\nActual log:\n" + log);
}
+
+ @Test
+ @DisabledOnOs(OS.WINDOWS)
+ void downstream_invisibleBuildStepJob_replacedWithHiddenPlaceholder(JenkinsRule jenkins) throws Exception {
+ Authentication viewer = configureReadAccess(jenkins, "viewer-build-step");
+
+ WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "hidden-build-step-sub");
+ subJob.setDefinition(new CpsFlowDefinition(
+ "node { sh 'echo \"HIDDEN_BUILD_STEP_MARKER\" && exit 1' }", true));
+
+ WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "visible-build-step-parent");
+ parentJob.setDefinition(new CpsFlowDefinition(
+ "build job: 'hidden-build-step-sub', propagate: false\n"
+ + "currentBuild.result = 'FAILURE'",
+ true));
+
+ grantItemRead(jenkins, "viewer-build-step", parentJob);
+
+ WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0));
+
+ String log;
+ try (ACLContext ignored = ACL.as2(viewer)) {
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, viewer, true,
+ "hidden-build-step-sub");
+ log = String.join("\n", extractor.getFailedStepLog());
+ }
+
+ assertTrue(log.contains("### Downstream Job: [hidden] ###"),
+ "Unreadable downstream jobs should be represented by a hidden placeholder.\nActual log:\n" + log);
+ assertTrue(log.contains("Downstream failure details omitted due to permissions."),
+ "Hidden placeholder should explain why downstream details are missing.\nActual log:\n" + log);
+ assertFalse(log.contains("HIDDEN_BUILD_STEP_MARKER"),
+ "Hidden downstream logs must not be exposed.\nActual log:\n" + log);
+ }
+
+ @Test
+ void downstream_invisibleUpstreamCauseJob_skippedByVisibilityFilter(JenkinsRule jenkins) throws Exception {
+ Authentication viewer = configureReadAccess(jenkins, "viewer-upstream-cause");
+
+ FreeStyleProject parentJob = jenkins.createFreeStyleProject("visible-upstream-parent");
+ grantItemRead(jenkins, "viewer-upstream-cause", parentJob);
+ FreeStyleBuild parentRun = jenkins.buildAndAssertSuccess(parentJob);
+
+ FreeStyleProject hiddenSubJob = jenkins.createFreeStyleProject("hidden-upstream-sub");
+ hiddenSubJob.getBuildersList().add(new FailureBuilder());
+ FreeStyleBuild hiddenSubRun = jenkins.assertBuildStatus(Result.FAILURE,
+ hiddenSubJob.scheduleBuild2(0, new Cause.UpstreamCause(parentRun)));
+ assertNotNull(hiddenSubRun, "Hidden downstream build should be created");
+
+ String log;
+ try (ACLContext ignored = ACL.as2(viewer)) {
+ PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, viewer, true,
+ "hidden-upstream-sub");
+ log = String.join("\n", extractor.getFailedStepLog());
+ }
+
+ assertFalse(log.contains("### Downstream Job:"),
+ "UpstreamCause fallback should skip unreadable downstream jobs entirely.\nActual log:\n" + log);
+ }
+
+ private Authentication configureReadAccess(JenkinsRule jenkins, String username) {
+ jenkins.jenkins.setSecurityRealm(jenkins.createDummySecurityRealm());
+ MockAuthorizationStrategy strategy = new MockAuthorizationStrategy()
+ .grant(Jenkins.READ)
+ .everywhere()
+ .to(username);
+ jenkins.jenkins.setAuthorizationStrategy(strategy);
+ return User.getById(username, true).impersonate2();
+ }
+
+ private void grantItemRead(JenkinsRule jenkins, String username, Item item) {
+ MockAuthorizationStrategy strategy = (MockAuthorizationStrategy) jenkins.jenkins.getAuthorizationStrategy();
+ strategy.grant(Item.READ).onItems(item).to(username);
+ jenkins.jenkins.setAuthorizationStrategy(strategy);
+ }
}
From 8f7ce4471ff35fe3a6bc85286078862ad5a6e628 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 16:53:41 +0800
Subject: [PATCH 09/11] Remove unused downstream job matcher overload
- delete the unused private matchesDownstreamJob(Run<?, ?>) helper flagged by SpotBugs as UPM_UNCALLED_PRIVATE_METHOD
- keep the string-based downstream job regex matcher as the single implementation path
- no behavior changes; downstream filtering logic remains the same
---
.../jenkins/plugins/explain_error/PipelineLogExtractor.java | 4 ----
1 file changed, 4 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index 3390a0b8..088f081b 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -588,10 +588,6 @@ private boolean hasRemainingCapacity(List<String> accumulated) {
return accumulated.size() < maxLines;
}
- private boolean matchesDownstreamJob(Run<?, ?> downstreamRun) {
- return matchesDownstreamJob(downstreamRun.getParent().getFullName());
- }
-
private boolean matchesDownstreamJob(String jobFullName) {
return downstreamJobPattern != null && downstreamJobPattern.matcher(jobFullName).matches();
}
From e0621c92e33d8be2f7b54296dbfbc19c4606a0e3 Mon Sep 17 00:00:00 2001
From: donghui <977675308@qq.com>
Date: Fri, 13 Mar 2026 17:46:38 +0800
Subject: [PATCH 10/11] Potential fix for pull request finding
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
---
.../provider/BaseAIProvider.java | 33 ++++++++++++++-----
1 file changed, 24 insertions(+), 9 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
index 700a5b3c..c735d97b 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java
@@ -127,11 +127,20 @@ The error logs may contain sections from downstream (sub-job) builds, clearly de
... (sub-job log lines, OR an "[AI explanation from sub-job]" block) ...
### END OF DOWNSTREAM JOB: ###
- The "Result:" line uses one of these values:
- - "FAILURE" — this sub-job genuinely failed and is the ROOT CAUSE of the overall failure.
+ The "Result:" line can use several values. Treat them as follows:
+ - "FAILURE" — this sub-job genuinely failed and is usually the ROOT CAUSE of the overall failure.
- "ABORTED (interrupted by fail-fast, not the root cause)" — this sub-job was still running
when a sibling branch failed; it was aborted automatically by parallelsAlwaysFailFast() or
parallel(failFast:true). It is NOT the root cause. Do NOT treat its logs as the primary error.
+ - "ABORTED" — this sub-job was aborted for other reasons (for example: manual abort, timeout,
+ upstream build abort, or infrastructure shutdown). It is not the special fail-fast case above.
+ Explain why it was aborted if the logs make that clear, but do NOT assume it is the main root
+ cause if other sub-jobs have Result: FAILURE.
+ - "UNSTABLE" — this sub-job completed but ended in an unstable state (for example: test failures
+ or quality gate issues). Treat this as an important problem to explain, especially if there is
+ no Result: FAILURE in any sub-job, but be clear that the build is unstable rather than failed.
+ - "UNAVAILABLE" — this sub-job's detailed logs or result are not accessible (for example: due to
+ permissions). You cannot analyze its internals; briefly note that its details are unavailable.
The log content of a downstream section may be either:
- Raw log lines from the sub-job's failing step, OR
@@ -142,11 +151,14 @@ Instead, incorporate its key findings (root cause, resolution steps) into your o
errorSummary and resolutionSteps for the parent job.
When downstream sections are present:
- - Identify WHICH sub-job(s) have Result: FAILURE — those are the root cause(s).
- - State their full name and build number explicitly in errorSummary.
- - Focus root-cause analysis and resolutionSteps on the FAILURE sections only.
- - Mention aborted sub-jobs briefly (e.g. "Job X was aborted due to fail-fast") but do NOT
- treat their logs as the source of the error.
+ - Identify WHICH sub-job(s) have Result: FAILURE — those are the primary root cause(s).
+ - If there are NO Result: FAILURE entries, look at Result: UNSTABLE or plain Result: ABORTED
+ sections to infer the most likely cause, and explain that clearly.
+ - State the full name and build number of important sub-jobs explicitly in errorSummary.
+ - Focus root-cause analysis and resolutionSteps on the FAILURE sections when they exist.
+ - Mention aborted sub-jobs briefly (for example: "Job X was aborted due to fail-fast" or
+ "Job Y was manually aborted after a timeout") but do NOT treat their logs as the primary
+ source of the error if a FAILURE section is present.
- If multiple sub-jobs have Result: FAILURE, summarize each one separately.
- Logs outside downstream sections belong to the parent (upstream) job.
""")
@@ -162,8 +174,11 @@ Instead, incorporate its key findings (root cause, resolution steps) into your o
Remember: Your ENTIRE response must be in {{language}}, including all field values.
If the logs contain "### Downstream Job: ..." sections:
- - Sub-jobs with Result: FAILURE are the ROOT CAUSE — identify them by name in errorSummary.
- - Sub-jobs with Result: ABORTED (interrupted by fail-fast, not the root cause) were killed by a sibling failure — do NOT treat them as the error source.
+ - Sub-jobs with Result: FAILURE are the primary ROOT CAUSE — identify them by name in errorSummary.
+ - Sub-jobs with Result: ABORTED (interrupted by fail-fast, not the root cause) were killed by a sibling failure via fail-fast — do NOT treat them as the error source.
+ - Sub-jobs with plain Result: ABORTED or Result: UNSTABLE indicate other types of problems (for example: manual aborts, timeouts, or test failures). Explain these issues,
+ especially when there is no Result: FAILURE, but clearly describe how they differ from a hard failure.
+ - Sub-jobs with Result: UNAVAILABLE cannot be analyzed in detail; briefly mention that their logs or results are not accessible.
If additional instructions were provided above, you MUST address them in your errorSummary or resolutionSteps.
""")
JenkinsLogAnalysis analyzeLogs(@V("errorLogs") String errorLogs, @V("language") String language, @V("customContext") String customContext);
From 947e7418462a7cef9662db54a54a92abe8257d40 Mon Sep 17 00:00:00 2001
From: donhui <977675308@qq.com>
Date: Fri, 13 Mar 2026 18:55:05 +0800
Subject: [PATCH 11/11] Tighten downstream section capacity handling
- truncate downstream headers, logs, and hidden placeholders to respect maxLines without overflowing the collected output
- only recurse into nested downstream builds when capacity remains, keeping downstream sections structurally bounded
- treat invalid downstream job regex values as a warning that disables collection instead of throwing from pattern compilation
- simplify PipelineLogExtractorTest by removing parent/build stubs that are no longer needed when downstream collection is opt-in
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
---
.../explain_error/PipelineLogExtractor.java | 64 ++++++++++++++++---
.../PipelineLogExtractorTest.java | 6 --
2 files changed, 54 insertions(+), 16 deletions(-)
diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
index 088f081b..f83841a6 100644
--- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
+++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java
@@ -417,7 +417,8 @@ private static Pattern compileDownstreamJobPattern(boolean collectDownstreamLogs
try {
return Pattern.compile(downstreamJobPattern);
} catch (PatternSyntaxException e) {
- throw new IllegalArgumentException("Invalid downstream job pattern: " + e.getMessage(), e);
+ LOGGER.log(Level.WARNING, "Invalid downstream job pattern \"{0}\". Downstream logs will not be collected.", downstreamJobPattern);
+ return null;
}
}
@@ -680,12 +681,44 @@ private void appendDownstreamRunLog(Run<?, ?> downstreamRun, List<String> accumu
this.url = subExtractor.getUrl();
}
- accumulated.addAll(header);
- accumulated.addAll(subLog);
- accumulated.add("### END OF DOWNSTREAM JOB: " + jobFullName + " ###");
+ int remainingCapacity = maxLines - accumulated.size();
+ if (remainingCapacity <= 0) {
+ // No room left for this downstream section
+ return;
+ }
- // Recurse into sub-job's own downstream builds
- subExtractor.collectDownstreamLogs(accumulated, visitedRunIds);
+ // Append header, truncated if needed
+ if (header.size() > remainingCapacity) {
+ accumulated.addAll(header.subList(0, remainingCapacity));
+ // No room left for sub-log or footer
+ return;
+ } else {
+ accumulated.addAll(header);
+ remainingCapacity -= header.size();
+ }
+ // Reserve at least one line for footer if possible
+ final String endMarker = "### END OF DOWNSTREAM JOB: " + jobFullName + " ###";
+ if (remainingCapacity <= 0) {
+ // No space for sub-log or footer
+ return;
+ }
+ int spaceForSubLog = remainingCapacity - 1; // keep one line for footer
+ if (spaceForSubLog > 0) {
+ if (subLog.size() > spaceForSubLog) {
+ accumulated.addAll(subLog.subList(0, spaceForSubLog));
+ } else {
+ accumulated.addAll(subLog);
+ }
+ remainingCapacity = maxLines - accumulated.size();
+ }
+ // Append footer if there is still room
+ if (remainingCapacity > 0) {
+ accumulated.add(endMarker);
+ }
+ // Recurse into sub-job's own downstream builds only if capacity remains
+ if (maxLines - accumulated.size() > 0) {
+ subExtractor.collectDownstreamLogs(accumulated, visitedRunIds);
+ }
}
 private boolean canReadDownstreamRun(Run<?, ?> downstreamRun) {
@@ -695,9 +728,20 @@ private boolean canReadDownstreamRun(Run<?, ?> downstreamRun) {
}
 private void appendHiddenDownstreamPlaceholder(List<String> accumulated) {
- accumulated.add("### Downstream Job: [hidden] ###");
- accumulated.add("Result: UNAVAILABLE");
- accumulated.add("Downstream failure details omitted due to permissions.");
- accumulated.add("### END OF DOWNSTREAM JOB: [hidden] ###");
+ int remainingCapacity = maxLines - accumulated.size();
+ if (remainingCapacity <= 0) {
+ return;
+ }
+ List<String> placeholderLines = Arrays.asList(
+ "### Downstream Job: [hidden] ###",
+ "Result: UNAVAILABLE",
+ "Downstream failure details omitted due to permissions.",
+ "### END OF DOWNSTREAM JOB: [hidden] ###"
+ );
+ if (placeholderLines.size() <= remainingCapacity) {
+ accumulated.addAll(placeholderLines);
+ } else {
+ accumulated.addAll(placeholderLines.subList(0, remainingCapacity));
+ }
}
}
diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
index 0e5918cd..fa632473 100644
--- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
+++ b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java
@@ -73,12 +73,6 @@ void testNullFlowExecutionFallsBackToBuildLog(JenkinsRule jenkins) throws Except
when(mockRun.getLog(100)).thenReturn(List.of("Build started", "ERROR: Something failed"));
when(mockRun.getLogInputStream()).thenReturn(InputStream.nullInputStream());
when(mockRun.getUrl()).thenReturn("job/test/1/");
- // Stub getParent()/getNumber() so collectDownstreamLogs() can build the visitedRunIds
- // key and skip the current job when scanning Jenkins.getAllItems().
- WorkflowJob mockJob = mock(WorkflowJob.class);
- when(mockJob.getFullName()).thenReturn("test/mock-job");
- when(mockRun.getParent()).thenReturn(mockJob);
- when(mockRun.getNumber()).thenReturn(1);
PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100);