diff --git a/README.md b/README.md index fcfa842..bdcc8ce 100644 --- a/README.md +++ b/README.md @@ -240,6 +240,8 @@ post { | **logPattern** | Regex pattern to filter relevant log lines | `''` (no filtering) | | **language** | Language for the explanation | `'English'` | | **customContext** | Additional instructions or context for the AI. Overrides global custom context if specified. | Uses global configuration | +| **collectDownstreamLogs** | Whether to include logs from failed downstream jobs discovered via the `build` step or `Cause.UpstreamCause` | `false` | +| **downstreamJobPattern** | Regular expression matched against downstream job full names. Used only when downstream collection is enabled. | `''` (collect none) | ```groovy explainError( @@ -255,6 +257,18 @@ explainError( ) ``` +To include downstream failures, opt in explicitly and limit collection with a regex: + +```groovy +explainError( + collectDownstreamLogs: true, + downstreamJobPattern: 'team-folder/.*/deploy-.*' +) +``` + +This keeps the default behavior fast and predictable on large controllers. Only downstream jobs +whose full name matches `downstreamJobPattern` are scanned and included in the AI analysis. + Output appears in the sidebar of the failed job. 
![Side Panel - AI Error Explanation](docs/images/side-panel.png) diff --git a/pom.xml b/pom.xml index 64efe37..4950008 100644 --- a/pom.xml +++ b/pom.xml @@ -111,6 +111,13 @@ true + + + org.jenkins-ci.plugins + pipeline-build-step + true + + io.jenkins.plugins ionicons-api diff --git a/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java b/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java index b2d8db7..9df4482 100644 --- a/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java +++ b/src/main/java/io/jenkins/plugins/explain_error/ConsoleExplainErrorAction.java @@ -4,6 +4,7 @@ import hudson.model.Result; import hudson.model.Run; import jenkins.model.RunAction2; +import jenkins.model.Jenkins; import java.io.IOException; import java.io.PrintWriter; import java.util.List; @@ -88,7 +89,8 @@ public void doExplainConsoleError(StaplerRequest2 req, StaplerResponse2 rsp) thr } // Fetch the last N lines of the log - PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines); + PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, Jenkins.getAuthentication2(), + false, null); List logLines = logExtractor.getFailedStepLog(); this.urlString = logExtractor.getUrl(); diff --git a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java index 844612d..0f1a328 100644 --- a/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java +++ b/src/main/java/io/jenkins/plugins/explain_error/ErrorExplainer.java @@ -9,16 +9,20 @@ import hudson.util.LogTaskListener; import io.jenkins.plugins.explain_error.provider.BaseAIProvider; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; +import 
org.springframework.security.core.Authentication; /** * Service class responsible for explaining errors using AI. */ public class ErrorExplainer { + static final String DOWNSTREAM_SECTION_START = "### Downstream Job: "; + static final String DOWNSTREAM_SECTION_END = "### END OF DOWNSTREAM JOB: "; private String providerName; private String urlString; @@ -30,14 +34,26 @@ public String getProviderName() { } public String explainError(Run run, TaskListener listener, String logPattern, int maxLines) { - return explainError(run, listener, logPattern, maxLines, null, null); + return explainError(run, listener, logPattern, maxLines, null, null, false, null, null); } public String explainError(Run run, TaskListener listener, String logPattern, int maxLines, String language) { - return explainError(run, listener, logPattern, maxLines, language, null); + return explainError(run, listener, logPattern, maxLines, language, null, false, null, null); } public String explainError(Run run, TaskListener listener, String logPattern, int maxLines, String language, String customContext) { + return explainError(run, listener, logPattern, maxLines, language, customContext, false, null, null); + } + + public String explainError(Run run, TaskListener listener, String logPattern, int maxLines, String language, + String customContext, boolean collectDownstreamLogs, String downstreamJobPattern) { + return explainError(run, listener, logPattern, maxLines, language, customContext, + collectDownstreamLogs, downstreamJobPattern, null); + } + + String explainError(Run run, TaskListener listener, String logPattern, int maxLines, String language, + String customContext, boolean collectDownstreamLogs, String downstreamJobPattern, + Authentication authentication) { String jobInfo = run != null ? 
("[" + run.getParent().getFullName() + " #" + run.getNumber() + "]") : "[unknown]"; try { // Check if explanation is enabled (folder-level or global) @@ -54,7 +70,8 @@ public String explainError(Run run, TaskListener listener, String logPatte } // Extract error logs - String errorLogs = extractErrorLogs(run, logPattern, maxLines); + String errorLogs = extractErrorLogs(run, logPattern, maxLines, collectDownstreamLogs, + downstreamJobPattern, authentication); // Use step-level customContext if provided, otherwise fallback to global String effectiveCustomContext = StringUtils.isNotBlank(customContext) ? customContext : GlobalConfigurationImpl.get().getCustomContext(); @@ -83,26 +100,49 @@ public String explainError(Run run, TaskListener listener, String logPatte } } - private String extractErrorLogs(Run run, String logPattern, int maxLines) throws IOException { - PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines); + private String extractErrorLogs(Run run, String logPattern, int maxLines, + boolean collectDownstreamLogs, String downstreamJobPattern, + Authentication authentication) throws IOException { + PipelineLogExtractor logExtractor = new PipelineLogExtractor(run, maxLines, authentication, + collectDownstreamLogs, downstreamJobPattern); List logLines = logExtractor.getFailedStepLog(); this.urlString = logExtractor.getUrl(); + return filterErrorLogs(logLines, logPattern); + } + + String filterErrorLogs(List logLines, String logPattern) { if (StringUtils.isBlank(logPattern)) { - // Return last few lines if no pattern specified return String.join("\n", logLines); } Pattern pattern = Pattern.compile(logPattern, Pattern.CASE_INSENSITIVE); - StringBuilder errorLogs = new StringBuilder(); + List filteredLines = new ArrayList<>(); + boolean inDownstreamSection = false; for (String line : logLines) { - if (pattern.matcher(line).find()) { - errorLogs.append(line).append("\n"); + if (isDownstreamSectionStart(line)) { + inDownstreamSection = true; + 
} + + if (inDownstreamSection || pattern.matcher(line).find()) { + filteredLines.add(line); + } + + if (inDownstreamSection && isDownstreamSectionEnd(line)) { + inDownstreamSection = false; } } - return errorLogs.toString(); + return String.join("\n", filteredLines); + } + + private boolean isDownstreamSectionStart(String line) { + return line != null && line.startsWith(DOWNSTREAM_SECTION_START); + } + + private boolean isDownstreamSectionEnd(String line) { + return line != null && line.startsWith(DOWNSTREAM_SECTION_END); } /** diff --git a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java index 95f4dbe..6baaaba 100644 --- a/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java +++ b/src/main/java/io/jenkins/plugins/explain_error/ExplainErrorStep.java @@ -4,6 +4,7 @@ import hudson.model.Run; import hudson.model.TaskListener; import java.util.Set; +import jenkins.model.Jenkins; import org.jenkinsci.plugins.workflow.steps.Step; import org.jenkinsci.plugins.workflow.steps.StepContext; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; @@ -21,12 +22,17 @@ public class ExplainErrorStep extends Step { private int maxLines; private String language; private String customContext; + private boolean collectDownstreamLogs; + private String downstreamJobPattern; @DataBoundConstructor public ExplainErrorStep() { this.logPattern = ""; this.maxLines = 100; this.language = ""; + this.customContext = ""; + this.collectDownstreamLogs = false; + this.downstreamJobPattern = ""; } public String getLogPattern() { @@ -65,6 +71,24 @@ public void setCustomContext(String customContext) { this.customContext = customContext != null ? 
customContext : ""; } + public boolean isCollectDownstreamLogs() { + return collectDownstreamLogs; + } + + @DataBoundSetter + public void setCollectDownstreamLogs(boolean collectDownstreamLogs) { + this.collectDownstreamLogs = collectDownstreamLogs; + } + + public String getDownstreamJobPattern() { + return downstreamJobPattern; + } + + @DataBoundSetter + public void setDownstreamJobPattern(String downstreamJobPattern) { + this.downstreamJobPattern = downstreamJobPattern != null ? downstreamJobPattern : ""; + } + @Override public StepExecution start(StepContext context) throws Exception { return new ExplainErrorStepExecution(context, this); @@ -105,7 +129,9 @@ protected String run() throws Exception { TaskListener listener = getContext().get(TaskListener.class); ErrorExplainer explainer = new ErrorExplainer(); - String explanation = explainer.explainError(run, listener, step.getLogPattern(), step.getMaxLines(), step.getLanguage(), step.getCustomContext()); + String explanation = explainer.explainError(run, listener, step.getLogPattern(), step.getMaxLines(), + step.getLanguage(), step.getCustomContext(), step.isCollectDownstreamLogs(), + step.getDownstreamJobPattern(), Jenkins.getAuthentication2()); return explanation; } diff --git a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java index c891d59..f83841a 100644 --- a/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java +++ b/src/main/java/io/jenkins/plugins/explain_error/PipelineLogExtractor.java @@ -1,5 +1,7 @@ package io.jenkins.plugins.explain_error; +import com.google.common.annotations.VisibleForTesting; +import hudson.model.Item; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.jenkinsci.plugins.workflow.flow.FlowExecution; @@ -12,10 +14,15 @@ import hudson.console.AnnotatedLargeText; import hudson.console.ConsoleNote; +import hudson.model.Cause; import hudson.model.Result; 
import hudson.model.Run; +import hudson.security.ACL; +import hudson.security.ACLContext; +import jenkins.model.CauseOfInterruption; +import jenkins.model.InterruptedBuildAction; import jenkins.model.Jenkins; - +import org.springframework.security.core.Authentication; import java.io.BufferedReader; import java.io.IOException; import java.io.StringReader; @@ -31,8 +38,10 @@ import java.util.List; import java.util.Queue; import java.util.Set; +import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; /** * Utility for extracting log lines related to a failing build or pipeline step @@ -66,10 +75,20 @@ public class PipelineLogExtractor { /** Lines of context to include before and after each error-pattern match. */ private static final int ERROR_CONTEXT_LINES = 5; + /** Maximum recursion depth when following downstream (sub-job) failures. */ + private static final int MAX_DOWNSTREAM_DEPTH = 5; + + /** Hard cap for recent builds scanned per job when falling back to UpstreamCause lookup. */ + private static final int MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB = 100; + private boolean isGraphViewPluginAvailable = false; private transient String url; private transient Run run; private int maxLines; + private int downstreamDepth; + private final boolean collectDownstreamLogs; + private final Pattern downstreamJobPattern; + private final Authentication authentication; /** * Reads the provided log text and returns at most the last {@code maxLines} lines. @@ -297,12 +316,20 @@ public List getFailedStepLog() throws IOException { if (!accumulated.isEmpty()) { setUrl(primaryNodeId != null ? 
primaryNodeId : "0"); - return accumulated; + } else { + // Final fallback: last N lines of the full build console log + setUrl("0"); + accumulated.addAll(run.getLog(maxLines)); } - // Final fallback: last N lines of the full build console log - setUrl("0"); - return run.getLog(maxLines); + // Collect logs from failed downstream (sub-job) builds, recursively + if (collectDownstreamLogs && downstreamDepth == 0) { + Set visitedRunIds = new HashSet<>(); + visitedRunIds.add(run.getParent().getFullName() + "#" + run.getNumber()); + collectDownstreamLogs(accumulated, visitedRunIds); + } + + return accumulated; } /** @@ -346,11 +373,375 @@ public String getUrl() { } public PipelineLogExtractor(Run run, int maxLines) + { + this(run, maxLines, Jenkins.getAuthentication2(), false, null); + } + + public PipelineLogExtractor(Run run, int maxLines, boolean collectDownstreamLogs, String downstreamJobPattern) + { + this(run, maxLines, Jenkins.getAuthentication2(), collectDownstreamLogs, downstreamJobPattern); + } + + PipelineLogExtractor(Run run, int maxLines, Authentication authentication, + boolean collectDownstreamLogs, String downstreamJobPattern) + { + this(run, maxLines, 0, authentication, collectDownstreamLogs, + compileDownstreamJobPattern(collectDownstreamLogs, downstreamJobPattern)); + } + + @VisibleForTesting + PipelineLogExtractor(Run run, int maxLines, int downstreamDepth) + { + this(run, maxLines, downstreamDepth, Jenkins.getAuthentication2(), true, Pattern.compile(".*")); + } + + private PipelineLogExtractor(Run run, int maxLines, int downstreamDepth, Authentication authentication, + boolean collectDownstreamLogs, + Pattern downstreamJobPattern) { this.run = run; this.maxLines = maxLines; + this.downstreamDepth = downstreamDepth; + this.collectDownstreamLogs = collectDownstreamLogs; + this.downstreamJobPattern = downstreamJobPattern; + this.authentication = authentication != null ? 
authentication : Jenkins.getAuthentication2(); if (Jenkins.get().getPlugin("pipeline-graph-view") != null) { isGraphViewPluginAvailable = true; } } + + private static Pattern compileDownstreamJobPattern(boolean collectDownstreamLogs, String downstreamJobPattern) { + if (!collectDownstreamLogs || downstreamJobPattern == null || downstreamJobPattern.isBlank()) { + return null; + } + try { + return Pattern.compile(downstreamJobPattern); + } catch (PatternSyntaxException e) { + LOGGER.log(Level.WARNING, "Invalid downstream job pattern \"{0}\". Downstream logs will not be collected.", downstreamJobPattern); + return null; + } + } + + /** + * Collects error logs from failed downstream (sub-job) builds triggered by this run. + *

+ * Supports two discovery mechanisms:
+ * <ol>
+ *   <li><b>DownstreamBuildAction</b> (pipeline-build-step plugin): reads the
+ *       {@link org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction}
+ *       attached to the current run to find builds triggered by the {@code build} step.</li>
+ *   <li><b>Cause.UpstreamCause</b>: scans all jobs in Jenkins for builds whose
+ *       {@link Cause.UpstreamCause} points back to this run. This covers cases where
+ *       the pipeline-build-step plugin is not installed.</li>
+ * </ol>
+ * Recursion is bounded by {@link #MAX_DOWNSTREAM_DEPTH} to prevent infinite loops. + * + * @param accumulated the list to append downstream log lines into + * @param visitedRunIds set of already-visited run IDs (job full name + "#" + build number) + * used to prevent duplicate processing across recursive calls + */ + void collectDownstreamLogs(List accumulated, Set visitedRunIds) { + boolean foundViaDownstreamBuildAction = false; + String runId = run.getParent().getFullName() + "#" + run.getNumber(); + if (!collectDownstreamLogs || downstreamJobPattern == null + || downstreamDepth >= MAX_DOWNSTREAM_DEPTH || !hasRemainingCapacity(accumulated)) { + return; + } + + // Strategy A: DownstreamBuildAction (pipeline-build-step plugin) + if (Jenkins.get().getPlugin("pipeline-build-step") != null) { + try { + foundViaDownstreamBuildAction = collectViaDownstreamBuildAction(accumulated, visitedRunIds); + } catch (Exception e) { + LOGGER.log(Level.WARNING, + "Failed to collect downstream logs via DownstreamBuildAction for " + runId, e); + } + } + + if (foundViaDownstreamBuildAction || !hasRemainingCapacity(accumulated)) { + return; + } + + // Strategy B: Cause.UpstreamCause — scan builds that list this run as upstream + try { + collectViaUpstreamCause(accumulated, visitedRunIds); + } catch (Exception e) { + LOGGER.log(Level.WARNING, + "Failed to collect downstream logs via UpstreamCause for " + runId, e); + } + } + + /** + * Discovers failed downstream builds via + * {@link org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction} + * and appends their logs to {@code accumulated}. 
+ */ + private boolean collectViaDownstreamBuildAction(List accumulated, Set visitedRunIds) throws IOException { + org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction action = + run.getAction(org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction.class); + if (action == null) { + return false; + } + boolean foundMatchingDownstream = false; + for (org.jenkinsci.plugins.workflow.support.steps.build.DownstreamBuildAction.DownstreamBuild db : action.getDownstreamBuilds()) { + if (!hasRemainingCapacity(accumulated)) { + return foundMatchingDownstream; + } + String jobFullName = db.getJobFullName(); + if (!matchesDownstreamJob(jobFullName)) { + continue; + } + Run downstreamRun; + try (ACLContext ignored = ACL.as2(ACL.SYSTEM2)) { + downstreamRun = db.getBuild(); + } + if (downstreamRun == null) { + continue; + } + foundMatchingDownstream = true; + appendDownstreamRunLog(downstreamRun, accumulated, visitedRunIds); + } + return foundMatchingDownstream; + } + + /** + * Discovers failed downstream builds by scanning all jobs for builds whose + * {@link Cause.UpstreamCause} points to this run, and appends their logs to + * {@code accumulated}. 
+ */ + private void collectViaUpstreamCause(List accumulated, Set visitedRunIds) throws IOException { + String thisJobName = run.getParent().getFullName(); + int thisBuildNumber = run.getNumber(); + long thisBuildStartTime = run.getTimeInMillis(); + + try (ACLContext ignored = ACL.as2(authentication)) { + for (hudson.model.Job job : Jenkins.get().getAllItems(hudson.model.Job.class)) { + if (!hasRemainingCapacity(accumulated)) { + return; + } + if (!job.hasPermission(Item.READ) || !matchesDownstreamJob(job.getFullName())) { + continue; + } + // Skip the current job itself + if (job.getFullName().equals(thisJobName)) { + continue; + } + Run lastBuild = job.getLastBuild(); + if (lastBuild == null) { + continue; + } + int scannedCandidates = 0; + // Walk recent builds of this job to find ones triggered by our run + for (Run candidate = lastBuild; candidate != null; candidate = candidate.getPreviousBuild()) { + if (!hasRemainingCapacity(accumulated) || scannedCandidates >= MAX_UPSTREAM_CAUSE_CANDIDATES_PER_JOB) { + break; + } + scannedCandidates++; + // Only look at builds that could have been triggered by our run + if (candidate.getTimeInMillis() < thisBuildStartTime) { + break; + } + for (Cause cause : candidate.getCauses()) { + if (cause instanceof Cause.UpstreamCause) { + Cause.UpstreamCause upstreamCause = (Cause.UpstreamCause) cause; + if (upstreamCause.getUpstreamProject().equals(thisJobName) + && upstreamCause.getUpstreamBuild() == thisBuildNumber) { + appendDownstreamRunLog(candidate, accumulated, visitedRunIds); + break; + } + } + } + } + } + } + } + + /** + * Returns {@code true} if the given run was aborted because a sibling branch triggered + * a fail-fast interruption (e.g. via {@code parallelsAlwaysFailFast()} or + * {@code parallel(failFast: true, ...)}). + *

+ * Jenkins records the interruption cause in an {@link InterruptedBuildAction} attached to + * the run. When the cause is a fail-fast signal, its + * {@link CauseOfInterruption#getShortDescription()} contains the phrase "fail fast" + * (case-insensitive). This distinguishes a sibling-aborted run from a run that was + * independently aborted by a user or another mechanism. + * + * @param run the build to inspect + * @return {@code true} if the build was interrupted by a fail-fast signal + */ + boolean isAbortedByFailFast(Run run) { + if (run.getResult() != Result.ABORTED) { + return false; + } + for (InterruptedBuildAction action : run.getActions(InterruptedBuildAction.class)) { + for (CauseOfInterruption cause : action.getCauses()) { + String desc = cause.getShortDescription(); + if (desc != null && desc.toLowerCase(java.util.Locale.ROOT).contains("fail fast")) { + return true; + } + } + } + return false; + } + + private boolean hasRemainingCapacity(List accumulated) { + return accumulated.size() < maxLines; + } + + private boolean matchesDownstreamJob(String jobFullName) { + return downstreamJobPattern != null && downstreamJobPattern.matcher(jobFullName).matches(); + } + + /** + * Appends the error content of a single downstream run to {@code accumulated}, + * then recurses into its own downstream builds. + *

+ * Fast path — reuse existing AI explanation: if the downstream run already has an + * {@link ErrorExplanationAction} (i.e. the sub-job called {@code explainError()} itself), + * its pre-computed explanation text is used directly. This avoids a redundant AI call and + * preserves the full context that was available when the sub-job ran. + *

+ * Slow path — extract raw logs: when no {@link ErrorExplanationAction} is present, + * a {@link PipelineLogExtractor} is created for the downstream run and its log lines are + * appended as before. + *

+ * Builds that were aborted by a fail-fast signal from a sibling branch are labelled + * {@code ABORTED (interrupted by fail-fast, not the root cause)} in the section header + * so that the AI can distinguish them from the build that actually caused the failure. + * + * @param downstreamRun the downstream build to extract content from + * @param accumulated the list to append content lines into + * @param visitedRunIds set of already-visited run IDs to prevent duplicates + */ + private void appendDownstreamRunLog(Run downstreamRun, List accumulated, + Set visitedRunIds) throws IOException { + Result downstreamResult = downstreamRun.getResult(); + if (downstreamResult == null || !downstreamResult.isWorseThan(Result.SUCCESS)) { + return; + } + String jobFullName = downstreamRun.getParent().getFullName(); + int buildNumber = downstreamRun.getNumber(); + String runId = jobFullName + "#" + buildNumber; + if (!visitedRunIds.add(runId)) { + return; // already processed + } + int remaining = this.maxLines - accumulated.size(); + if (remaining <= 0) { + return; + } + if (!canReadDownstreamRun(downstreamRun)) { + appendHiddenDownstreamPlaceholder(accumulated); + return; + } + + boolean failFastAborted = isAbortedByFailFast(downstreamRun); + String resultLabel = failFastAborted + ? "ABORTED (interrupted by fail-fast, not the root cause)" + : String.valueOf(downstreamResult); + + List header = Arrays.asList( + "### Downstream Job: " + jobFullName + " #" + buildNumber + " ###", + "Result: " + resultLabel, + "--- LOG CONTENT ---" + ); + + String runUrl = run.getUrl(); + + // Fast path: sub-job already has an AI explanation — reuse it directly. + ErrorExplanationAction existingExplanation = downstreamRun.getAction(ErrorExplanationAction.class); + if (existingExplanation != null && existingExplanation.hasValidExplanation()) { + // Redirect "View failure output" to the sub-job's own explanation URL when available. 
+ if (!failFastAborted && existingExplanation.getUrlString() != null && this.url != null + && runUrl != null && this.url.contains(runUrl)) { + this.url = existingExplanation.getUrlString(); + } + accumulated.addAll(header); + accumulated.add("[AI explanation from sub-job]"); + accumulated.addAll(Arrays.asList(existingExplanation.getExplanation().split("\n", -1))); + accumulated.add("### END OF DOWNSTREAM JOB: " + jobFullName + " ###"); + // No need to recurse further — the sub-job's explanation already covers its own + // downstream failures (it was produced with full context at the time of the failure). + return; + } + + // Slow path: no existing explanation — extract raw logs as before. + PipelineLogExtractor subExtractor = new PipelineLogExtractor(downstreamRun, remaining, downstreamDepth + 1, + authentication, collectDownstreamLogs, downstreamJobPattern); + List subLog = subExtractor.getFailedStepLog(); + if (subLog == null || subLog.isEmpty()) { + return; + } + + // If this sub-job genuinely failed (not just aborted by fail-fast) and the parent + // URL still points to the parent job (i.e. no prior real sub-job failure has already + // claimed the URL), redirect "View failure output" to the sub-job's failing node. 
+ if (!failFastAborted && subExtractor.getUrl() != null && this.url != null + && runUrl != null && this.url.contains(runUrl)) { + this.url = subExtractor.getUrl(); + } + + int remainingCapacity = maxLines - accumulated.size(); + if (remainingCapacity <= 0) { + // No room left for this downstream section + return; + } + + // Append header, truncated if needed + if (header.size() > remainingCapacity) { + accumulated.addAll(header.subList(0, remainingCapacity)); + // No room left for sub-log or footer + return; + } else { + accumulated.addAll(header); + remainingCapacity -= header.size(); + } + // Reserve at least one line for footer if possible + final String endMarker = "### END OF DOWNSTREAM JOB: " + jobFullName + " ###"; + if (remainingCapacity <= 0) { + // No space for sub-log or footer + return; + } + int spaceForSubLog = remainingCapacity - 1; // keep one line for footer + if (spaceForSubLog > 0) { + if (subLog.size() > spaceForSubLog) { + accumulated.addAll(subLog.subList(0, spaceForSubLog)); + } else { + accumulated.addAll(subLog); + } + remainingCapacity = maxLines - accumulated.size(); + } + // Append footer if there is still room + if (remainingCapacity > 0) { + accumulated.add(endMarker); + } + // Recurse into sub-job's own downstream builds only if capacity remains + if (maxLines - accumulated.size() > 0) { + subExtractor.collectDownstreamLogs(accumulated, visitedRunIds); + } + } + + private boolean canReadDownstreamRun(Run downstreamRun) { + try (ACLContext ignored = ACL.as2(authentication)) { + return downstreamRun.getParent().hasPermission(Item.READ); + } + } + + private void appendHiddenDownstreamPlaceholder(List accumulated) { + int remainingCapacity = maxLines - accumulated.size(); + if (remainingCapacity <= 0) { + return; + } + List placeholderLines = Arrays.asList( + "### Downstream Job: [hidden] ###", + "Result: UNAVAILABLE", + "Downstream failure details omitted due to permissions.", + "### END OF DOWNSTREAM JOB: [hidden] ###" + ); + if 
(placeholderLines.size() <= remainingCapacity) { + accumulated.addAll(placeholderLines); + } else { + accumulated.addAll(placeholderLines.subList(0, remainingCapacity)); + } + } } diff --git a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java index e2a2e45..c735d97 100644 --- a/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java +++ b/src/main/java/io/jenkins/plugins/explain_error/provider/BaseAIProvider.java @@ -119,18 +119,66 @@ public interface Assistant { You MUST follow ALL instructions provided by the user, including any additional context or requirements. When additional instructions are provided, you MUST incorporate them into your analysis fields, especially in errorSummary and resolutionSteps. + + The error logs may contain sections from downstream (sub-job) builds, clearly delimited like this: + ### Downstream Job: # ### + Result: + --- LOG CONTENT --- + ... (sub-job log lines, OR an "[AI explanation from sub-job]" block) ... + ### END OF DOWNSTREAM JOB: ### + + The "Result:" line can use several values. Treat them as follows: + - "FAILURE" — this sub-job genuinely failed and is usually the ROOT CAUSE of the overall failure. + - "ABORTED (interrupted by fail-fast, not the root cause)" — this sub-job was still running + when a sibling branch failed; it was aborted automatically by parallelsAlwaysFailFast() or + parallel(failFast:true). It is NOT the root cause. Do NOT treat its logs as the primary error. + - "ABORTED" — this sub-job was aborted for other reasons (for example: manual abort, timeout, + upstream build abort, or infrastructure shutdown). It is not the special fail-fast case above. + Explain why it was aborted if the logs make that clear, but do NOT assume it is the main root + cause if other sub-jobs have Result: FAILURE. 
+ - "UNSTABLE" — this sub-job completed but ended in an unstable state (for example: test failures + or quality gate issues). Treat this as an important problem to explain, especially if there is + no Result: FAILURE in any sub-job, but be clear that the build is unstable rather than failed. + - "UNAVAILABLE" — this sub-job's detailed logs or result are not accessible (for example: due to + permissions). You cannot analyze its internals; briefly note that its details are unavailable. + + The log content of a downstream section may be either: + - Raw log lines from the sub-job's failing step, OR + - An "[AI explanation from sub-job]" block: a pre-computed AI analysis produced by the + sub-job itself when it called explainError(). Treat this block as a high-quality, + already-analysed summary of the sub-job's failure — do NOT re-analyse it from scratch. + Instead, incorporate its key findings (root cause, resolution steps) into your own + errorSummary and resolutionSteps for the parent job. + + When downstream sections are present: + - Identify WHICH sub-job(s) have Result: FAILURE — those are the primary root cause(s). + - If there are NO Result: FAILURE entries, look at Result: UNSTABLE or plain Result: ABORTED + sections to infer the most likely cause, and explain that clearly. + - State the full name and build number of important sub-jobs explicitly in errorSummary. + - Focus root-cause analysis and resolutionSteps on the FAILURE sections when they exist. + - Mention aborted sub-jobs briefly (for example: "Job X was aborted due to fail-fast" or + "Job Y was manually aborted after a timeout") but do NOT treat their logs as the primary + source of the error if a FAILURE section is present. + - If multiple sub-jobs have Result: FAILURE, summarize each one separately. + - Logs outside downstream sections belong to the parent (upstream) job. """) @UserMessage(""" Analyze the following Jenkins build error logs and provide a clear, actionable explanation. 
- + CRITICAL: You MUST respond ONLY in {{language}}. ALL text in your response must be in {{language}}. This includes: error summaries, resolution steps, best practices, and any other text. {{customContext}} - + ERROR LOGS: {{errorLogs}} - + Remember: Your ENTIRE response must be in {{language}}, including all field values. + If the logs contain "### Downstream Job: ..." sections: + - Sub-jobs with Result: FAILURE are the primary ROOT CAUSE — identify them by name in errorSummary. + - Sub-jobs with Result: ABORTED (interrupted by fail-fast, not the root cause) were killed by a sibling failure via fail-fast — do NOT treat them as the error source. + - Sub-jobs with plain Result: ABORTED or Result: UNSTABLE indicate other types of problems (for example: manual aborts, timeouts, or test failures). Explain these issues, + especially when there is no Result: FAILURE, but clearly describe how they differ from a hard failure. + - Sub-jobs with Result: UNAVAILABLE cannot be analyzed in detail; briefly mention that their logs or results are not accessible. If additional instructions were provided above, you MUST address them in your errorSummary or resolutionSteps. """) JenkinsLogAnalysis analyzeLogs(@V("errorLogs") String errorLogs, @V("language") String language, @V("customContext") String customContext); diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly index fdbd45d..f54552b 100644 --- a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly +++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/config.jelly @@ -19,4 +19,14 @@ description="Additional instructions or context for the AI. Overrides the global custom context if specified. 
Leave empty to use global configuration."> + + + + + + + + diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html new file mode 100644 index 0000000..7048aee --- /dev/null +++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-collectDownstreamLogs.html @@ -0,0 +1,8 @@ +Include logs from failed downstream jobs triggered by this build. +
+
+Downstream discovery is disabled by default to avoid expensive scans on large Jenkins controllers. +Enable this only when you want the AI analysis to include sub-job failures. +
+
+When enabled, use Downstream Job Pattern to restrict which downstream jobs are collected. diff --git a/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html new file mode 100644 index 0000000..e520de1 --- /dev/null +++ b/src/main/resources/io/jenkins/plugins/explain_error/ExplainErrorStep/help-downstreamJobPattern.html @@ -0,0 +1,14 @@ +Regular expression matched against downstream job full names. +
+
+This field is used only when Collect Downstream Logs is enabled. +Jobs that do not match the pattern are skipped. +
+
+Examples: +

+<ul>
+  <li><code>team-folder/.*/deploy-.*</code> matches nested deploy jobs inside <code>team-folder</code></li>
+  <li><code>release-pipeline|hotfix-pipeline</code> matches either job name exactly</li>
+  <li><code>.*</code> matches all downstream jobs (use with caution)</li>
+</ul>
+Leave empty to collect no downstream jobs. diff --git a/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java b/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java index 89e9a34..4ebfe8b 100644 --- a/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java +++ b/src/test/java/io/jenkins/plugins/explain_error/ErrorExplainerTest.java @@ -2,7 +2,9 @@ import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.assertThrows; import com.cloudbees.hudson.plugins.folder.Folder; @@ -19,6 +21,54 @@ @WithJenkins class ErrorExplainerTest { + @Test + void filterErrorLogs_preservesEntireDownstreamSectionWhenPatternIsUsed() { + ErrorExplainer errorExplainer = new ErrorExplainer(); + + String filtered = errorExplainer.filterErrorLogs(java.util.List.of( + "parent info line", + "ERROR: upstream failed", + "### Downstream Job: team-folder/sub-job #12 ###", + "Result: FAILURE", + "--- LOG CONTENT ---", + "[AI explanation from sub-job]", + "Root cause: dependency mismatch", + "### END OF DOWNSTREAM JOB: team-folder/sub-job ###", + "non matching tail" + ), "ERROR"); + + assertTrue(filtered.contains("ERROR: upstream failed")); + assertTrue(filtered.contains("### Downstream Job: team-folder/sub-job #12 ###")); + assertTrue(filtered.contains("[AI explanation from sub-job]")); + assertTrue(filtered.contains("Root cause: dependency mismatch")); + assertTrue(filtered.contains("### END OF DOWNSTREAM JOB: team-folder/sub-job ###")); + assertFalse(filtered.contains("parent info line")); + assertFalse(filtered.contains("non matching tail")); + } + + @Test + void filterErrorLogs_keepsOnlyMatchingUpstreamLinesOutsideDownstreamSections() { + ErrorExplainer 
errorExplainer = new ErrorExplainer(); + + String filtered = errorExplainer.filterErrorLogs(java.util.List.of( + "upstream info", + "Exception: upstream failure", + "upstream debug", + "### Downstream Job: team-folder/sub-job #9 ###", + "Result: FAILURE", + "--- LOG CONTENT ---", + "sub-job debug line", + "### END OF DOWNSTREAM JOB: team-folder/sub-job ###", + "upstream trailing info" + ), "Exception"); + + assertFalse(filtered.contains("upstream info")); + assertTrue(filtered.contains("Exception: upstream failure")); + assertFalse(filtered.contains("upstream debug")); + assertTrue(filtered.contains("sub-job debug line")); + assertFalse(filtered.contains("upstream trailing info")); + } + @Test void testErrorExplainerBasicFunctionality(JenkinsRule jenkins) throws Exception { ErrorExplainer errorExplainer = new ErrorExplainer(); diff --git a/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java b/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java index eb7c53c..8a9c8f8 100644 --- a/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java +++ b/src/test/java/io/jenkins/plugins/explain_error/ExplainErrorStepConfigTest.java @@ -23,7 +23,8 @@ void testExplainErrorStepWithParameters(JenkinsRule jenkins) throws Exception { " echo 'This is a test build'\n" + " echo 'ERROR: Something went wrong'\n" + " echo 'FAILED: Build failed'\n" + - " explainError logPattern: 'ERROR|FAILED', maxLines: 50\n" + + " explainError logPattern: 'ERROR|FAILED', maxLines: 50, " + + "collectDownstreamLogs: true, downstreamJobPattern: 'team/.+'\n" + "}"; job.setDefinition(new CpsFlowDefinition(pipelineScript, true)); diff --git a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java index 8b0174a..fa63247 100644 --- a/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java +++ 
b/src/test/java/io/jenkins/plugins/explain_error/PipelineLogExtractorTest.java @@ -13,22 +13,38 @@ import static org.mockito.Mockito.when; import hudson.console.AnnotatedLargeText; +import hudson.model.Cause; import hudson.model.FreeStyleBuild; import hudson.model.FreeStyleProject; +import hudson.model.Item; +import hudson.model.Result; +import hudson.model.Run; +import hudson.model.User; +import hudson.security.ACL; +import hudson.security.ACLContext; import io.jenkins.plugins.explain_error.provider.TestProvider; import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import jenkins.model.CauseOfInterruption; +import jenkins.model.InterruptedBuildAction; +import jenkins.model.Jenkins; import org.jenkinsci.plugins.workflow.actions.LogAction; +import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.flow.FlowExecution; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import java.util.List; -import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.DisabledOnOs; import org.junit.jupiter.api.condition.OS; +import org.jvnet.hudson.test.FailureBuilder; import org.jvnet.hudson.test.JenkinsRule; +import org.jvnet.hudson.test.MockAuthorizationStrategy; import org.jvnet.hudson.test.junit.jupiter.WithJenkins; +import org.springframework.security.core.Authentication; /** * Integration tests for {@link PipelineLogExtractor}. 
@@ -403,4 +419,455 @@ void strategy1_originWithoutLogAction_logNullBranchCovered(JenkinsRule jenkins) assertNotNull(lines); assertFalse(lines.isEmpty(), "Strategy 3 should find the echo output from the console log"); } + + // ------------------------------------------------------------------------- + // isAbortedByFailFast unit tests + // ------------------------------------------------------------------------- + + /** + * isAbortedByFailFast — non-ABORTED result returns false immediately. + * A FAILURE build should never be considered a fail-fast abort regardless of + * any InterruptedBuildAction that might be attached. + */ + @Test + void isAbortedByFailFast_nonAbortedResult_returnsFalse(JenkinsRule jenkins) { + @SuppressWarnings("unchecked") + Run mockRun = mock(Run.class); + when(mockRun.getResult()).thenReturn(Result.FAILURE); + + PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100); + assertFalse(extractor.isAbortedByFailFast(mockRun), + "A FAILURE build must not be treated as a fail-fast abort"); + } + + /** + * isAbortedByFailFast — ABORTED build with a cause whose description contains + * "fail fast" (case-insensitive) returns true. 
+ */ + @Test + void isAbortedByFailFast_abortedWithFailFastCause_returnsTrue(JenkinsRule jenkins) { + @SuppressWarnings("unchecked") + Run mockRun = mock(Run.class); + when(mockRun.getResult()).thenReturn(Result.ABORTED); + + CauseOfInterruption failFastCause = mock(CauseOfInterruption.class); + when(failFastCause.getShortDescription()).thenReturn("Fail Fast: sibling branch failed"); + + InterruptedBuildAction action = new InterruptedBuildAction(List.of(failFastCause)); + when(mockRun.getActions(InterruptedBuildAction.class)).thenReturn(List.of(action)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100); + assertTrue(extractor.isAbortedByFailFast(mockRun), + "An ABORTED build with a 'fail fast' cause description must return true"); + } + + /** + * isAbortedByFailFast — ABORTED build whose InterruptedBuildAction cause description + * does NOT contain "fail fast" returns false (e.g. a manual user abort). + */ + @Test + void isAbortedByFailFast_abortedWithNonFailFastCause_returnsFalse(JenkinsRule jenkins) { + @SuppressWarnings("unchecked") + Run mockRun = mock(Run.class); + when(mockRun.getResult()).thenReturn(Result.ABORTED); + + CauseOfInterruption userCause = mock(CauseOfInterruption.class); + when(userCause.getShortDescription()).thenReturn("Aborted by user admin"); + + InterruptedBuildAction action = new InterruptedBuildAction(List.of(userCause)); + when(mockRun.getActions(InterruptedBuildAction.class)).thenReturn(List.of(action)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(mockRun, 100); + assertFalse(extractor.isAbortedByFailFast(mockRun), + "A user-aborted build must not be treated as a fail-fast abort"); + } + + // ------------------------------------------------------------------------- + // Downstream sub-job integration tests + // ------------------------------------------------------------------------- + + /** + * Downstream collection is opt-in: when the parent triggers a failing sub-job but + * downstream 
collection is not explicitly enabled, the sub-job's log must not appear. + */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_defaultOff_subJobFailureNotIncluded(JenkinsRule jenkins) throws Exception { + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-default-off"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"DEFAULT_OFF_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-default-off"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-job-default-off', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + assertFalse(log.contains("DEFAULT_OFF_MARKER"), + "Downstream logs must be excluded unless explicitly enabled.\nActual log:\n" + log); + assertFalse(log.contains("### Downstream Job: sub-job-default-off"), + "No downstream section should be present when downstream collection is disabled.\nActual log:\n" + log); + } + + /** + * Downstream sub-job FAILURE: when a parent pipeline triggers a sub-job via the + * {@code build} step and that sub-job fails, the parent's extracted log must contain + * a downstream section with {@code Result: FAILURE} and the sub-job's error output. 
+ */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_subJobFailure_logIncludedWithFailureHeader(JenkinsRule jenkins) throws Exception { + // Create the sub-job that will fail + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-failure"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"SUB_JOB_ERROR_MARKER\" && exit 1' }", true)); + + // Create the parent pipeline that triggers the sub-job + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-triggers-failing-sub"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-job-failure', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-job-failure"); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + assertTrue(log.contains("### Downstream Job: sub-job-failure"), + "Log must contain a downstream section header.\nActual log:\n" + log); + assertTrue(log.contains("Result: FAILURE"), + "Downstream section must be labelled Result: FAILURE.\nActual log:\n" + log); + assertTrue(log.contains("SUB_JOB_ERROR_MARKER"), + "Downstream section must include the sub-job's error output.\nActual log:\n" + log); + } + + /** + * Downstream sub-job SUCCESS: when the triggered sub-job succeeds, no downstream + * section should be appended to the parent's extracted log. 
+ */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_subJobSuccess_logNotIncluded(JenkinsRule jenkins) throws Exception { + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-success"); + subJob.setDefinition(new CpsFlowDefinition( + "node { echo 'SUB_JOB_SUCCESS_MARKER' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-triggers-passing-sub"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-job-success'\n" + + "sh 'echo \"PARENT_FAILURE\" && exit 1'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-job-success"); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + assertFalse(log.contains("### Downstream Job: sub-job-success"), + "Successful sub-job must not produce a downstream section.\nActual log:\n" + log); + } + + // ------------------------------------------------------------------------- + // collectDownstreamLogs unit tests (depth guard, deduplication) + // ------------------------------------------------------------------------- + + /** + * MAX_DOWNSTREAM_DEPTH guard: when a PipelineLogExtractor is constructed at depth 5 + * (the maximum), {@code collectDownstreamLogs} must return immediately without + * appending anything to the accumulated list. 
+ */ + @Test + void collectDownstreamLogs_atMaxDepth_appendsNothing(JenkinsRule jenkins) throws Exception { + // Use a real (successful) FreeStyleBuild so Jenkins.get() is available + FreeStyleProject project = jenkins.createFreeStyleProject("depth-guard-project"); + FreeStyleBuild build = jenkins.buildAndAssertSuccess(project); + + // Construct at depth = MAX (5) to verify the guard in collectDownstreamLogs + PipelineLogExtractor extractor = new PipelineLogExtractor(build, 200, 5); + + List accumulated = new ArrayList<>(); + Set visited = new HashSet<>(); + extractor.collectDownstreamLogs(accumulated, visited); + + assertTrue(accumulated.isEmpty(), + "collectDownstreamLogs at MAX_DOWNSTREAM_DEPTH must not append any lines"); + } + + /** + * visitedRunIds deduplication: if a downstream run ID is already in the visited set, + * {@code collectDownstreamLogs} must not append its log a second time. + *

+ * Verified by pre-populating visitedRunIds with the sub-job's ID before the parent + * run's extraction, then asserting the downstream section does not appear. + */ + @Test + @DisabledOnOs(OS.WINDOWS) + void collectDownstreamLogs_alreadyVisitedRunId_notAppendedTwice(JenkinsRule jenkins) throws Exception { + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-dedup"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"DEDUP_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-dedup"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-dedup', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + // Find the sub-job run that was triggered + WorkflowRun subRun = subJob.getLastBuild(); + assertNotNull(subRun, "Sub-job must have been triggered"); + + // Pre-populate visitedRunIds with the sub-job's ID so it is treated as already seen + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-dedup"); + // Call collectDownstreamLogs with a visited set that already contains both the parent + // and the sub-job → no downstream section should be added for the sub-job. + List accumulated = new ArrayList<>(); + Set visited = new HashSet<>(); + visited.add(parentRun.getParent().getFullName() + "#" + parentRun.getNumber()); + visited.add(subRun.getParent().getFullName() + "#" + subRun.getNumber()); + + extractor.collectDownstreamLogs(accumulated, visited); + + String log = String.join("\n", accumulated); + assertFalse(log.contains("DEDUP_MARKER"), + "Already-visited sub-job must not be appended again.\nActual log:\n" + log); + } + + /** + * Capacity guard: when {@code accumulated} has already reached {@code maxLines}, + * {@code collectDownstreamLogs} must return immediately instead of scanning jobs/builds. 
+ */ + @Test + @DisabledOnOs(OS.WINDOWS) + void collectDownstreamLogs_atCapacity_skipsDownstreamScan(JenkinsRule jenkins) throws Exception { + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-capacity-guard"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"CAPACITY_GUARD_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-capacity-guard"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-capacity-guard', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 1, true, "sub-capacity-guard"); + List accumulated = new ArrayList<>(); + accumulated.add("already full"); + Set visited = new HashSet<>(); + visited.add(parentRun.getParent().getFullName() + "#" + parentRun.getNumber()); + + extractor.collectDownstreamLogs(accumulated, visited); + + assertEquals(List.of("already full"), accumulated, + "No downstream content should be appended after maxLines is already reached"); + } + + // ------------------------------------------------------------------------- + // Fast-path: reuse ErrorExplanationAction from sub-job + // ------------------------------------------------------------------------- + + /** + * Fast path — sub-job has ErrorExplanationAction: when the downstream run already + * carries an {@link ErrorExplanationAction} (i.e. it called {@code explainError()}), + * the parent's extracted log must contain the pre-computed explanation text wrapped in + * the "[AI explanation from sub-job]" marker, and must NOT contain raw log lines from + * the sub-job (no redundant log extraction). + *

+ * Strategy: run the parent pipeline first (which triggers the sub-job), then attach an + * {@link ErrorExplanationAction} to the sub-job run that was actually triggered, and + * finally re-run {@link PipelineLogExtractor} on the parent run to verify the fast path. + */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_subJobHasExplanationAction_explanationReusedInsteadOfRawLog(JenkinsRule jenkins) throws Exception { + // Sub-job: fails but does NOT call explainError() yet + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-with-explanation"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"RAW_SUB_LOG_SHOULD_NOT_APPEAR\" && exit 1' }", true)); + + // Parent pipeline: triggers the sub-job + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-reuses-explanation"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-with-explanation', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + // Find the sub-job run that was triggered by the parent + WorkflowRun subRun = subJob.getLastBuild(); + assertNotNull(subRun, "Sub-job must have been triggered"); + + // Simulate the sub-job having called explainError() by attaching an ErrorExplanationAction + subRun.addOrReplaceAction(new ErrorExplanationAction( + "SUB_JOB_AI_EXPLANATION: null pointer in Foo.bar()", + "http://localhost/job/sub-with-explanation/1/console", + "raw logs", + "Test")); + subRun.save(); + + // Now extract logs from the parent — the fast path should kick in + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-with-explanation"); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + // The pre-computed explanation must appear + assertTrue(log.contains("[AI explanation from sub-job]"), + "Log must contain the fast-path marker.\nActual log:\n" + 
log); + assertTrue(log.contains("SUB_JOB_AI_EXPLANATION"), + "Log must contain the sub-job's explanation text.\nActual log:\n" + log); + // Raw log lines from the sub-job's sh step must NOT be extracted again + assertFalse(log.contains("RAW_SUB_LOG_SHOULD_NOT_APPEAR"), + "Raw sub-job log lines must not appear when explanation is reused.\nActual log:\n" + log); + } + + /** + * Slow path — sub-job has no ErrorExplanationAction: when the downstream run has no + * {@link ErrorExplanationAction}, the parent falls back to raw log extraction and the + * sub-job's error output appears directly (no "[AI explanation from sub-job]" marker). + */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_subJobHasNoExplanationAction_rawLogExtractedAsFallback(JenkinsRule jenkins) throws Exception { + // Sub-job: fails but does NOT call explainError() — no ErrorExplanationAction + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-no-explanation"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"RAW_FALLBACK_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-fallback-to-raw"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-no-explanation', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "sub-no-explanation"); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + // Raw log must be present + assertTrue(log.contains("RAW_FALLBACK_MARKER"), + "Slow path must extract raw sub-job log.\nActual log:\n" + log); + // Fast-path marker must NOT appear + assertFalse(log.contains("[AI explanation from sub-job]"), + "Fast-path marker must not appear when no explanation exists.\nActual log:\n" + log); + } + + /** + * Downstream regex filter: when 
downstream collection is enabled but the job name + * does not match the configured pattern, the sub-job log must be skipped. + */ + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_patternMismatch_subJobFailureNotIncluded(JenkinsRule jenkins) throws Exception { + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "sub-job-pattern-miss"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"PATTERN_MISS_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "parent-pattern-miss"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'sub-job-pattern-miss', propagate: false\n" + + "currentBuild.result = 'FAILURE'", + true)); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, true, "other-job-.*"); + List lines = extractor.getFailedStepLog(); + String log = String.join("\n", lines); + + assertFalse(log.contains("PATTERN_MISS_MARKER"), + "Sub-job logs must be skipped when the job name does not match the regex.\nActual log:\n" + log); + assertFalse(log.contains("### Downstream Job: sub-job-pattern-miss"), + "No downstream section should be present for non-matching jobs.\nActual log:\n" + log); + } + + @Test + @DisabledOnOs(OS.WINDOWS) + void downstream_invisibleBuildStepJob_replacedWithHiddenPlaceholder(JenkinsRule jenkins) throws Exception { + Authentication viewer = configureReadAccess(jenkins, "viewer-build-step"); + + WorkflowJob subJob = jenkins.createProject(WorkflowJob.class, "hidden-build-step-sub"); + subJob.setDefinition(new CpsFlowDefinition( + "node { sh 'echo \"HIDDEN_BUILD_STEP_MARKER\" && exit 1' }", true)); + + WorkflowJob parentJob = jenkins.createProject(WorkflowJob.class, "visible-build-step-parent"); + parentJob.setDefinition(new CpsFlowDefinition( + "build job: 'hidden-build-step-sub', propagate: false\n" + + "currentBuild.result = 
'FAILURE'", + true)); + + grantItemRead(jenkins, "viewer-build-step", parentJob); + + WorkflowRun parentRun = jenkins.assertBuildStatus(Result.FAILURE, parentJob.scheduleBuild2(0)); + + String log; + try (ACLContext ignored = ACL.as2(viewer)) { + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, viewer, true, + "hidden-build-step-sub"); + log = String.join("\n", extractor.getFailedStepLog()); + } + + assertTrue(log.contains("### Downstream Job: [hidden] ###"), + "Unreadable downstream jobs should be represented by a hidden placeholder.\nActual log:\n" + log); + assertTrue(log.contains("Downstream failure details omitted due to permissions."), + "Hidden placeholder should explain why downstream details are missing.\nActual log:\n" + log); + assertFalse(log.contains("HIDDEN_BUILD_STEP_MARKER"), + "Hidden downstream logs must not be exposed.\nActual log:\n" + log); + } + + @Test + void downstream_invisibleUpstreamCauseJob_skippedByVisibilityFilter(JenkinsRule jenkins) throws Exception { + Authentication viewer = configureReadAccess(jenkins, "viewer-upstream-cause"); + + FreeStyleProject parentJob = jenkins.createFreeStyleProject("visible-upstream-parent"); + grantItemRead(jenkins, "viewer-upstream-cause", parentJob); + FreeStyleBuild parentRun = jenkins.buildAndAssertSuccess(parentJob); + + FreeStyleProject hiddenSubJob = jenkins.createFreeStyleProject("hidden-upstream-sub"); + hiddenSubJob.getBuildersList().add(new FailureBuilder()); + FreeStyleBuild hiddenSubRun = jenkins.assertBuildStatus(Result.FAILURE, + hiddenSubJob.scheduleBuild2(0, new Cause.UpstreamCause(parentRun))); + assertNotNull(hiddenSubRun, "Hidden downstream build should be created"); + + String log; + try (ACLContext ignored = ACL.as2(viewer)) { + PipelineLogExtractor extractor = new PipelineLogExtractor(parentRun, 500, viewer, true, + "hidden-upstream-sub"); + log = String.join("\n", extractor.getFailedStepLog()); + } + + assertFalse(log.contains("### Downstream Job:"), + 
"UpstreamCause fallback should skip unreadable downstream jobs entirely.\nActual log:\n" + log); + } + + private Authentication configureReadAccess(JenkinsRule jenkins, String username) { + jenkins.jenkins.setSecurityRealm(jenkins.createDummySecurityRealm()); + MockAuthorizationStrategy strategy = new MockAuthorizationStrategy() + .grant(Jenkins.READ) + .everywhere() + .to(username); + jenkins.jenkins.setAuthorizationStrategy(strategy); + return User.getById(username, true).impersonate2(); + } + + private void grantItemRead(JenkinsRule jenkins, String username, Item item) { + MockAuthorizationStrategy strategy = (MockAuthorizationStrategy) jenkins.jenkins.getAuthorizationStrategy(); + strategy.grant(Item.READ).onItems(item).to(username); + jenkins.jenkins.setAuthorizationStrategy(strategy); + } }