|
1 | 1 | import os |
2 | 2 | from typing import Any |
3 | 3 | from dateutil.parser import parse as parse_date |
| 4 | +from urllib.parse import urljoin |
4 | 5 |
|
5 | 6 | import httpx |
6 | 7 | from mcp.server.fastmcp import FastMCP |
7 | 8 |
|
8 | 9 | mcp = FastMCP("mcp-server") |
9 | 10 |
|
10 | 11 | PROW_URL = "https://prow.ci.openshift.org" |
| 12 | +GCS_URL = "https://gcsweb-ci.apps.ci.l2s4.p1.openshiftapps.com/gcs/test-platform-results/logs/" |
11 | 13 |
|
12 | 14 | async def make_request( |
13 | 15 | url: str, method: str = "GET", data: dict[str, Any] = None |
@@ -73,10 +75,96 @@ async def get_latest_job_run(job_name: str): |
73 | 75 | "start": status.get("startTime"), |
74 | 76 | "completion": status.get("completionTime"), |
75 | 77 | "url": status.get("url"), |
| 78 | + "build_id": status.get("build_id") |
76 | 79 | } |
77 | 80 | except Exception as e: |
78 | 81 | return {"error": f"Failed to fetch job info: {str(e)}"} |
79 | 82 |
|
80 | 83 |
|
@mcp.tool()
async def get_job_logs(job_id: str):
    """Get the logs for a specific Prow job ID.

    Looks the job up in the Prow job listing by its metadata name, then
    delegates to get_build_logs() with the resolved job name and build ID.

    Args:
        job_id: The ID (metadata.name) of the job to get logs for

    Returns:
        Dictionary containing the job logs or error information
    """
    url = f"{PROW_URL}/prowjobs.js"
    try:
        response = await make_request(url)
        if not response:
            return {"error": "No response from Prow API"}

        prowjobs = response.get("items", [])

        # Find the job whose metadata name matches the requested ID.
        # .get() chains guard against malformed entries that would otherwise
        # raise KeyError and surface as a confusing generic error below.
        matching_job = next(
            (
                job
                for job in prowjobs
                if job.get("metadata", {}).get("name") == job_id
            ),
            None,
        )

        if not matching_job:
            return {"error": f"No job found with ID: {job_id}"}

        # Resolve the coordinates needed to locate the build artifacts.
        status = matching_job.get("status", {})
        build_id = status.get("build_id")
        job_name = matching_job.get("spec", {}).get("job")

        if not build_id or not job_name:
            return {"error": "Could not find build ID or job name"}

        return await get_build_logs(job_name, build_id)

    except Exception as e:
        # Fixed copy-pasted message: this tool fetches logs, not job info.
        return {"error": f"Failed to fetch job logs: {str(e)}"}
| 123 | + |
| 124 | + |
@mcp.tool()
async def get_build_logs(job_name: str, build_id: str):
    """Get the logs for a specific build ID and job name.

    Fetches build-log.txt from the GCS artifacts listing for the build.

    Args:
        job_name: The name of the job
        build_id: The build ID to get logs for

    Returns:
        Dictionary containing the job logs or error information
    """
    # Build the URL before the try block so it is always bound in the
    # except handler (the original used a fragile `'x' in locals()` check).
    # GCS_URL already ends with '/'; strip it to avoid a '//' in the path.
    artifacts_url = f"{GCS_URL.rstrip('/')}/{job_name}/{build_id}/artifacts"
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(f"{artifacts_url}/build-log.txt")
            response.raise_for_status()
        return {
            "build_id": build_id,
            "job_name": job_name,
            "logs": response.text,
            "artifacts_url": artifacts_url,
        }
    except Exception as e:
        return {
            "error": f"Failed to fetch logs: {str(e)}",
            "artifacts_url": artifacts_url,
        }
| 155 | + |
| 156 | + |
@mcp.tool()
async def search_jobs(job: str):
    """Search Prow jobs by job name.

    Args:
        job: Job name to filter on; an empty string sends no filter.

    Returns:
        The Prow search API response, or error information from make_request.
    """
    # BUG FIX: the original referenced an undefined API_BASE_URL, which
    # raised NameError on every call; use the module-level PROW_URL base.
    url = urljoin(PROW_URL, "/search")

    # Only send the 'job' parameter when a filter was actually provided.
    params = {}
    if job:
        params["job"] = job
    return await make_request(url, data=params)
| 168 | + |
if __name__ == "__main__":
    # Start the MCP server; transport is taken from the MCP_TRANSPORT
    # environment variable and defaults to stdio.
    mcp.run(transport=os.environ.get("MCP_TRANSPORT", "stdio"))
0 commit comments