-
Notifications
You must be signed in to change notification settings - Fork 52
Create pipeline to validate DBX JSON certificate references #225
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,204 @@ | ||
| # @file | ||
| # | ||
| # Copyright (c) Microsoft Corporation. | ||
| # SPDX-License-Identifier: BSD-2-Clause-Patent | ||
| ## | ||
| """Test the validate_dbx_references.py script. | ||
|
|
||
| This module contains unit tests for the DBX certificate reference validation functionality. | ||
| """ | ||
| import json | ||
| import pathlib | ||
| import tempfile | ||
|
|
||
| import pytest | ||
| from validate_dbx_references import validate_certificate_references | ||
|
|
||
|
|
||
def test_validate_certificate_references_no_certificates_section() -> None:
    """Test validation when JSON has no certificates section."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        # JSON payload deliberately lacks a "certificates" key.
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text(json.dumps({"images": {"x64": []}}))

        # An empty certificate store is sufficient for this case.
        cert_store = root / "Certificates"
        cert_store.mkdir()

        # Absence of the section counts as a successful validation.
        assert validate_certificate_references(dbx_json, cert_store) == []
|
|
||
|
|
||
def test_validate_certificate_references_empty_certificates() -> None:
    """Test validation when certificates section is empty."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        # "certificates" is present but holds no entries.
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text(json.dumps({"certificates": []}))

        cert_store = root / "Certificates"
        cert_store.mkdir()

        # An empty list means there is nothing to cross-check.
        assert validate_certificate_references(dbx_json, cert_store) == []
|
|
||
|
|
||
def test_validate_certificate_references_valid_certificates() -> None:
    """Test validation when all certificate references are valid."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        # Materialize the certificate files the JSON will point at.
        cert_store = root / "Certificates"
        cert_store.mkdir()
        for filename in ("cert1.cer", "cert2.der"):
            (cert_store / filename).touch()

        # Build two fully-populated certificate entries referencing those files.
        entries = [
            {
                "value": name,
                "subjectName": f"Test Subject {idx}",
                "issuerName": f"Test Issuer {idx}",
                "thumbprint": thumb,
                "description": f"Test certificate {idx}",
                "dateOfAddition": "2024-01-01",
            }
            for idx, (name, thumb) in enumerate(
                [("cert1.cer", "abc123"), ("cert2.der", "def456")], start=1
            )
        ]
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text(json.dumps({"certificates": entries}))

        # Every referenced file exists, so no errors are expected.
        assert validate_certificate_references(dbx_json, cert_store) == []
|
|
||
|
|
||
def test_validate_certificate_references_missing_certificates() -> None:
    """Test validation when some certificate references are missing."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        # Only the first referenced certificate actually exists on disk.
        cert_store = root / "Certificates"
        cert_store.mkdir()
        (cert_store / "cert1.cer").touch()

        entries = []
        for idx, name, thumb in ((1, "cert1.cer", "abc123"), (2, "missing_cert.cer", "def456")):
            entries.append({
                "value": name,
                "subjectName": f"Test Subject {idx}",
                "issuerName": f"Test Issuer {idx}",
                "thumbprint": thumb,
                "description": f"Test certificate {idx}",
                "dateOfAddition": "2024-01-01",
            })
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text(json.dumps({"certificates": entries}))

        # Exactly one entry should be reported as missing.
        errors = validate_certificate_references(dbx_json, cert_store)
        assert len(errors) == 1
        assert "missing_cert.cer" in errors[0]
        assert "not found" in errors[0]
|
|
||
|
|
||
def test_validate_certificate_references_missing_value_field() -> None:
    """Test validation when certificate entry is missing value field."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        cert_store = root / "Certificates"
        cert_store.mkdir()

        # Entry carries metadata but no "value" (filename) field.
        malformed_entry = {
            "subjectName": "Test Subject",
            "issuerName": "Test Issuer",
            "thumbprint": "abc123",
            "description": "Test certificate",
            "dateOfAddition": "2024-01-01",
        }
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text(json.dumps({"certificates": [malformed_entry]}))

        # The missing field should produce exactly one error message.
        errors = validate_certificate_references(dbx_json, cert_store)
        assert len(errors) == 1
        assert "missing 'value' field" in errors[0]
|
|
||
|
|
||
def test_validate_certificate_references_file_not_found() -> None:
    """Test validation when JSON file doesn't exist."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        cert_store = root / "Certificates"
        cert_store.mkdir()

        # The JSON path is never created on disk.
        missing_json = root / "nonexistent.json"

        # Opening a non-existent file must surface FileNotFoundError.
        with pytest.raises(FileNotFoundError):
            validate_certificate_references(missing_json, cert_store)
|
|
||
|
|
||
def test_validate_certificate_references_invalid_json() -> None:
    """Test validation when JSON file is malformed."""
    with tempfile.TemporaryDirectory() as workdir:
        root = pathlib.Path(workdir)

        cert_store = root / "Certificates"
        cert_store.mkdir()

        # Deliberately unparseable content.
        dbx_json = root / "dbx_info_msft_01_01_24.json"
        dbx_json.write_text("{ invalid json }")

        # Malformed JSON must propagate as json.JSONDecodeError.
        with pytest.raises(json.JSONDecodeError):
            validate_certificate_references(dbx_json, cert_store)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,175 @@ | ||
| # @file | ||
| # | ||
| # Copyright (c) Microsoft Corporation. | ||
| # SPDX-License-Identifier: BSD-2-Clause-Patent | ||
| ## | ||
| """Script to validate that DBX JSON files reference existing certificate files. | ||
|
|
||
| This script reads the latest DBX JSON file and validates that all certificate | ||
| files referenced in the "certificates" array actually exist in the | ||
| PreSignedObjects/DBX/Certificates folder. | ||
| """ | ||
| import argparse | ||
| import json | ||
| import logging | ||
| import pathlib | ||
| import sys | ||
| from typing import List | ||
|
|
||
|
|
||
def get_latest_dbx_info_file(dbx_directory: pathlib.Path) -> pathlib.Path:
    """Get the latest DBX info JSON file from the specified directory.

    Filenames encode their date as ``dbx_info_msft_<month>_<day>_<year>.json``,
    so recency must be decided by comparing (year, month, day) — not the raw
    filename component order, which would rank December 2023 above January 2024.

    Args:
        dbx_directory (pathlib.Path): The directory path to search for DBX JSON files.

    Returns:
        pathlib.Path: The path to the latest DBX info JSON file.

    Raises:
        FileNotFoundError: If no DBX info JSON files are found in the specified
            directory, or if a filename's date components cannot be parsed.
    """
    # Look specifically for dbx_info_msft_*.json files
    dbx_files = list(dbx_directory.glob("dbx_info_msft_*.json"))
    if not dbx_files:
        raise FileNotFoundError("No DBX info JSON files found in the specified directory.")

    def _date_key(path: pathlib.Path) -> tuple:
        # Filename stem ends in <month>_<day>_<year>; reorder to year-first so
        # tuple comparison matches chronological order across year boundaries.
        month, day, year = (int(part) for part in path.stem.split("_")[-3:])
        return (year, month, day)

    try:
        return max(dbx_files, key=_date_key)
    except (ValueError, IndexError) as e:
        # Chain the cause so the unparseable filename is visible in tracebacks.
        raise FileNotFoundError(f"Could not parse date from DBX info filenames: {e}") from e
|
|
||
|
|
||
def validate_certificate_references(dbx_json_path: pathlib.Path, certificates_dir: pathlib.Path) -> List[str]:
    """Validate that certificate references in DBX JSON exist in the certificates directory.

    Args:
        dbx_json_path (pathlib.Path): Path to the DBX JSON file
        certificates_dir (pathlib.Path): Path to the certificates directory

    Returns:
        List[str]: List of error messages for missing certificates (empty if all exist)

    Raises:
        FileNotFoundError: If the DBX JSON file doesn't exist
        json.JSONDecodeError: If the JSON file is malformed
    """
    errors: List[str] = []

    # Load the DBX JSON file. Explicit UTF-8: without it, decoding falls back
    # to the locale encoding (e.g. cp1252 on Windows) and can corrupt the read.
    with dbx_json_path.open("r", encoding="utf-8") as f:
        dbx_data = json.load(f)

    # Check if certificates section exists
    if 'certificates' not in dbx_data:
        logging.info("No 'certificates' section found in DBX JSON file - validation passed")
        return errors

    certificates = dbx_data['certificates']
    if not certificates:
        logging.info("Empty 'certificates' section found in DBX JSON file - validation passed")
        return errors

    # Lazy %-style args so formatting only happens when the level is enabled.
    logging.info("Found %d certificate references to validate", len(certificates))

    # Validate each certificate reference
    for i, cert_entry in enumerate(certificates):
        if 'value' not in cert_entry:
            errors.append(f"Certificate entry {i} missing 'value' field")
            continue

        cert_filename = cert_entry['value']
        cert_path = certificates_dir / cert_filename

        if not cert_path.exists():
            errors.append(f"Certificate file '{cert_filename}' referenced in JSON but not found in {certificates_dir}")
            logging.error("Missing certificate: %s", cert_filename)
        else:
            logging.info("Certificate found: %s", cert_filename)

    return errors
|
|
||
|
|
||
def main() -> None:
    """Main function to handle command-line arguments and validate DBX certificate references."""
    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')

    parser = argparse.ArgumentParser(
        description="Validate that DBX JSON files reference existing certificate files."
    )
    parser.add_argument(
        "dbx_directory",
        help="Path to the PreSignedObjects/DBX directory",
        type=pathlib.Path
    )
    parser.add_argument(
        "--json-file",
        help="Specific DBX JSON file to validate (default: latest dbx_info_msft_*.json)",
        type=pathlib.Path
    )

    args = parser.parse_args()

    # Guard clauses: bail out early if the expected directory layout is absent.
    if not args.dbx_directory.is_dir():
        logging.error(f"DBX directory does not exist: {args.dbx_directory}")
        sys.exit(1)

    certificates_dir = args.dbx_directory / "Certificates"
    if not certificates_dir.is_dir():
        logging.error(f"Certificates directory does not exist: {certificates_dir}")
        sys.exit(1)

    # Resolve which JSON file to check: an explicit --json-file wins,
    # otherwise fall back to the newest dbx_info_msft_*.json in the directory.
    if args.json_file:
        dbx_json_path = args.json_file
        if not dbx_json_path.is_absolute():
            dbx_json_path = args.dbx_directory / dbx_json_path
    else:
        try:
            dbx_json_path = get_latest_dbx_info_file(args.dbx_directory)
        except FileNotFoundError as e:
            logging.error(f"No DBX JSON files found in {args.dbx_directory}: {e}")
            sys.exit(1)
        logging.info(f"Using latest DBX JSON file: {dbx_json_path.name}")

    if not dbx_json_path.exists():
        logging.error(f"DBX JSON file does not exist: {dbx_json_path}")
        sys.exit(1)

    try:
        # Perform validation
        errors = validate_certificate_references(dbx_json_path, certificates_dir)

        if not errors:
            logging.info("All certificate references validated successfully!")
        else:
            logging.error("Certificate reference validation failed:")
            for message in errors:
                logging.error(f"  - {message}")

            # Show what is actually on disk to make the mismatch easy to debug.
            on_disk = list(certificates_dir.glob("*"))
            if on_disk:
                logging.info("Available certificate files:")
                for cert_file in on_disk:
                    logging.info(f"  - {cert_file.name}")
            else:
                logging.warning("No certificate files found in the certificates directory")

            sys.exit(1)

    except (FileNotFoundError, json.JSONDecodeError) as e:
        logging.error(f"Error reading DBX JSON file: {e}")
        sys.exit(1)
    except Exception as e:
        logging.error(f"Unexpected error during validation: {e}")
        sys.exit(1)
|
|
||
|
|
||
| if __name__ == "__main__": | ||
| main() | ||
|
|
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.