Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions datadog_checks_dev/changelog.d/23584.fixed
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixed detection of unpinned dependencies for the integrations-extras and marketplace repos
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def verify_base_dependency(source, check_name, dependency, force_pinned=True, mi
return not failed


def verify_dependency(source, name, python_versions, file):
def verify_dependency(source, name, python_versions, file, repo_core):
for dependency_definitions in python_versions.values():
# Identify dependencies that are defined multiple times for the same set of environment markers
requirements = [Requirement(dep) for dep in dependency_definitions]
Expand Down Expand Up @@ -143,29 +143,30 @@ def verify_dependency(source, name, python_versions, file):

return True

if not specifier_set:
message = f'Unpinned version found for dependency `{name}`: {format_check_usage(checks, source)}'
echo_failure(message)
annotate_error(file, message)
return False
elif len(specifier_set) > 1:
message = (
f'Multiple unstable version pins `{specifier_set}` found for dependency `{name}` '
f'(use a single == explicitly): {format_check_usage(checks, source)}'
)
echo_failure(message)
annotate_error(file, message)
return False

specifier = get_next(specifier_set)
if specifier.operator != '==':
message = (
f'Unstable version pin `{specifier}` found for dependency `{name}` '
f'(use == explicitly): {format_check_usage(checks, source)}'
)
echo_failure(message)
annotate_error(file, message)
return False
if repo_core:
if not specifier_set:
message = f'Unpinned version found for dependency `{name}`: {format_check_usage(checks, source)}'
echo_failure(message)
annotate_error(file, message)
return False
elif len(specifier_set) > 1:
message = (
f'Multiple unstable version pins `{specifier_set}` found for dependency `{name}` '
f'(use a single == explicitly): {format_check_usage(checks, source)}'
)
echo_failure(message)
annotate_error(file, message)
return False

specifier = get_next(specifier_set)
if specifier.operator != '==':
message = (
f'Unstable version pin `{specifier}` found for dependency `{name}` '
f'(use == explicitly): {format_check_usage(checks, source)}'
)
echo_failure(message)
annotate_error(file, message)
return False

return True

Expand All @@ -184,7 +185,7 @@ def dep(check, require_base_check_version, min_base_check_version):

\b
* Verify the uniqueness of dependency versions across all checks, or optionally a single check
* Verify all the dependencies are pinned.
* Verify all the dependencies are pinned when validating from integrations-core.
* Verify the embedded Python environment defined in the base check and requirements
listed in every integration are compatible.
* Verify each check specifies a `CHECKS_BASE_REQ` variable for `datadog-checks-base` requirement
Expand All @@ -197,6 +198,9 @@ def dep(check, require_base_check_version, min_base_check_version):
agent_dependencies, agent_errors = read_agent_dependencies()
agent_dependencies_file = get_agent_requirements()
annotate_errors(agent_dependencies_file, agent_errors)
ctx = click.get_current_context()
repo_core = ctx.obj['repo_choice'] == 'core'

if agent_errors:
for agent_error in agent_errors:
echo_failure(agent_error)
Expand Down Expand Up @@ -225,18 +229,17 @@ def dep(check, require_base_check_version, min_base_check_version):
echo_failure(check_error)

for name, versions in sorted(check_dependencies.items()):
if not verify_dependency('Checks', name, versions, req_source):
failed = True

if name not in agent_dependencies:
if not verify_dependency('Checks', name, versions, req_source, repo_core):
failed = True
message = (
f'Dependency {name} found in the {check_name} integration requirements '
'but not in the agent requirements, run `ddev dep freeze` to sync them.'
)
echo_failure(message)
annotate_error(req_source, message)

if repo_core:
if name not in agent_dependencies:
failed = True
message = (
f'Dependency {name} found in the {check_name} integration requirements '
'but not in the agent requirements, run `ddev dep freeze` to sync them.'
)
echo_failure(message)
annotate_error(req_source, message)
check_base_dependencies, check_base_errors = read_check_base_dependencies(checks)
check_dependencies, check_errors = read_check_dependencies(checks)

Expand All @@ -247,17 +250,18 @@ def dep(check, require_base_check_version, min_base_check_version):
failed = True

for name, python_versions in sorted(agent_dependencies.items()):
if not verify_dependency('Agent', name, python_versions, agent_dependencies_file):
if not verify_dependency('Agent', name, python_versions, agent_dependencies_file, repo_core):
failed = True

# Check that this dependency defined on the agent requirements is actually used
# This only makes sense when we take all check dependencies into account
if check is None and name not in check_dependencies:
failed = True
message = f'Stale dependency needs to be removed by syncing: {name}'
echo_failure(message)
annotate_error(agent_dependencies_file, message)
continue
if repo_core:
if check is None and name not in check_dependencies:
failed = True
message = f'Stale dependency needs to be removed by syncing: {name}'
echo_failure(message)
annotate_error(agent_dependencies_file, message)
continue

# Look for version mismatches for this dependency against individual checks
agent_dependency_definitions = get_dependency_set(python_versions)
Expand Down
126 changes: 116 additions & 10 deletions ddev/tests/cli/validate/test_dep.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
# Licensed under a 3-clause BSD style license (see LICENSE)
import re

import pytest

from tests.helpers.api import write_file

error_regex = re.compile(r"(?s)^\s*[A-Za-z0-9_\/.-]+\.toml has the following errors:\n(?: - .+\n)+")
Expand Down Expand Up @@ -42,7 +44,8 @@ def test_invalid_third_party_integration(fake_repo, ddev):
assert error_regex.search(result.output), f"Unexpected output: {result.output}"


def test_multiple_invalid_third_party_integrations(fake_repo, ddev):
@pytest.mark.parametrize('check_name', ['bad_check_2', 'bad_check_3'])
def test_multiple_invalid_third_party_integrations(fake_repo, ddev, check_name):
write_file(
fake_repo.path / 'bad_check_2',
'pyproject.toml',
Expand All @@ -67,15 +70,13 @@ def test_multiple_invalid_third_party_integrations(fake_repo, ddev):
""",
)

result = ddev('validate', 'dep', 'bad_check_2')
result_2 = ddev('validate', 'dep', 'bad_check_3')
result = ddev('validate', 'dep', check_name)
assert result.exit_code == 1
assert error_regex.search(result.output), f"Unexpected output: {result.output}"
assert result_2.exit_code == 1
assert error_regex.search(result_2.output), f"Unexpected output: {result_2.output}"


def test_one_valid_one_invalid_integration(fake_repo, ddev):
@pytest.mark.parametrize('check_name', ['valid_check_2', 'bad_check_4'])
def test_one_valid_one_invalid_integration(fake_repo, ddev, check_name):
write_file(
fake_repo.path / 'valid_check_2',
'pyproject.toml',
Expand All @@ -99,9 +100,114 @@ def test_one_valid_one_invalid_integration(fake_repo, ddev):
""",
)

result = ddev('validate', 'dep', 'valid_check_2')
result_2 = ddev('validate', 'dep', 'bad_check_4')
result = ddev('validate', 'dep', check_name)
if check_name == 'valid_check_2':
assert result.exit_code == 0
assert match_regex.match(result.output), f"Unexpected output: {result.output}"
else:
assert result.exit_code == 1
assert error_regex.search(result.output), f"Unexpected output: {result.output}"


UNPINNED_OPT_CHECK = 'unpinned_opt_check'

UNPINNED_OPTIONAL_DEP = 'acme-unpinned-lib'
PYPROJECT_UNPINNED_OPTIONAL = f"""
[project]
dependencies = [
"datadog-checks-base>=37.21.0",
]

[project.optional-dependencies]
libs = [
"{UNPINNED_OPTIONAL_DEP}",
]
"""


def _write_git_dep_check(repo_path, check_name: str) -> None:
    """Write agent requirements plus a check whose optional dependency is a pinned git URL."""
    pinned_git_requirement = 'sample_git_pkg @ git+https://github.com/pypa/pip.git@24.0'

    # The agent requirements file carries the same git requirement as the check.
    agent_requirements = f"""datadog-checks-base==37.21.0
{pinned_git_requirement}
"""
    write_file(repo_path, 'agent_requirements.in', agent_requirements)

    pyproject_contents = f"""
[project]
dependencies = [
    "datadog-checks-base>=37.21.0",
]

[project.optional-dependencies]
libs = [
    "{pinned_git_requirement}",
]
"""
    write_file(repo_path / check_name, 'pyproject.toml', pyproject_contents)


def test_core_rejects_unpinned_optional_dependency(fake_repo, ddev):
    """With `-c`, unpinned PyPI deps under optional-dependencies fail validation."""
    # Pin the dependency in the agent requirements so only the check's optional dep is unpinned.
    agent_requirements = f"""datadog-checks-base==37.21.0
{UNPINNED_OPTIONAL_DEP}==1.0.0
"""
    write_file(fake_repo.path, 'agent_requirements.in', agent_requirements)
    write_file(fake_repo.path, f'{UNPINNED_OPT_CHECK}/pyproject.toml', PYPROJECT_UNPINNED_OPTIONAL)

    result = ddev('-c', 'validate', 'dep', UNPINNED_OPT_CHECK)

    assert result.exit_code == 1
    assert 'Unpinned version' in result.output

Comment thread
ddog-nasirthomas marked this conversation as resolved.

def test_core_rejects_unpinned_optional_dependency_default_repo(fake_repo, ddev):
    """Default repo is core: unpinned optional PyPI deps fail without passing `-c`."""
    # Same fixture layout as the `-c` test; only the CLI invocation differs.
    agent_requirements = f"""datadog-checks-base==37.21.0
{UNPINNED_OPTIONAL_DEP}==1.0.0
"""
    write_file(fake_repo.path, 'agent_requirements.in', agent_requirements)
    write_file(fake_repo.path, f'{UNPINNED_OPT_CHECK}/pyproject.toml', PYPROJECT_UNPINNED_OPTIONAL)

    result = ddev('validate', 'dep', UNPINNED_OPT_CHECK)

    assert result.exit_code == 1
    assert 'Unpinned version' in result.output


@pytest.mark.parametrize(
    'repo_fixture, flag',
    [
        pytest.param('fake_extras_repo', '-e', id='extras'),
        pytest.param('fake_marketplace_repo', '-m', id='marketplace'),
    ],
)
def test_non_core_repo_allows_unpinned_optional_dependency(repo_fixture, flag, ddev, request):
    """Non-core repos allow unpinned optional PyPI deps (no integrations-core pin rules)."""
    # Resolve the repo fixture by name so one test body covers both non-core repos.
    repo = request.getfixturevalue(repo_fixture)

    agent_requirements = """datadog-checks-base>=37.21.0
"""
    write_file(repo.path, 'agent_requirements.in', agent_requirements)
    write_file(repo.path, f'{UNPINNED_OPT_CHECK}/pyproject.toml', PYPROJECT_UNPINNED_OPTIONAL)

    result = ddev(flag, 'validate', 'dep', UNPINNED_OPT_CHECK)

    assert result.exit_code == 0, result.output
    assert 'Unpinned version' not in result.output
    assert match_regex.match(result.output), f"Unexpected output: {result.output}"


def test_validate_dep_git_url_succeeds_on_core(fake_repo, ddev):
    """Git URL deps return early from verify_dependency and are not treated as unpinned PyPI."""
    check_name = 'git_dep_check'
    _write_git_dep_check(fake_repo.path, check_name)

    result = ddev('-c', 'validate', 'dep', check_name)

    assert result.exit_code == 0
    assert 'Unpinned version' not in result.output
    assert match_regex.match(result.output), f"Unexpected output: {result.output}"
assert result_2.exit_code == 1
assert error_regex.search(result_2.output), f"Unexpected output: {result_2.output}"
Loading