Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions src/anchore_security_cli/identifiers/aliases.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def normalize(cls, alias: str) -> str:
return alias

@classmethod
def from_list(cls, aliases: list[str]): # noqa: C901, PLR0912, PLR0915
def from_list(cls, aliases: list[str], provider: str | None = None): # noqa: C901, PLR0912, PLR0915
cve = set()
gcve = set()
github = set()
Expand Down Expand Up @@ -226,7 +226,10 @@ def from_list(cls, aliases: list[str]): # noqa: C901, PLR0912, PLR0915
elif a.startswith("PHSA-"):
photon.add(a)
else:
logging.warning(f"encountered unsupported alias: {a!r}")
message = f"encountered unsupported alias: {a!r}"
if provider:
message = f"{provider}: {message}"
logging.warning(message)

return Aliases(
cve=list(cve),
Expand Down
4 changes: 4 additions & 0 deletions src/anchore_security_cli/identifiers/providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from anchore_security_cli.identifiers.providers.cve5 import CVE5
from anchore_security_cli.identifiers.providers.debian import Debian
from anchore_security_cli.identifiers.providers.echo import Echo
from anchore_security_cli.identifiers.providers.gcve import GCVE
from anchore_security_cli.identifiers.providers.github import GitHub
from anchore_security_cli.identifiers.providers.go import Go
from anchore_security_cli.identifiers.providers.grypedb import GrypeDB, GrypeDBExtraCVEs
Expand All @@ -35,6 +36,7 @@
class Providers:
cve5: CVE5
github: GitHub
gcve: GCVE
chainguard: Chainguard
bitnami: Bitnami
psf: PSF
Expand Down Expand Up @@ -106,6 +108,7 @@ def fetch_all() -> Providers:
with ThreadPoolExecutor() as executor:
cve5 = executor.submit(CVE5)
github = executor.submit(GitHub)
gcve = executor.submit(GCVE)
openssf_malicious_packages = executor.submit(OpenSSFMaliciousPackages)
ubuntu = executor.submit(Ubuntu)
chainguard = executor.submit(Chainguard)
Expand Down Expand Up @@ -136,6 +139,7 @@ def fetch_all() -> Providers:
return Providers(
cve5=cve5.result(),
github=github.result(),
gcve=gcve.result(),
chainguard=chainguard.result(),
bitnami=bitnami.result(),
psf=psf.result(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("related", [])])
aliases = Aliases.from_list([record_id, *data.get("related", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def _fetch(self) -> list[ProviderRecord]:
ProviderRecord(
id=record_id,
published=self._parse_date(None),
aliases=Aliases.from_list([record_id, *aliases]),
aliases=Aliases.from_list([record_id, *aliases], provider=self.name),
),
)

Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/bellsoft.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
records.append(
ProviderRecord(
id=v,
aliases=Aliases.from_list([record_id]),
aliases=Aliases.from_list([record_id], provider=self.name),
published=self._parse_date(data.get("published")),
),
)
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/bitnami.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *self._parse_aliases(data)])
aliases = Aliases.from_list([record_id, *self._parse_aliases(data)], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/cpan.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def _fetch(self) -> list[ProviderRecord]:
ProviderRecord(
id=record_id,
published=published,
aliases=Aliases.from_list([record_id, *a.get("cves", [])]),
aliases=Aliases.from_list([record_id, *a.get("cves", [])], provider=self.name),
),
)

Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/cve5.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]: # noqa: C90
ProviderRecord(
id=record_id,
published=published,
aliases=Aliases.from_list(aliases),
aliases=Aliases.from_list(aliases, provider=self.name),
),
)

Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/debian.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/echo.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
71 changes: 71 additions & 0 deletions src/anchore_security_cli/identifiers/providers/gcve.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import logging

import orjson
import requests

from anchore_security_cli.identifiers.aliases import Aliases
from anchore_security_cli.identifiers.providers.provider import Provider, ProviderRecord


class GCVE(Provider):
    """Provider for GCVE (Global CVE) identifiers published by CIRCL.

    Downloads the NDJSON dump for each known GCVE Numbering Authority (GNA)
    and turns every record into a ``ProviderRecord`` whose aliases include
    the GCVE id itself and, when present, the associated upstream CVE id.
    """

    def __init__(self):
        # One NDJSON dump per GNA; each line of a dump is a standalone
        # JSON document in CVE5-like shape (top-level "cveMetadata").
        self._ndjson_urls = [
            "https://vulnerability.circl.lu/dumps/gna-1.ndjson",
            "https://vulnerability.circl.lu/dumps/gna-1337.ndjson",
        ]
        super().__init__(
            name="GCVE identifiers",
        )

    def _normalise_identifier(self, identifier: str) -> str:
        """Uppercase the identifier prefix, e.g. ``gcve-1-...`` -> ``GCVE-1-...``.

        Identifiers that contain no ``-`` separator are returned unchanged.
        """
        components = identifier.split("-", 1)
        if len(components) < 2:
            return identifier

        prefix = components[0].upper()
        return f"{prefix}-{components[1]}"

    def _fetch(self) -> list[ProviderRecord]:
        """Fetch and parse all configured NDJSON dumps into provider records.

        Raises:
            requests.HTTPError: when a dump URL responds with an error status.
        """
        records = []
        for url in self._ndjson_urls:
            r = requests.get(
                url,
                timeout=30,
                stream=True,
            )
            r.raise_for_status()

            for record in r.iter_lines():
                # iter_lines() can yield empty (blank/keep-alive) lines;
                # skip them rather than letting orjson raise on empty input.
                if not record:
                    continue

                gcve = orjson.loads(record)
                metadata = gcve.get("cveMetadata")

                if not metadata:
                    continue

                gcve_id = metadata.get("vulnId")
                if not gcve_id:
                    continue

                gcve_id = self._normalise_identifier(gcve_id)
                if not gcve_id.startswith("GCVE-"):
                    logging.warning(f"Skipping GCVE record from {url} with unexpected id: {gcve_id!r}")
                    continue

                # Some records also carry the upstream CVE id; include it as
                # an alias alongside the GCVE id.
                cve_id = metadata.get("cveId")
                aliases = [gcve_id]
                if cve_id:
                    cve_id = self._normalise_identifier(cve_id)
                    aliases.append(cve_id)

                published = metadata.get("datePublished")
                # BUG FIX: the stdlib logging module has no module-level
                # trace() function, so logging.trace(...) raised
                # AttributeError for every record processed. Use debug().
                logging.debug(f"processing GCVE record from {url} for {gcve_id}")

                records.append(
                    ProviderRecord(
                        id=gcve_id,
                        published=self._parse_date(published),
                        aliases=Aliases.from_list(aliases, provider=self.name),
                    ),
                )

        return records
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/github.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
ProviderRecord(
id=record_id,
published=published,
aliases=Aliases.from_list(aliases),
aliases=Aliases.from_list(aliases, provider=self.name),
),
)

Expand Down
4 changes: 2 additions & 2 deletions src/anchore_security_cli/identifiers/providers/go.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand All @@ -46,7 +46,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
record_id = data["id"]
cves = data.get("cves", [])
ghsas = data.get("ghsas", [])
aliases = Aliases.from_list(cves + ghsas)
aliases = Aliases.from_list(cves + ghsas, provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
4 changes: 2 additions & 2 deletions src/anchore_security_cli/identifiers/providers/grypedb.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def _fetch(self) -> list[ProviderRecord]:
records.append(
ProviderRecord(
id=record_id,
aliases=Aliases.from_list([record_id, *aliases]),
aliases=Aliases.from_list([record_id, *aliases], provider=self.name),
published=self._parse_date(row["published"]),
),
)
Expand Down Expand Up @@ -135,7 +135,7 @@ def _fetch(self) -> list[ProviderRecord]:
records.append(
ProviderRecord(
id=cve_id,
aliases=Aliases.from_list([cve_id]),
aliases=Aliases.from_list([cve_id], provider=self.name),
published=published,
),
)
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/julia.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/mageia.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("related", [])])
aliases = Aliases.from_list([record_id, *data.get("related", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/minimos.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

for v in generate_all_openeuler_id_variants(record_id):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

if not record_id.startswith("MAL-"):
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/psf.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/pypa.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = yaml.safe_load(f)

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = yaml.safe_load(f)

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/redhat.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("related", [])])
aliases = Aliases.from_list([record_id, *data.get("related", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/rustsec.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("aliases", [])])
aliases = Aliases.from_list([record_id, *data.get("aliases", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
4 changes: 2 additions & 2 deletions src/anchore_security_cli/identifiers/providers/suse.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down Expand Up @@ -57,7 +57,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
2 changes: 1 addition & 1 deletion src/anchore_security_cli/identifiers/providers/ubuntu.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def _process_fetch(self, content_dir: str) -> list[ProviderRecord]:
data = orjson.loads(f.read())

record_id = data["id"]
aliases = Aliases.from_list([record_id, *data.get("upstream", [])])
aliases = Aliases.from_list([record_id, *data.get("upstream", [])], provider=self.name)
published = self._parse_date(data.get("published"))

records.append(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def __init__(self):
# records.append(
# ProviderRecord(
# id=cve,
# aliases=Aliases.from_list([cve, cve_to_gcve(cve)]),
# aliases=Aliases.from_list([cve, cve_to_gcve(cve)], provider=self.name),
# published=self._parse_date(published),
# ),
# )
Expand Down
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.