Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,8 @@ repos:
- --markdown-linebreak-ext=md
exclude: >
(?x)^(
pkg/macos/pkg-resources/.*\.rtf
pkg/macos/pkg-resources/.*\.rtf|
pkg/patches/.*\.patch
)$

- id: mixed-line-ending # Replaces or checks mixed line ending.
Expand Down
31 changes: 31 additions & 0 deletions pkg/patches/pip-urllib3/_version.py.patch
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
--- a/pip/_vendor/urllib3/_version.py
+++ b/pip/_vendor/urllib3/_version.py
@@ -1,2 +1,26 @@
-# This file is protected via CODEOWNERS
-__version__ = "1.26.20"
+# This file is a Salt-maintained security patch of pip's vendored urllib3.
+#
+# The underlying code is urllib3 1.26.20 (the version vendored by pip 25.2)
+# with the following CVE fixes backported from upstream urllib3 2.6.3:
+#
+# CVE-2025-66418 (GHSA-gm62-xv2j-4w53): Unbounded Content-Encoding
+# decompression chain -- MultiDecoder now enforces a 5-link limit.
+# Upstream fix: urllib3 2.6.0 (commit 24d7b67).
+#
+# CVE-2026-21441 (GHSA-38jv-5279-wg99): drain_conn unnecessarily
+# decompressed the full body of HTTP redirect responses, creating a
+# decompression-bomb vector. Fixed by adding _has_decoded_content
+# tracking and only decoding in drain_conn when decoding was already
+# in progress.
+# Upstream fix: urllib3 2.6.3 (commit 8864ac4).
+#
+# CVE-2025-66471 (GHSA-2xpw-w6gg-jr37): Decompression bomb in the
+# streaming API via max_length parameter. NOT backported -- requires a
+# full 2.x streaming infrastructure refactor. Ubuntu did not backport
+# this to 1.26.x either. pip maintainers confirmed pip is not
+# affected because all pip network calls use decode_content=False.
+#
+# The version string "2.6.3" reflects the highest upstream release from
+# which fixes have been backported. The underlying API remains urllib3
+# 1.26.x -- this is NOT a port to urllib3 2.x.
+__version__ = "2.6.3"
64 changes: 64 additions & 0 deletions pkg/patches/pip-urllib3/response.py.patch
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
--- a/pip/_vendor/urllib3/response.py
+++ b/pip/_vendor/urllib3/response.py
@@ -129,8 +129,18 @@
they were applied.
"""

+ # Maximum allowed number of chained HTTP encodings in the
+ # Content-Encoding header. CVE-2025-66418 (GHSA-gm62-xv2j-4w53).
+ max_decode_links = 5
+
def __init__(self, modes):
- self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+ encodings = [m.strip() for m in modes.split(",")]
+ if len(encodings) > self.max_decode_links:
+ raise DecodeError(
+ "Too many content encodings in the chain: "
+ "%d > %d" % (len(encodings), self.max_decode_links)
+ )
+ self._decoders = [_get_decoder(e) for e in encodings]

def flush(self):
return self._decoders[0].flush()
@@ -222,6 +232,9 @@
self.reason = reason
self.strict = strict
self.decode_content = decode_content
+ # CVE-2026-21441: tracks whether content decoding has been
+ # initiated so drain_conn can skip decompression on redirects.
+ self._has_decoded_content = False
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
@@ -286,7 +299,11 @@
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
- self.read()
+ self.read(
+ # CVE-2026-21441: Do not spend resources decoding the
+ # content unless decoding has already been initiated.
+ decode_content=self._has_decoded_content,
+ )
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass

@@ -394,11 +411,18 @@
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
+ # CVE-2026-21441: guard against toggling after decoding started.
+ if self._has_decoded_content:
+ raise RuntimeError(
+ "Calling read(decode_content=False) is not supported after "
+ "read(decode_content=True) was called."
+ )
return data

try:
if self._decoder:
data = self._decoder.decompress(data)
+ self._has_decoded_content = True
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
13 changes: 7 additions & 6 deletions salt/utils/http.py
Original file line number Diff line number Diff line change
Expand Up @@ -636,12 +636,13 @@ def query(
except salt.ext.tornado.httpclient.HTTPError as exc:
ret["status"] = exc.code
ret["error"] = str(exc)
ret["body"], _ = _decode_result(
exc.response.body,
exc.response.headers,
backend,
decode_body=decode_body,
)
if exc.response is not None:
ret["body"], _ = _decode_result(
exc.response.body,
exc.response.headers,
backend,
decode_body=decode_body,
)
return ret
except (socket.herror, OSError, TimeoutError, socket.gaierror) as exc:
if status is True:
Expand Down
91 changes: 91 additions & 0 deletions tests/pytests/pkg/integration/test_pip_urllib3_patch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
import pathlib
import re
import subprocess
import zipfile

import pytest

PATCHED_URLLIB3_VERSION = "2.6.3"


@pytest.fixture(autouse=True)
def skip_on_prev_version(install_salt):
    """
    Auto-applied fixture: skip every test in this module when the target is
    the previous (downgraded) Salt release, which lacks the CVE backports.
    """
    if not install_salt.use_prev_version:
        return
    pytest.skip("urllib3 CVE patch is not present in the previous Salt version")


def _site_packages(install_salt) -> pathlib.Path:
    """Return the site-packages directory for the installed Salt Python."""
    cmd = install_salt.binary_paths["python"] + [
        "-c",
        "import pip, pathlib; print(pathlib.Path(pip.__file__).parent.parent)",
    ]
    # Ask the installed interpreter itself, so we locate *its* site-packages
    # rather than the one belonging to the test runner's Python.
    proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
    assert proc.returncode == 0, proc.stderr
    return pathlib.Path(proc.stdout.strip())


def test_pip_vendored_urllib3_version(install_salt):
    """
    Verify that pip's vendored urllib3 in the installed Salt package
    reports the security-patched version string.
    """
    cmd = install_salt.binary_paths["python"] + [
        "-c",
        "import pip._vendor.urllib3; print(pip._vendor.urllib3.__version__)",
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True, check=False)
    assert proc.returncode == 0, proc.stderr
    reported = proc.stdout.strip()
    assert (
        reported == PATCHED_URLLIB3_VERSION
    ), f"pip's vendored urllib3 is {reported!r}; expected {PATCHED_URLLIB3_VERSION!r}"


def test_virtualenv_embedded_pip_wheel_urllib3_version(install_salt):
    """
    Verify that the pip wheel bundled inside virtualenv's seed/wheels/embed
    directory also contains the security-patched urllib3. New virtualenvs
    seeded from this wheel will inherit the CVE fixes.
    """
    embed_dir = (
        _site_packages(install_salt) / "virtualenv" / "seed" / "wheels" / "embed"
    )
    if not embed_dir.is_dir():
        pytest.skip(f"virtualenv embed directory not found: {embed_dir}")

    wheels = sorted(embed_dir.glob("pip-*.whl"))
    if not wheels:
        pytest.skip(f"No pip wheel found in {embed_dir}")

    # Highest-sorting wheel name is the one virtualenv will seed from.
    pip_wheel = wheels[-1]
    with zipfile.ZipFile(pip_wheel) as zf:
        try:
            content = zf.read("pip/_vendor/urllib3/_version.py").decode("utf-8")
        except KeyError:
            pytest.fail(
                f"pip/_vendor/urllib3/_version.py not found inside {pip_wheel.name}"
            )

    match = re.search(r'^__version__\s*=\s*["\']([^"\']+)["\']', content, re.MULTILINE)
    assert match, f"Could not parse __version__ from {pip_wheel.name}"
    version = match.group(1)
    assert version == PATCHED_URLLIB3_VERSION, (
        f"Embedded pip wheel {pip_wheel.name} contains urllib3 {version!r}; "
        f"expected {PATCHED_URLLIB3_VERSION!r}"
    )
23 changes: 23 additions & 0 deletions tests/pytests/unit/utils/test_http.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,29 @@ def test_query_error_handling():
assert isinstance(ret.get("error", None), str)


def test_query_tornado_httperror_no_response():
    """
    Tests that http.query handles a Tornado HTTPError where exc.response is None.
    This happens on connection-level failures such as a connect timeout (HTTP 599)
    where no HTTP response is ever received from the server.
    """
    import salt.ext.tornado.httpclient

    err = salt.ext.tornado.httpclient.HTTPError(599, "Timeout while connecting")
    # Connection-level failures carry no HTTP response object.
    assert err.response is None

    client = MagicMock()
    client.fetch.side_effect = err

    with patch("salt.utils.http.HTTPClient", return_value=client):
        result = http.query("https://example.com/test", backend="tornado")

    assert isinstance(result, dict)
    assert result.get("status") == 599
    assert "Timeout while connecting" in result.get("error", "")
    # With no response available, no body key should be populated.
    assert "body" not in result


def test_parse_cookie_header():
header = "; ".join(
[
Expand Down
Loading
Loading