diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a6ccfadc..35ca9354 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,10 +26,9 @@ jobs:
python-version: ${{ matrix.python }}
allow-prereleases: true
- - name: Install uv
- uses: astral-sh/setup-uv@v7
+ - uses: astral-sh/setup-uv@v7
with:
- version: "0.9.10"
+ version: "0.9.18"
- name: Run tests
- run: uv run --extra solrcloud -- python run-tests.py
+ run: uv run --extra=solrcloud --with=pytest -- python run-tests.py
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 207e466d..fc278703 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
- tomli
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.14.7
+ rev: v0.14.9
hooks:
- id: ruff-check
args: [ --fix ]
@@ -53,6 +53,6 @@ repos:
- id: validate-pyproject
- repo: https://github.com/astral-sh/uv-pre-commit
- rev: 0.9.14
+ rev: 0.9.18
hooks:
- id: uv-lock
diff --git a/pyproject.toml b/pyproject.toml
index ffcd4c1f..a342110a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,6 +33,7 @@ urls.Homepage = "https://github.com/django-haystack/pysolr/"
dev = [
"coverage",
"pre-commit>=4.4",
+ "pytest",
]
[tool.hatch.build.targets.sdist]
@@ -100,8 +101,6 @@ lint.ignore = [
"PGH004",
"PLR5501",
"PLW2901",
- "PT009",
- "PT027",
"PTH123",
"RET505",
"RET506",
@@ -115,6 +114,8 @@ lint.per-file-ignores."tests/*" = [
]
lint.mccabe.max-complexity = 16
lint.pylint.allow-magic-value-types = [
+ "bytes",
+ "float",
"int",
"str",
]
diff --git a/run-tests.py b/run-tests.py
index ef751844..c70eed76 100755
--- a/run-tests.py
+++ b/run-tests.py
@@ -2,6 +2,7 @@
import subprocess
import unittest
+from pathlib import Path
def main():
@@ -10,6 +11,13 @@ def main():
subprocess.run(["./solr-docker-test-env.sh", "setup"], check=True)
print("→ Running unit test suite...")
+ old_path = Path("tests/__init__.py")
+ new_path = old_path.with_name("z__init__.py")
+ old_path.rename(new_path)  # rename tests/__init__.py to avoid duplicate tests
+ try:
+     subprocess.run(["pytest"], check=True)  # noqa: S607
+ finally:
+     new_path.rename(old_path)  # always restore, even when pytest fails
unittest.main(module="tests", verbosity=1)
finally:
diff --git a/tests/test_admin.py b/tests/test_admin.py
index e909ab08..43d9fc7e 100644
--- a/tests/test_admin.py
+++ b/tests/test_admin.py
@@ -2,6 +2,8 @@
import json
import unittest
+import pytest
+
from pysolr import SolrCoreAdmin, SolrError
@@ -49,28 +51,28 @@ def test_status(self):
raw_all = self.solr_admin.status()
all_data = json.loads(raw_all)
- self.assertIn("core0", all_data["status"])
+ assert "core0" in all_data["status"]
# Status of a specific core
raw_single = self.solr_admin.status(core="core0")
single_data = json.loads(raw_single)
- self.assertEqual(single_data["status"]["core0"]["name"], "core0")
+ assert single_data["status"]["core0"]["name"] == "core0"
def test_create(self):
"""Test creating a core returns a successful response."""
raw_response = self.solr_admin.create("demo_core1")
data = json.loads(raw_response)
- self.assertEqual(data["responseHeader"]["status"], 0)
- self.assertEqual(data["core"], "demo_core1")
+ assert data["responseHeader"]["status"] == 0
+ assert data["core"] == "demo_core1"
def test_reload(self):
"""Test reloading a core returns a successful response."""
raw_response = self.solr_admin.reload("core0")
data = json.loads(raw_response)
- self.assertEqual(data["responseHeader"]["status"], 0)
+ assert data["responseHeader"]["status"] == 0
def test_rename(self):
"""Test renaming a core succeeds and the new name appears in the status."""
@@ -82,13 +84,13 @@ def test_rename(self):
raw_response = self.solr_admin.rename("demo_core1", "demo_core2")
data = json.loads(raw_response)
- self.assertEqual(data["responseHeader"]["status"], 0)
+ assert data["responseHeader"]["status"] == 0
# Verify that the renamed core appears in the status response
raw_response2 = self.solr_admin.status(core="demo_core2")
data2 = json.loads(raw_response2)
- self.assertEqual(data2["status"]["demo_core2"]["name"], "demo_core2")
+ assert data2["status"]["demo_core2"]["name"] == "demo_core2"
def test_swap(self):
"""
@@ -110,7 +112,7 @@ def test_swap(self):
raw_swap = self.solr_admin.swap("demo_core1", "demo_core2")
swap_data = json.loads(raw_swap)
- self.assertEqual(swap_data["responseHeader"]["status"], 0)
+ assert swap_data["responseHeader"]["status"] == 0
def test_unload(self):
"""
@@ -124,18 +126,19 @@ def test_unload(self):
raw_response = self.solr_admin.unload("demo_core1")
data = json.loads(raw_response)
- self.assertEqual(data["responseHeader"]["status"], 0)
+ assert data["responseHeader"]["status"] == 0
def test_load(self):
- self.assertRaises(NotImplementedError, self.solr_admin.load, "wheatley")
+ with pytest.raises(NotImplementedError):
+ self.solr_admin.load("wheatley")
def test_status__nonexistent_core_returns_empty_response(self):
"""Test that requesting status for a missing core returns an empty response."""
raw_response = self.solr_admin.status(core="not_exists")
data = json.loads(raw_response)
- self.assertNotIn("name", data["status"]["not_exists"])
- self.assertNotIn("instanceDir", data["status"]["not_exists"])
+ assert "name" not in data["status"]["not_exists"]
+ assert "instanceDir" not in data["status"]["not_exists"]
def test_create__existing_core_raises_error(self):
"""Test creating a core that already exists returns a 500 error."""
@@ -147,10 +150,8 @@ def test_create__existing_core_raises_error(self):
raw_response = self.solr_admin.create("demo_core1")
data = json.loads(raw_response)
- self.assertEqual(data["responseHeader"]["status"], 500)
- self.assertEqual(
- data["error"]["msg"], "Core with name 'demo_core1' already exists."
- )
+ assert data["responseHeader"]["status"] == 500
+ assert data["error"]["msg"] == "Core with name 'demo_core1' already exists."
def test_reload__nonexistent_core_raises_error(self):
"""Test that reloading a non-existent core returns a 400 error."""
@@ -158,9 +159,9 @@ def test_reload__nonexistent_core_raises_error(self):
data = json.loads(raw_response)
# Solr returns a 400 error for missing cores
- self.assertEqual(data["responseHeader"]["status"], 400)
- self.assertIn("No such core", data["error"]["msg"])
- self.assertIn("not_exists", data["error"]["msg"])
+ assert data["responseHeader"]["status"] == 400
+ assert "No such core" in data["error"]["msg"]
+ assert "not_exists" in data["error"]["msg"]
def test_rename__nonexistent_core_no_effect(self):
"""
@@ -179,8 +180,8 @@ def test_rename__nonexistent_core_no_effect(self):
data = json.loads(raw_response)
# The target core should not exist because the rename operation was ignored
- self.assertNotIn("name", data["status"]["demo_core99"])
- self.assertNotIn("instanceDir", data["status"]["demo_core99"])
+ assert "name" not in data["status"]["demo_core99"]
+ assert "instanceDir" not in data["status"]["demo_core99"]
def test_swap__missing_source_core_returns_error(self):
"""Test swapping when the source core is missing returns a 400 error."""
@@ -193,9 +194,9 @@ def test_swap__missing_source_core_returns_error(self):
data = json.loads(raw_response)
# Solr returns a 400 error when the source core does not exist
- self.assertEqual(data["responseHeader"]["status"], 400)
- self.assertIn("No such core", data["error"]["msg"])
- self.assertIn("not_exists", data["error"]["msg"])
+ assert data["responseHeader"]["status"] == 400
+ assert "No such core" in data["error"]["msg"]
+ assert "not_exists" in data["error"]["msg"]
def test_swap__missing_target_core_returns_error(self):
"""Test swapping when the target core is missing returns a 400 error."""
@@ -208,9 +209,9 @@ def test_swap__missing_target_core_returns_error(self):
data = json.loads(raw_response)
# Solr returns a 400 error when the target core does not exist
- self.assertEqual(data["responseHeader"]["status"], 400)
- self.assertIn("No such core", data["error"]["msg"])
- self.assertIn("not_exists", data["error"]["msg"])
+ assert data["responseHeader"]["status"] == 400
+ assert "No such core" in data["error"]["msg"]
+ assert "not_exists" in data["error"]["msg"]
def test_unload__nonexistent_core_returns_error(self):
"""Test unloading a non-existent core returns a 400 error response."""
@@ -220,6 +221,6 @@ def test_unload__nonexistent_core_returns_error(self):
data = json.loads(raw_response)
# Solr returns a 400 error for unloading a missing core
- self.assertEqual(data["responseHeader"]["status"], 400)
- self.assertIn("Cannot unload non-existent core", data["error"]["msg"])
- self.assertIn("not_exists", data["error"]["msg"])
+ assert data["responseHeader"]["status"] == 400
+ assert "Cannot unload non-existent core" in data["error"]["msg"]
+ assert "not_exists" in data["error"]["msg"]
diff --git a/tests/test_client.py b/tests/test_client.py
index b26e5351..7c3476e6 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -7,6 +7,8 @@
from urllib.parse import unquote_plus
from xml.etree import ElementTree # noqa: ICN001
+import pytest
+
from pysolr import (
NESTED_DOC_KEY,
Results,
@@ -24,67 +26,62 @@
class UtilsTestCase(unittest.TestCase):
def test_unescape_html(self):
- self.assertEqual(unescape_html("Hello world"), "Hello \x95 world")
- self.assertEqual(unescape_html("Hello d world"), "Hello d world")
- self.assertEqual(unescape_html("Hello & ☃"), "Hello & ☃")
- self.assertEqual(
- unescape_html("Hello &doesnotexist; world"), "Hello &doesnotexist; world"
+ assert unescape_html("Hello world") == "Hello \x95 world"
+ assert unescape_html("Hello d world") == "Hello d world"
+ assert unescape_html("Hello & ☃") == "Hello & ☃"
+ assert (
+ unescape_html("Hello &doesnotexist; world") == "Hello &doesnotexist; world"
)
def test_safe_urlencode(self):
- self.assertEqual(
+ assert (
force_unicode(
unquote_plus(safe_urlencode({"test": "Hello ☃! Helllo world!"}))
- ),
- "test=Hello ☃! Helllo world!",
+ )
+ == "test=Hello ☃! Helllo world!"
)
- self.assertEqual(
+ assert (
force_unicode(
unquote_plus(
safe_urlencode({"test": ["Hello ☃!", "Helllo world!"]}, True)
)
- ),
- "test=Hello \u2603!&test=Helllo world!",
+ )
+ == "test=Hello ☃!&test=Helllo world!"
)
- self.assertEqual(
+ assert (
force_unicode(
unquote_plus(
safe_urlencode({"test": ("Hello ☃!", "Helllo world!")}, True)
)
- ),
- "test=Hello \u2603!&test=Helllo world!",
+ )
+ == "test=Hello ☃!&test=Helllo world!"
)
def test_sanitize(self):
- (
- self.assertEqual(
- sanitize(
- "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19h\x1ae\x1bl\x1cl\x1do\x1e\x1f" # NOQA: E501
- ),
- "hello",
- ),
+ assert (
+ sanitize(
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f\x10\x11\x12"
+ "\x13\x14\x15\x16\x17\x18\x19h\x1ae\x1bl\x1cl\x1do\x1e\x1f"
+ )
+ == "hello"
)
def test_force_unicode(self):
- self.assertEqual(force_unicode(b"Hello \xe2\x98\x83"), "Hello ☃")
+ assert force_unicode(b"Hello \xe2\x98\x83") == "Hello ☃"
# Don't mangle, it's already Unicode.
- self.assertEqual(force_unicode("Hello ☃"), "Hello ☃")
+ assert force_unicode("Hello ☃") == "Hello ☃"
- self.assertEqual(force_unicode(1), "1", "force_unicode() should convert ints")
- self.assertEqual(
- force_unicode(1.0), "1.0", "force_unicode() should convert floats"
- )
- self.assertEqual(
- force_unicode(None), "None", "force_unicode() should convert None"
- )
+ assert force_unicode(1) == "1", "force_unicode() should convert ints"
+ assert force_unicode(1.0) == "1.0", "force_unicode() should convert floats"
+ assert force_unicode(None) == "None", "force_unicode() should convert None"
def test_force_bytes(self):
- self.assertEqual(force_bytes("Hello ☃"), b"Hello \xe2\x98\x83")
+ assert force_bytes("Hello ☃") == b"Hello \xe2\x98\x83"
# Don't mangle, it's already a bytestring.
- self.assertEqual(force_bytes(b"Hello \xe2\x98\x83"), b"Hello \xe2\x98\x83")
+ assert force_bytes(b"Hello \xe2\x98\x83") == b"Hello \xe2\x98\x83"
def test_clean_xml_string(self):
- self.assertEqual(clean_xml_string("\x00\x0b\x0d\uffff"), "\x0d")
+ assert clean_xml_string("\x00\x0b\r\uffff") == "\r"
class ResultsTestCase(unittest.TestCase):
@@ -93,15 +90,15 @@ def test_init(self):
{"response": {"docs": [{"id": 1}, {"id": 2}], "numFound": 2}}
)
- self.assertEqual(default_results.docs, [{"id": 1}, {"id": 2}])
- self.assertEqual(default_results.hits, 2)
- self.assertEqual(default_results.highlighting, {})
- self.assertEqual(default_results.facets, {})
- self.assertEqual(default_results.spellcheck, {})
- self.assertEqual(default_results.stats, {})
- self.assertIsNone(default_results.qtime)
- self.assertEqual(default_results.debug, {})
- self.assertEqual(default_results.grouped, {})
+ assert default_results.docs == [{"id": 1}, {"id": 2}]
+ assert default_results.hits == 2
+ assert default_results.highlighting == {}
+ assert default_results.facets == {}
+ assert default_results.spellcheck == {}
+ assert default_results.stats == {}
+ assert default_results.qtime is None
+ assert default_results.debug == {}
+ assert default_results.grouped == {}
full_results = Results(
{
@@ -117,26 +114,26 @@ def test_init(self):
}
)
- self.assertEqual(full_results.docs, [{"id": 1}, {"id": 2}, {"id": 3}])
- self.assertEqual(full_results.hits, 3)
- self.assertEqual(full_results.highlighting, "hi")
- self.assertEqual(full_results.facets, "fa")
- self.assertEqual(full_results.spellcheck, "sp")
- self.assertEqual(full_results.stats, "st")
- self.assertEqual(full_results.qtime, "0.001")
- self.assertTrue(full_results.debug)
- self.assertEqual(full_results.grouped, ["a"])
+ assert full_results.docs == [{"id": 1}, {"id": 2}, {"id": 3}]
+ assert full_results.hits == 3
+ assert full_results.highlighting == "hi"
+ assert full_results.facets == "fa"
+ assert full_results.spellcheck == "sp"
+ assert full_results.stats == "st"
+ assert full_results.qtime == "0.001"
+ assert full_results.debug
+ assert full_results.grouped == ["a"]
def test_len(self):
small_results = Results(
{"response": {"docs": [{"id": 1}, {"id": 2}], "numFound": 2}}
)
- self.assertEqual(len(small_results), 2)
+ assert len(small_results) == 2
wrong_hits_results = Results(
{"response": {"docs": [{"id": 1}, {"id": 2}, {"id": 3}], "numFound": 7}}
)
- self.assertEqual(len(wrong_hits_results), 3)
+ assert len(wrong_hits_results) == 3
def test_iter(self):
long_results = Results(
@@ -144,9 +141,9 @@ def test_iter(self):
)
to_iter = list(long_results)
- self.assertEqual(to_iter[0], {"id": 1})
- self.assertEqual(to_iter[1], {"id": 2})
- self.assertEqual(to_iter[2], {"id": 3})
+ assert to_iter[0] == {"id": 1}
+ assert to_iter[1] == {"id": 2}
+ assert to_iter[2] == {"id": 3}
class SolrTestCaseMixin(object):
@@ -236,27 +233,25 @@ def assertURLStartsWith(self, URL, path):
"""
# Note that we do not use urljoin to ensure that any changes in trailing
# slash handling are caught quickly:
- return self.assertEqual(
- URL, "%s/%s" % (self.solr.url.replace("/core0", ""), path)
- )
+ assert URL == "%s/%s" % (self.solr.url.replace("/core0", ""), path)
def test_init(self):
- self.assertEqual(self.solr.url, "http://localhost:8983/solr/core0")
- self.assertIsInstance(self.solr.decoder, json.JSONDecoder)
- self.assertIsInstance(self.solr.encoder, json.JSONEncoder)
- self.assertEqual(self.solr.timeout, 60)
+ assert self.solr.url == "http://localhost:8983/solr/core0"
+ assert isinstance(self.solr.decoder, json.JSONDecoder)
+ assert isinstance(self.solr.encoder, json.JSONEncoder)
+ assert self.solr.timeout == 60
custom_solr = self.get_solr("core0", timeout=17, always_commit=True)
- self.assertEqual(custom_solr.timeout, 17)
- self.assertTrue(custom_solr.always_commit)
+ assert custom_solr.timeout == 17
+ assert custom_solr.always_commit
def test_custom_results_class(self):
solr = Solr("http://localhost:8983/solr/core0", results_cls=dict)
results = solr.search(q="*:*")
- self.assertIsInstance(results, dict)
- self.assertIn("responseHeader", results)
- self.assertIn("response", results)
+ assert isinstance(results, dict)
+ assert "responseHeader" in results
+ assert "response" in results
def test_cursor_traversal(self):
solr = Solr("http://localhost:8983/solr/core0")
@@ -264,9 +259,9 @@ def test_cursor_traversal(self):
expected = solr.search(q="*:*", rows=len(self.docs) * 3, sort="id asc").docs
results = solr.search(q="*:*", cursorMark="*", rows=2, sort="id asc")
all_docs = list(results)
- self.assertEqual(len(expected), len(all_docs))
- self.assertEqual(len(results), len(all_docs))
- self.assertEqual(expected, all_docs)
+ assert len(expected) == len(all_docs)
+ assert len(results) == len(all_docs)
+ assert expected == all_docs
def test__create_full_url_base(self):
self.assertURLStartsWith(self.solr._create_full_url(path=""), "core0")
@@ -287,17 +282,20 @@ def test__create_full_url_with_path_and_querystring(self):
def test__send_request(self):
# Test a valid request.
resp_body = self.solr._send_request("GET", "select/?q=doc&wt=json")
- self.assertIn('"numFound":3', resp_body)
+ assert '"numFound":3' in resp_body
# Test a lowercase method & a body.
- xml_body = 'doc_12Whee! ☃' # NOQA: E501
+ xml_body = (
+ 'doc_12'
+ 'Whee! ☃'
+ )
resp_body = self.solr._send_request(
"POST",
"update/?commit=true",
body=xml_body,
headers={"Content-type": "text/xml; charset=utf-8"},
)
- self.assertIn('0', resp_body)
+ assert '0' in resp_body
# Test JSON Array
json_body = '[{"id":"doc_13","title":"Whee hoo! ☃"}]'
@@ -307,66 +305,70 @@ def test__send_request(self):
body=json_body,
headers={"Content-type": "application/json; charset=utf-8"},
)
- self.assertIn('"status":0', resp_body)
+ assert '"status":0' in resp_body
def test__send_request_to_bad_path(self):
# Test a non-existent URL:
self.solr.url = "http://127.0.0.1:56789/whatever"
- self.assertRaises(
- SolrError, self.solr._send_request, "get", "select/?q=doc&wt=json"
- )
+ with pytest.raises(SolrError):
+ self.solr._send_request("get", "select/?q=doc&wt=json")
def test_send_request_to_bad_core(self):
# Test a bad core on a valid URL:
self.solr.url = "http://localhost:8983/solr/bad_core"
- self.assertRaises(
- SolrError, self.solr._send_request, "get", "select/?q=doc&wt=json"
- )
+ with pytest.raises(SolrError):
+ self.solr._send_request("get", "select/?q=doc&wt=json")
def test__select(self):
# Short params.
resp_body = self.solr._select({"q": "doc"})
resp_data = json.loads(resp_body)
- self.assertEqual(resp_data["response"]["numFound"], 3)
+ assert resp_data["response"]["numFound"] == 3
# Long params.
resp_body = self.solr._select({"q": "doc" * 1024})
resp_data = json.loads(resp_body)
- self.assertEqual(resp_data["response"]["numFound"], 0)
- self.assertEqual(len(resp_data["responseHeader"]["params"]["q"]), 3 * 1024)
+ assert resp_data["response"]["numFound"] == 0
+ assert len(resp_data["responseHeader"]["params"]["q"]) == 3 * 1024
# Test Deep Pagination CursorMark
resp_body = self.solr._select(
{"q": "*", "cursorMark": "*", "sort": "id desc", "start": 0, "rows": 2}
)
resp_data = json.loads(resp_body)
- self.assertEqual(len(resp_data["response"]["docs"]), 2)
- self.assertIn("nextCursorMark", resp_data)
+ assert len(resp_data["response"]["docs"]) == 2
+ assert "nextCursorMark" in resp_data
def test__select_wt_xml(self):
resp_body = self.solr._select({"q": "doc", "wt": "xml"})
response = ElementTree.fromstring(resp_body)
- self.assertEqual(int(response.find("result").get("numFound")), 3)
+ assert int(response.find("result").get("numFound")) == 3
def test__mlt(self):
resp_body = self.solr._mlt({"q": "id:doc_1", "mlt.fl": "title"})
resp_data = json.loads(resp_body)
- self.assertEqual(resp_data["response"]["numFound"], 0)
+ assert resp_data["response"]["numFound"] == 0
def test__suggest_terms(self):
resp_body = self.solr._select({"terms.fl": "title"})
resp_data = json.loads(resp_body)
- self.assertEqual(resp_data["response"]["numFound"], 0)
+ assert resp_data["response"]["numFound"] == 0
def test__update(self):
- xml_body = 'doc_12Whee!' # NOQA: E501
+ xml_body = (
+ 'doc_12'
+ 'Whee!'
+ )
resp_body = self.solr._update(xml_body)
- self.assertIn('0', resp_body)
+ assert '0' in resp_body
def test__soft_commit(self):
- xml_body = 'doc_12Whee!' # NOQA: E501
+ xml_body = (
+ 'doc_12'
+ 'Whee!'
+ )
resp_body = self.solr._update(xml_body, softCommit=True)
- self.assertIn('0', resp_body)
+ assert '0' in resp_body
def test__extract_error(self):
class RubbishResponse(object):
@@ -384,36 +386,28 @@ def json(self):
# Just the reason.
resp_1 = RubbishResponse("We don't care.", {"reason": "Something went wrong."})
- self.assertEqual(
- self.solr._extract_error(resp_1), "[Reason: Something went wrong.]"
- )
+ assert self.solr._extract_error(resp_1) == "[Reason: Something went wrong.]"
# Empty reason.
resp_2 = RubbishResponse("We don't care.", {"reason": None})
- self.assertEqual(
- self.solr._extract_error(resp_2), "[Reason: None]\nWe don't care."
- )
+ assert self.solr._extract_error(resp_2) == "[Reason: None]\nWe don't care."
# No reason. Time to scrape.
resp_3 = RubbishResponse(
"
Something is broke.
",
{"server": "jetty"},
)
- self.assertEqual(
- self.solr._extract_error(resp_3), "[Reason: Something is broke.]"
- )
+ assert self.solr._extract_error(resp_3) == "[Reason: Something is broke.]"
# No reason. JSON response.
resp_4 = RubbishResponse(
b'\n {"error": {"msg": "It happens"}}', {"server": "tomcat"}
)
- self.assertEqual(self.solr._extract_error(resp_4), "[Reason: It happens]")
+ assert self.solr._extract_error(resp_4) == "[Reason: It happens]"
# No reason. Weird JSON response.
resp_5 = RubbishResponse(b'{"kinda": "weird"}', {"server": "jetty"})
- self.assertEqual(
- self.solr._extract_error(resp_5), '[Reason: None]\n{"kinda": "weird"}'
- )
+ assert self.solr._extract_error(resp_5) == '[Reason: None]\n{"kinda": "weird"}'
def test__scrape_response(self):
# Jetty.
@@ -421,26 +415,28 @@ def test__scrape_response(self):
{"server": "jetty"},
"Something is broke.
",
)
- self.assertEqual(resp_1, ("Something is broke.", ""))
+ assert resp_1 == ("Something is broke.", "")
# Other.
resp_2 = self.solr._scrape_response(
{"server": "crapzilla"},
- "Wow. Seriously weird.Something is broke.
", # NOQA: E501
+ "Wow. Seriously weird."
+ "Something is broke.
",
)
- self.assertEqual(resp_2, ("Wow. Seriously weird.", ""))
+ assert resp_2 == ("Wow. Seriously weird.", "")
def test__scrape_response_coyote_xml(self):
resp_3 = self.solr._scrape_response(
{"server": "coyote"},
- '\n\n4000Invalid Date String:\'2015-03-23 10:43:33\'400\n\n', # NOQA: E501
+ '\n\n'
+ '4000'
+ ''
+ "Invalid Date String:'2015-03-23 10:43:33'"
+ '400\n\n',
)
- self.assertEqual(
- resp_3,
- (
- "Invalid Date String:'2015-03-23 10:43:33'",
- "Invalid Date String:'2015-03-23 10:43:33'",
- ),
+ assert resp_3 == (
+ "Invalid Date String:'2015-03-23 10:43:33'",
+ "Invalid Date String:'2015-03-23 10:43:33'",
)
# Valid XML with a traceback
@@ -448,14 +444,16 @@ def test__scrape_response_coyote_xml(self):
{"server": "coyote"},
"""
-500138Internal Server Errororg.apache.solr.common.SolrException: Internal Server Error at java.lang.Thread.run(Thread.java:745)500
-""", # NOQA: E501
+500138
+Internal Server Error
+org.apache.solr.common.SolrException: Internal Server Error at java.lang.Thread.run(Thread.java:745)500
+""", # noqa: E501
)
- self.assertEqual(
- resp_4,
+ assert resp_4 == (
+ "Internal Server Error",
(
- "Internal Server Error",
- "org.apache.solr.common.SolrException: Internal Server Error at java.lang.Thread.run(Thread.java:745)", # NOQA: E501
+ "org.apache.solr.common.SolrException: Internal Server Error at "
+ "java.lang.Thread.run(Thread.java:745)"
),
)
@@ -464,32 +462,39 @@ def test__scrape_response_tomcat(self):
resp_0 = self.solr._scrape_response(
{"server": "coyote"},
- "Something broke!
gigantic stack trace
", # NOQA: E501
+ "Something broke!
gigantic stack trace
"
+ "",
)
- self.assertEqual(resp_0, ("Something broke!", ""))
+ assert resp_0 == ("Something broke!", "")
# Invalid XML
- bogus_xml = '\n\n4000Invalid Date String:\'2015-03-23 10:43:33\'400' # NOQA: E501
+ bogus_xml = (
+ '\n\n'
+ '4000'
+ ''
+ "Invalid Date String:'2015-03-23 10:43:33'"
+ '400'
+ )
reason, full_html = self.solr._scrape_response({"server": "coyote"}, bogus_xml)
- self.assertIsNone(reason, None)
- self.assertEqual(full_html, bogus_xml.replace("\n", ""))
+ assert reason is None
+ assert full_html == bogus_xml.replace("\n", "")
def test__from_python(self):
- self.assertEqual(self.solr._from_python(True), "true")
- self.assertEqual(self.solr._from_python(False), "false")
- self.assertEqual(self.solr._from_python(1), "1")
- self.assertEqual(self.solr._from_python(1.2), "1.2")
- self.assertEqual(self.solr._from_python(b"hello"), "hello")
- self.assertEqual(self.solr._from_python("hello ☃"), "hello ☃")
- self.assertEqual(self.solr._from_python("\x01test\x02"), "test")
+ assert self.solr._from_python(True) == "true"
+ assert self.solr._from_python(False) == "false"
+ assert self.solr._from_python(1) == "1"
+ assert self.solr._from_python(1.2) == "1.2"
+ assert self.solr._from_python(b"hello") == "hello"
+ assert self.solr._from_python("hello ☃") == "hello ☃"
+ assert self.solr._from_python("\x01test\x02") == "test"
def test__from_python_dates(self):
- self.assertEqual(
- self.solr._from_python(datetime.date(2013, 1, 18)), "2013-01-18T00:00:00Z"
+ assert (
+ self.solr._from_python(datetime.date(2013, 1, 18)) == "2013-01-18T00:00:00Z"
)
- self.assertEqual(
- self.solr._from_python(datetime.datetime(2013, 1, 18, 0, 30, 28)),
- "2013-01-18T00:30:28Z",
+ assert (
+ self.solr._from_python(datetime.datetime(2013, 1, 18, 0, 30, 28))
+ == "2013-01-18T00:30:28Z"
)
class FakeTimeZone(datetime.tzinfo):
@@ -502,61 +507,58 @@ def dst(self):
return None
# Check a UTC timestamp
- self.assertEqual(
+ assert (
self.solr._from_python(
datetime.datetime(2013, 1, 18, 0, 30, 28, tzinfo=FakeTimeZone())
- ),
- "2013-01-18T00:30:28Z",
+ )
+ == "2013-01-18T00:30:28Z"
)
# Check a US Eastern Standard Time timestamp
FakeTimeZone.offset = -(5 * 60)
- self.assertEqual(
+ assert (
self.solr._from_python(
datetime.datetime(2013, 1, 18, 0, 30, 28, tzinfo=FakeTimeZone())
- ),
- "2013-01-18T05:30:28Z",
+ )
+ == "2013-01-18T05:30:28Z"
)
def test__to_python(self):
- self.assertEqual(
- self.solr._to_python("2013-01-18T00:00:00Z"), datetime.datetime(2013, 1, 18)
- )
- self.assertEqual(
- self.solr._to_python("2013-01-18T00:30:28Z"),
- datetime.datetime(2013, 1, 18, 0, 30, 28),
+ assert self.solr._to_python("2013-01-18T00:00:00Z") == datetime.datetime(
+ 2013, 1, 18
)
- self.assertTrue(self.solr._to_python("true"))
- self.assertFalse(self.solr._to_python("false"))
- self.assertEqual(self.solr._to_python(1), 1)
- self.assertEqual(self.solr._to_python(1.2), 1.2)
- self.assertEqual(self.solr._to_python(b"hello"), "hello")
- self.assertEqual(self.solr._to_python("hello ☃"), "hello ☃")
- self.assertEqual(self.solr._to_python(["foo", "bar"]), ["foo", "bar"])
- self.assertEqual(self.solr._to_python(("foo", "bar")), ("foo", "bar"))
- self.assertEqual(
- self.solr._to_python('tuple("foo", "bar")'), 'tuple("foo", "bar")'
+ assert self.solr._to_python("2013-01-18T00:30:28Z") == datetime.datetime(
+ 2013, 1, 18, 0, 30, 28
)
+ assert self.solr._to_python("true")
+ assert not self.solr._to_python("false")
+ assert self.solr._to_python(1) == 1
+ assert self.solr._to_python(1.2) == 1.2
+ assert self.solr._to_python(b"hello") == "hello"
+ assert self.solr._to_python("hello ☃") == "hello ☃"
+ assert self.solr._to_python(["foo", "bar"]) == ["foo", "bar"]
+ assert self.solr._to_python(("foo", "bar")) == ("foo", "bar")
+ assert self.solr._to_python('tuple("foo", "bar")') == 'tuple("foo", "bar")'
def test__is_null_value(self):
- self.assertTrue(self.solr._is_null_value(None))
- self.assertTrue(self.solr._is_null_value(""))
+ assert self.solr._is_null_value(None)
+ assert self.solr._is_null_value("")
- self.assertFalse(self.solr._is_null_value("Hello"))
- self.assertFalse(self.solr._is_null_value(1))
+ assert not self.solr._is_null_value("Hello")
+ assert not self.solr._is_null_value(1)
def test_search(self):
results = self.solr.search("doc")
- self.assertEqual(len(results), 3)
+ assert len(results) == 3
# search should default to 'select' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("select/?"))
+ assert args[1].startswith("select/?")
results = self.solr.search("example")
- self.assertEqual(len(results), 2)
+ assert len(results) == 2
results = self.solr.search("nothing")
- self.assertEqual(len(results), 0)
+ assert len(results) == 0
# Advanced options.
results = self.solr.search(
@@ -572,85 +574,80 @@ def test_search(self):
"spellcheck.count": 1,
},
)
- self.assertEqual(len(results), 3)
- self.assertIn("explain", results.debug)
- self.assertEqual(results.highlighting, {"doc_4": {}, "doc_2": {}, "doc_1": {}})
- self.assertEqual(results.spellcheck, {})
+ assert len(results) == 3
+ assert "explain" in results.debug
+ assert results.highlighting == {"doc_4": {}, "doc_2": {}, "doc_1": {}}
+ assert results.spellcheck == {}
# Facet counts apply only to documents matching the main query ("q=doc").
# Only docs with popularity=10 and 7 contain "doc" in their text fields, so the
# facet output is ["10", 2, "7", 1]. Values like 2 and 8 do not appear because
# those documents do not match the query and are excluded from facet counts.
- self.assertEqual(
- results.facets["facet_fields"]["popularity"], ["10", 2, "7", 1]
- )
- self.assertIsNotNone(results.qtime)
+ assert results.facets["facet_fields"]["popularity"] == ["10", 2, "7", 1]
+ assert results.qtime is not None
# Nested search #1: find parent where child's comment has 'hello'
results = self.solr.search("{!parent which=type_s:parent}comment_t:hello")
- self.assertEqual(len(results), 1)
+ assert len(results) == 1
# Nested search #2: find child with a child
results = self.solr.search("{!parent which=type_s:child}comment_t:blah")
- self.assertEqual(len(results), 1)
+ assert len(results) == 1
def test_multiple_search_handlers(self):
misspelled_words = "anthr thng"
# By default, the 'select' search handler should be used
results = self.solr.search(q=misspelled_words)
- self.assertEqual(results.spellcheck, {})
+ assert results.spellcheck == {}
# spell search handler should return suggestions
# NB: this test relies on the spell search handler in the
# solrconfig (see the SOLR_ARCHIVE used by the start-solr-test-server script)
results = self.solr.search(q=misspelled_words, search_handler="spell")
- self.assertNotEqual(results.spellcheck, {})
+ assert results.spellcheck != {}
# search should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.search("doc", search_handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_more_like_this(self):
results = self.solr.more_like_this("id:doc_1", "text")
- self.assertEqual(len(results), 0)
+ assert len(results) == 0
# more_like_this should default to 'mlt' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("mlt/?"))
+ assert args[1].startswith("mlt/?")
# more_like_this should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.more_like_this("id:doc_1", "text", handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_suggest_terms(self):
results = self.solr.suggest_terms("title", "")
- self.assertEqual(len(results), 1)
- self.assertEqual(
- results,
- {
- "title": [
- ("doc", 3),
- ("another", 2),
- ("example", 2),
- ("1", 1),
- ("2", 1),
- ("boring", 1),
- ("rock", 1),
- ("thing", 1),
- ("☃", 1),
- ]
- },
- )
+ assert len(results) == 1
+ assert results == {
+ "title": [
+ ("doc", 3),
+ ("another", 2),
+ ("example", 2),
+ ("1", 1),
+ ("2", 1),
+ ("boring", 1),
+ ("rock", 1),
+ ("thing", 1),
+ ("☃", 1),
+ ]
+ }
# suggest_terms should default to 'mlt' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("terms/?"))
+ assert args[1].startswith("terms/?")
# suggest_terms should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.suggest_terms("title", "", handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test__build_xml_doc(self):
doc = {
@@ -662,20 +659,20 @@ def test__build_xml_doc(self):
doc_xml = force_unicode(
ElementTree.tostring(self.solr._build_xml_doc(doc), encoding="utf-8")
)
- self.assertIn('Example doc ☃ 1', doc_xml)
- self.assertIn('doc_1', doc_xml)
- self.assertEqual(len(doc_xml), 152)
+ assert 'Example doc ☃ 1' in doc_xml
+ assert 'doc_1' in doc_xml
+ assert len(doc_xml) == 152
def test__build_xml_doc_with_sets(self):
doc = {"id": "doc_1", "title": "Set test doc", "tags": {"alpha", "beta"}}
doc_xml = force_unicode(
ElementTree.tostring(self.solr._build_xml_doc(doc), encoding="utf-8")
)
- self.assertIn('doc_1', doc_xml)
- self.assertIn('Set test doc', doc_xml)
- self.assertIn('alpha', doc_xml)
- self.assertIn('beta', doc_xml)
- self.assertEqual(len(doc_xml), 144)
+ assert 'doc_1' in doc_xml
+ assert 'Set test doc' in doc_xml
+ assert 'alpha' in doc_xml
+ assert 'beta' in doc_xml
+ assert len(doc_xml) == 144
def test__build_xml_doc_with_sub_docs(self):
sub_docs = [
@@ -700,13 +697,13 @@ def test__build_xml_doc_with_sub_docs(self):
"_doc": sub_docs,
}
doc_xml = self.solr._build_xml_doc(doc)
- self.assertEqual(doc_xml.find("*[@name='id']").text, doc["id"])
+ assert doc_xml.find("*[@name='id']").text == doc["id"]
children_docs = doc_xml.findall("doc")
- self.assertEqual(len(children_docs), len(sub_docs))
+ assert len(children_docs) == len(sub_docs)
- self.assertEqual(children_docs[0].find("*[@name='id']").text, sub_docs[0]["id"])
- self.assertEqual(children_docs[1].find("*[@name='id']").text, sub_docs[1]["id"])
+ assert children_docs[0].find("*[@name='id']").text == sub_docs[0]["id"]
+ assert children_docs[1].find("*[@name='id']").text == sub_docs[1]["id"]
def test__build_xml_doc_with_empty_values(self):
doc = {
@@ -718,11 +715,11 @@ def test__build_xml_doc_with_empty_values(self):
doc_xml = force_unicode(
ElementTree.tostring(self.solr._build_xml_doc(doc), encoding="utf-8")
)
- self.assertNotIn('', doc_xml)
- self.assertNotIn('', doc_xml)
- self.assertNotIn('', doc_xml)
- self.assertIn('doc_1', doc_xml)
- self.assertEqual(len(doc_xml), 41)
+ assert '' not in doc_xml
+ assert '' not in doc_xml
+ assert '' not in doc_xml
+ assert 'doc_1' in doc_xml
+ assert len(doc_xml) == 41
def test__build_xml_doc_with_empty_values_and_field_updates(self):
doc = {
@@ -741,40 +738,40 @@ def test__build_xml_doc_with_empty_values_and_field_updates(self):
encoding="utf-8",
)
)
- self.assertIn('', doc_xml)
- self.assertNotIn('', doc_xml)
- self.assertIn('', doc_xml)
- self.assertIn('doc_1', doc_xml)
- self.assertEqual(len(doc_xml), 134)
+ assert '' in doc_xml
+ assert '' not in doc_xml
+ assert '' in doc_xml
+ assert 'doc_1' in doc_xml
+ assert len(doc_xml) == 134
def test_build_json_doc_matches_xml(self):
doc = {"id": "doc_1", "title": "", "price": 12.59, "popularity": 10}
doc_json = self.solr._build_json_doc(doc)
doc_xml = self.solr._build_xml_doc(doc)
- self.assertNotIn("title", doc_json)
- self.assertIsNone(doc_xml.find("*[name='title']"))
+ assert "title" not in doc_json
+ assert doc_xml.find("*[name='title']") is None
def test__build_docs_plain(self):
docs = [{"id": "doc_1", "title": "", "price": 12.59, "popularity": 10}]
solrapi, _m, _len_message = self.solr._build_docs(docs)
- self.assertEqual(solrapi, "JSON")
+ assert solrapi == "JSON"
def test__build_docs_boost(self):
docs = [{"id": "doc_1", "title": "", "price": 12.59, "popularity": 10}]
solrapi, _m, _len_message = self.solr._build_docs(docs, boost={"title": 10.0})
- self.assertEqual(solrapi, "XML")
+ assert solrapi == "XML"
def test__build_docs_field_updates(self):
docs = [{"id": "doc_1", "popularity": 10}]
solrapi, _m, _len_message = self.solr._build_docs(
docs, fieldUpdates={"popularity": "inc"}
)
- self.assertEqual(solrapi, "JSON")
+ assert solrapi == "JSON"
def test_add(self):
- self.assertEqual(len(self.solr.search("doc")), 3)
- self.assertEqual(len(self.solr.search("example")), 2)
+ assert len(self.solr.search("doc")) == 3
+ assert len(self.solr.search("example")) == 2
self.solr.add(
[
@@ -785,19 +782,19 @@ def test_add(self):
)
# add should default to 'update' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/?"))
+ assert args[1].startswith("update/?")
- self.assertEqual(len(self.solr.search("doc")), 5)
- self.assertEqual(len(self.solr.search("example")), 3)
+ assert len(self.solr.search("doc")) == 5
+ assert len(self.solr.search("example")) == 3
# add should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.add([], handler="fakehandler", commit=True)
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_add_with_boost(self):
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
# Add documents (index-time boost is removed in Solr 8+)
# Ref: https://solr.apache.org/guide/solr/latest/upgrade-notes/major-changes-in-solr-8.html#indexing-changes-in-8-0
@@ -812,15 +809,15 @@ def test_add_with_boost(self):
)
res = self.solr.search("doc")
- self.assertEqual(len(res), 5)
+ assert len(res) == 5
# Solr 8+ no longer supports index-time boosts.
# TF/IDF scoring places doc_7 first because "doc" appears twice
# consecutively in its title ("Spam doc doc").
- self.assertEqual("doc_7", res.docs[0]["id"])
+ assert "doc_7" == res.docs[0]["id"]
def test_add_with_commit_within(self):
- self.assertEqual(len(self.solr.search("commitWithin")), 0)
+ assert len(self.solr.search("commitWithin")) == 0
commit_within_ms = 50
self.solr.add(
@@ -830,54 +827,50 @@ def test_add_with_commit_within(self):
commitWithin=commit_within_ms,
)
# we should not see the doc immediately
- self.assertEqual(len(self.solr.search("commitWithin")), 0)
+ assert len(self.solr.search("commitWithin")) == 0
# but we should see it after commitWithin period (+ small grace period)
time.sleep((commit_within_ms / 1000.0) + 0.01)
- self.assertEqual(len(self.solr.search("commitWithin")), 1)
+ assert len(self.solr.search("commitWithin")) == 1
def test_field_update_inc(self):
originalDocs = self.solr.search("doc")
- self.assertEqual(len(originalDocs), 3)
+ assert len(originalDocs) == 3
updateList = []
for doc in originalDocs:
updateList.append({"id": doc["id"], "popularity": 5})
self.solr.add(updateList, fieldUpdates={"popularity": "inc"}, commit=True)
updatedDocs = self.solr.search("doc")
- self.assertEqual(len(updatedDocs), 3)
+ assert len(updatedDocs) == 3
for originalDoc, updatedDoc in zip(originalDocs, updatedDocs, strict=True):
- self.assertEqual(len(updatedDoc.keys()), len(originalDoc.keys()))
- self.assertEqual(updatedDoc["popularity"], originalDoc["popularity"] + 5)
+ assert len(updatedDoc.keys()) == len(originalDoc.keys())
+ assert updatedDoc["popularity"] == originalDoc["popularity"] + 5
# TODO: change this to use assertSetEqual:
- self.assertTrue(
- all(
- updatedDoc[k] == originalDoc[k]
- for k in updatedDoc.keys()
- if k not in ["_version_", "popularity"]
- )
+ assert all(
+ updatedDoc[k] == originalDoc[k]
+ for k in updatedDoc.keys()
+ if k not in ["_version_", "popularity"]
)
def test_field_update_set(self):
originalDocs = self.solr.search("doc")
updated_popularity = 10
- self.assertEqual(len(originalDocs), 3)
+ assert len(originalDocs) == 3
updateList = []
for doc in originalDocs:
updateList.append({"id": doc["id"], "popularity": updated_popularity})
self.solr.add(updateList, fieldUpdates={"popularity": "set"}, commit=True)
updatedDocs = self.solr.search("doc")
- self.assertEqual(len(updatedDocs), 3)
+ assert len(updatedDocs) == 3
for originalDoc, updatedDoc in zip(originalDocs, updatedDocs, strict=True):
- self.assertEqual(len(updatedDoc.keys()), len(originalDoc.keys()))
- self.assertEqual(updatedDoc["popularity"], updated_popularity)
+ assert len(updatedDoc.keys()) == len(originalDoc.keys())
+ assert updatedDoc["popularity"] == updated_popularity
# TODO: change this to use assertSetEqual:
- self.assertTrue(
- all(
- updatedDoc[k] == originalDoc[k]
- for k in updatedDoc.keys()
- if k not in ["_version_", "popularity"]
- )
+ assert all(
+ updatedDoc[k] == originalDoc[k]
+ for k in updatedDoc.keys()
+ if k not in ["_version_", "popularity"]
)
def test_field_update_add(self):
@@ -898,39 +891,38 @@ def test_field_update_add(self):
)
originalDocs = self.solr.search("multivalued")
- self.assertEqual(len(originalDocs), 2)
+ assert len(originalDocs) == 2
updateList = []
for doc in originalDocs:
updateList.append({"id": doc["id"], "word_ss": ["epsilon", "gamma"]})
self.solr.add(updateList, fieldUpdates={"word_ss": "add"}, commit=True)
updatedDocs = self.solr.search("multivalued")
- self.assertEqual(len(updatedDocs), 2)
+ assert len(updatedDocs) == 2
for originalDoc, updatedDoc in zip(originalDocs, updatedDocs, strict=True):
- self.assertEqual(len(updatedDoc.keys()), len(originalDoc.keys()))
- self.assertEqual(
- updatedDoc["word_ss"], originalDoc["word_ss"] + ["epsilon", "gamma"]
- )
+ assert len(updatedDoc.keys()) == len(originalDoc.keys())
+ assert updatedDoc["word_ss"] == originalDoc["word_ss"] + [
+ "epsilon",
+ "gamma",
+ ]
# TODO: change this to use assertSetEqual:
- self.assertTrue(
- all(
- updatedDoc[k] == originalDoc[k]
- for k in updatedDoc.keys()
- if k not in ["_version_", "word_ss"]
- )
+ assert all(
+ updatedDoc[k] == originalDoc[k]
+ for k in updatedDoc.keys()
+ if k not in ["_version_", "word_ss"]
)
def test_delete(self):
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
self.solr.delete(id="doc_1", commit=True)
# delete should default to 'update' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/?"))
+ assert args[1].startswith("update/?")
- self.assertEqual(len(self.solr.search("doc")), 2)
- self.assertEqual(len(self.solr.search("type_s:parent")), 2)
- self.assertEqual(len(self.solr.search("type_s:child")), 3)
- self.assertEqual(len(self.solr.search("type_s:grandchild")), 1)
+ assert len(self.solr.search("doc")) == 2
+ assert len(self.solr.search("type_s:parent")) == 2
+ assert len(self.solr.search("type_s:child")) == 3
+ assert len(self.solr.search("type_s:grandchild")) == 1
self.solr.delete(q="price:[0 TO 15]")
self.solr.delete(q="type_s:parent", commit=True)
@@ -943,14 +935,14 @@ def test_delete(self):
# one simple doc should remain
# parent documents were also deleted but children remain as orphans
- self.assertEqual(len(self.solr.search("doc")), 1)
- self.assertEqual(len(self.solr.search("type_s:parent")), 0)
- self.assertEqual(len(self.solr.search("type_s:child")), 3)
+ assert len(self.solr.search("doc")) == 1
+ assert len(self.solr.search("type_s:parent")) == 0
+ assert len(self.solr.search("type_s:child")) == 3
self.solr.delete(q="type_s:child OR type_s:grandchild", commit=True)
- self.assertEqual(len(self.solr.search("*:*")), 1)
+ assert len(self.solr.search("*:*")) == 1
self.solr.delete(q="*:*", commit=True)
- self.assertEqual(len(self.solr.search("*:*")), 0)
+ assert len(self.solr.search("*:*")) == 0
# Test delete() with `id' being a list.
# Solr's ability to delete parent/children docs by id is simply assumed
@@ -965,33 +957,40 @@ def leaf_doc(doc):
self.solr.commit()
leaf_q = "price:[* TO *]"
- self.assertEqual(len(self.solr.search(leaf_q)), len(to_delete_docs))
+ assert len(self.solr.search(leaf_q)) == len(to_delete_docs)
# Extract a random doc from the list, to later check it wasn't deleted.
graced_doc_id = to_delete_ids.pop(
random.randint(0, len(to_delete_ids) - 1) # NOQA: S311
)
self.solr.delete(id=to_delete_ids, commit=True)
# There should be only one left, our graced id
- self.assertEqual(len(self.solr.search(leaf_q)), 1)
- self.assertEqual(len(self.solr.search("id:%s" % graced_doc_id)), 1)
+ assert len(self.solr.search(leaf_q)) == 1
+ assert len(self.solr.search("id:%s" % graced_doc_id)) == 1
# Now we can wipe the graced document too. None should be left.
self.solr.delete(id=graced_doc_id, commit=True)
- self.assertEqual(len(self.solr.search(leaf_q)), 0)
+ assert len(self.solr.search(leaf_q)) == 0
# Can't delete when the list of documents is empty
- self.assertRaises(ValueError, self.solr.delete, id=[None, None, None])
- self.assertRaises(ValueError, self.solr.delete, id=[None])
+ msg = "The list of documents to delete was empty."
+ with pytest.raises(ValueError, match=msg):
+ self.solr.delete(id=[None, None, None])
+ with pytest.raises(ValueError, match=msg):
+ self.solr.delete(id=[None])
# Need at least one of either `id' or `q'
- self.assertRaises(ValueError, self.solr.delete)
+ msg = 'You must specify "id" or "q".'
+ with pytest.raises(ValueError, match=msg):
+ self.solr.delete()
# Can't have both.
- self.assertRaises(ValueError, self.solr.delete, id="foo", q="bar")
+ msg = 'You many only specify "id" OR "q", not both.'
+ with pytest.raises(ValueError, match=msg):
+ self.solr.delete(id="foo", q="bar")
# delete should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.delete(id="doc_1", handler="fakehandler", commit=True)
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_delete__accepts_string_and_integer_values(self):
"""
@@ -1024,14 +1023,14 @@ def test_delete__accepts_string_and_integer_values(self):
self.solr.delete(q=104, commit=True)
def test_commit(self):
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
self.solr.add([{"id": "doc_6", "title": "Newly added doc"}])
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
self.solr.commit()
# commit should default to 'update' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/?"))
- self.assertEqual(len(self.solr.search("doc")), 4)
+ assert args[1].startswith("update/?")
+ assert len(self.solr.search("doc")) == 4
def test_can_handles_default_commit_policy(self):
expected_commits = [False, True, False]
@@ -1041,10 +1040,10 @@ def test_can_handles_default_commit_policy(self):
self.solr.add([{"id": "doc_6", "title": "Newly added doc"}], commit=arg)
args, _ = self.solr._send_request.call_args
committing_in_url = "commit" in args[1]
- self.assertEqual(expected_commit, committing_in_url)
+ assert expected_commit == committing_in_url
def test_overwrite(self):
- self.assertEqual(len(self.solr.search("id:doc_overwrite_1")), 0)
+ assert len(self.solr.search("id:doc_overwrite_1")) == 0
self.solr.add(
[
{"id": "doc_overwrite_1", "title": "Kim is awesome."},
@@ -1053,30 +1052,30 @@ def test_overwrite(self):
overwrite=False,
commit=True,
)
- self.assertEqual(len(self.solr.search("id:doc_overwrite_1")), 2)
+ assert len(self.solr.search("id:doc_overwrite_1")) == 2
# commit should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.commit(handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_optimize(self):
# Make sure it doesn't blow up. Side effects are hard to measure. :/
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
self.solr.add([{"id": "doc_6", "title": "Newly added doc"}], commit=False)
- self.assertEqual(len(self.solr.search("doc")), 3)
+ assert len(self.solr.search("doc")) == 3
self.solr.optimize()
# optimize should default to 'update' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/?"))
- self.assertEqual(len(self.solr.search("doc")), 4)
+ assert args[1].startswith("update/?")
+ assert len(self.solr.search("doc")) == 4
# optimize should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.optimize(handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
def test_extract(self):
fake_f = StringIO(
@@ -1095,31 +1094,31 @@ def test_extract(self):
extracted = self.solr.extract(fake_f)
# extract should default to 'update/extract' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/extract"))
+ assert args[1].startswith("update/extract")
# extract should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.extract(fake_f, handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
# Verify documented response structure:
- self.assertIn("contents", extracted)
- self.assertIn("metadata", extracted)
+ assert "contents" in extracted
+ assert "metadata" in extracted
- self.assertIn("foobar", extracted["contents"])
+ assert "foobar" in extracted["contents"]
m = extracted["metadata"]
- self.assertIn("file", m["stream_name"])
+ assert "file" in m["stream_name"]
- self.assertIn("haystack-test", m, "HTML metadata should have been extracted!")
- self.assertEqual(["test 1234"], m["haystack-test"])
+ assert "haystack-test" in m, "HTML metadata should have been extracted!"
+ assert ["test 1234"] == m["haystack-test"]
# Note the underhanded use of a double snowman to verify both that Tika
# correctly decoded entities and that our UTF-8 characters survived the
# round-trip:
- self.assertEqual(["Test Title ☃☃"], m["title"])
+ assert ["Test Title ☃☃"] == m["title"]
def test_extract_special_char_in_filename(self):
fake_f = StringIO(
@@ -1138,38 +1137,38 @@ def test_extract_special_char_in_filename(self):
extracted = self.solr.extract(fake_f)
# extract should default to 'update/extract' handler
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("update/extract"))
+ assert args[1].startswith("update/extract")
# extract should support custom handlers
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.extract(fake_f, handler="fakehandler")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("fakehandler"))
+ assert args[1].startswith("fakehandler")
# Verify documented response structure:
- self.assertIn("contents", extracted)
- self.assertIn("metadata", extracted)
+ assert "contents" in extracted
+ assert "metadata" in extracted
- self.assertIn("foobar", extracted["contents"])
+ assert "foobar" in extracted["contents"]
m = extracted["metadata"]
- self.assertIn("file", m["stream_name"])
+ assert "file" in m["stream_name"]
- self.assertIn("haystack-test", m, "HTML metadata should have been extracted!")
- self.assertEqual(["test 1234"], m["haystack-test"])
+ assert "haystack-test" in m, "HTML metadata should have been extracted!"
+ assert ["test 1234"] == m["haystack-test"]
# Note the underhanded use of a double snowman to verify both that Tika
# correctly decoded entities and that our UTF-8 characters survived the
# round-trip:
- self.assertEqual(["Test Title ☃☃"], m["title"])
+ assert ["Test Title ☃☃"] == m["title"]
def test_full_url(self):
self.solr.url = "http://localhost:8983/solr/core0"
full_url = self.solr._create_full_url(path="/update")
# Make sure trailing and leading slashes do not collide:
- self.assertEqual(full_url, "http://localhost:8983/solr/core0/update")
+ assert full_url == "http://localhost:8983/solr/core0/update"
def test_request_handler(self):
before_test_use_qt_param = self.solr.use_qt_param
@@ -1179,27 +1178,27 @@ def test_request_handler(self):
self.solr.search("my query")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("select"))
+ assert args[1].startswith("select")
self.solr.search("my", search_handler="/autocomplete")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("select"))
- self.assertGreaterEqual(args[1].find("qt=%2Fautocomplete"), 0)
+ assert args[1].startswith("select")
+ assert args[1].find("qt=%2Fautocomplete") >= 0
self.solr.search_handler = "/autocomplete"
self.solr.search("my")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("select"))
- self.assertGreaterEqual(args[1].find("qt=%2Fautocomplete"), 0)
+ assert args[1].startswith("select")
+ assert args[1].find("qt=%2Fautocomplete") >= 0
self.solr.use_qt_param = False
# will change the path, so expect a 404
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.search("my")
args, _kwargs = self.solr._send_request.call_args
- self.assertTrue(args[1].startswith("/autocomplete"))
- self.assertLess(args[1].find("qt=%2Fautocomplete"), 0)
+ assert args[1].startswith("/autocomplete")
+ assert args[1].find("qt=%2Fautocomplete") < 0
# reset the values to what they were before the test
self.solr.use_qt_param = before_test_use_qt_param
@@ -1207,7 +1206,7 @@ def test_request_handler(self):
def test_ping(self):
self.solr.ping()
- with self.assertRaises(SolrError):
+ with pytest.raises(SolrError):
self.solr.ping(handler="fakehandler")
@@ -1226,19 +1225,19 @@ def test_does_not_require_commit(self):
# add should not require commit arg
self.solr.add(self.docs)
- self.assertEqual(len(self.solr.search("doc")), 2)
- self.assertEqual(len(self.solr.search("example")), 1)
+ assert len(self.solr.search("doc")) == 2
+ assert len(self.solr.search("example")) == 1
# update should not require commit arg
self.docs[0]["title"] = "Updated Doc"
self.docs[1]["title"] = "Another example updated doc"
self.solr.add(self.docs, fieldUpdates={"title": "set"})
- self.assertEqual(len(self.solr.search("updated")), 2)
- self.assertEqual(len(self.solr.search("example")), 1)
+ assert len(self.solr.search("updated")) == 2
+ assert len(self.solr.search("example")) == 1
# delete should not require commit arg
self.solr.delete(q="*:*")
- self.assertEqual(len(self.solr.search("*")), 0)
+ assert len(self.solr.search("*")) == 0
def test_can_handles_default_commit_policy(self):
self.solr._send_request = Mock(wraps=self.solr._send_request)
@@ -1249,4 +1248,4 @@ def test_can_handles_default_commit_policy(self):
self.solr.add(self.docs, commit=arg)
args, _ = self.solr._send_request.call_args
committing_in_url = "commit" in args[1]
- self.assertEqual(expected_commit, committing_in_url)
+ assert expected_commit == committing_in_url
diff --git a/tests/test_cloud.py b/tests/test_cloud.py
index 5756f029..8f966a61 100644
--- a/tests/test_cloud.py
+++ b/tests/test_cloud.py
@@ -1,10 +1,12 @@
import contextlib
+import re
import unittest
from typing import ClassVar
-from pysolr import SolrCloud, SolrError, ZooKeeper, json
+import pytest
-from .test_client import SolrTestCase
+from pysolr import SolrCloud, SolrError, ZooKeeper, json
+from tests.test_client import SolrTestCase
try:
from kazoo.exceptions import KazooException
@@ -72,26 +74,26 @@ def setUpClass(cls):
def assertURLStartsWith(self, url, path):
node_urls = self.zk.getHosts("core0")
- self.assertIn(url, {"%s/%s" % (node_url, path) for node_url in node_urls})
+ assert url in {"%s/%s" % (node_url, path) for node_url in node_urls}
def get_solr(self, collection, timeout=60):
return SolrCloud(self.zk, collection, timeout=timeout)
def test_init(self):
- self.assertTrue(self.solr.url.endswith("/solr/core0"))
- self.assertIsInstance(self.solr.decoder, json.JSONDecoder)
- self.assertEqual(self.solr.timeout, 60)
+ assert self.solr.url.endswith("/solr/core0")
+ assert isinstance(self.solr.decoder, json.JSONDecoder)
+ assert self.solr.timeout == 60
custom_solr = self.get_solr("core0", timeout=17)
- self.assertEqual(custom_solr.timeout, 17)
+ assert custom_solr.timeout == 17
def test_custom_results_class(self):
solr = SolrCloud(self.zk, "core0", results_cls=dict)
results = solr.search(q="*:*")
- self.assertIsInstance(results, dict)
- self.assertIn("responseHeader", results)
- self.assertIn("response", results)
+ assert isinstance(results, dict)
+ assert "responseHeader" in results
+ assert "response" in results
def test__send_request_to_bad_path(self):
raise unittest.SkipTest("This test makes no sense in a SolrCloud world")
@@ -100,23 +102,23 @@ def test_send_request_to_bad_core(self):
raise unittest.SkipTest("This test makes no sense in a SolrCloud world")
def test_invalid_collection(self):
- self.assertRaises(SolrError, SolrCloud, self.zk, "core12345")
+ with pytest.raises(SolrError):
+ SolrCloud(self.zk, "core12345")
def test__create_full_url(self):
# Nada.
- self.assertRegex(
- self.solr._create_full_url(path=""),
- r"http://localhost:89../solr/core0$",
+ assert re.search(
+ r"http://localhost:89../solr/core0$", self.solr._create_full_url(path="")
)
# Basic path.
- self.assertRegex(
- self.solr._create_full_url(path="pysolr_tests"),
+ assert re.search(
r"http://localhost:89../solr/core0/pysolr_tests$",
+ self.solr._create_full_url(path="pysolr_tests"),
)
# Leading slash (& making sure we don't touch the trailing slash).
- self.assertRegex(
- self.solr._create_full_url(path="/pysolr_tests/select/?whatever=/"),
+ assert re.search(
r"http://localhost:89../solr/core0/pysolr_tests/select/\?whatever=/",
+ self.solr._create_full_url(path="/pysolr_tests/select/?whatever=/"),
)
@classmethod
diff --git a/uv.lock b/uv.lock
index 313fde14..ceb13710 100644
--- a/uv.lock
+++ b/uv.lock
@@ -109,6 +109,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
]
+[[package]]
+name = "colorama"
+version = "0.4.6"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
+]
+
[[package]]
name = "coverage"
version = "7.11.3"
@@ -217,6 +226,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
]
+[[package]]
+name = "exceptiongroup"
+version = "1.3.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
+]
+
[[package]]
name = "filelock"
version = "3.20.0"
@@ -244,6 +265,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
[[package]]
name = "kazoo"
version = "2.10.0"
@@ -262,6 +292,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" },
]
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
+]
+
[[package]]
name = "platformdirs"
version = "4.5.0"
@@ -271,6 +310,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
]
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
[[package]]
name = "pre-commit"
version = "4.4.0"
@@ -287,6 +335,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/27/11/574fe7d13acf30bfd0a8dd7fa1647040f2b8064f13f43e8c963b1e65093b/pre_commit-4.4.0-py2.py3-none-any.whl", hash = "sha256:b35ea52957cbf83dcc5d8ee636cbead8624e3a15fbfa61a370e42158ac8a5813", size = 226049, upload-time = "2025-11-08T21:12:10.228Z" },
]
+[[package]]
+name = "pygments"
+version = "2.19.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
+]
+
[[package]]
name = "pysolr"
version = "3.11.0"
@@ -304,6 +361,7 @@ solrcloud = [
dev = [
{ name = "coverage" },
{ name = "pre-commit" },
+ { name = "pytest" },
]
[package.metadata]
@@ -316,7 +374,26 @@ provides-extras = ["solrcloud"]
[package.metadata.requires-dev]
dev = [
{ name = "coverage" },
- { name = "pre-commit", specifier = ">=4.4.0" },
+ { name = "pre-commit", specifier = ">=4.4" },
+ { name = "pytest" },
+]
+
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama", marker = "sys_platform == 'win32'" },
+ { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
+ { name = "iniconfig" },
+ { name = "packaging" },
+ { name = "pluggy" },
+ { name = "pygments" },
+ { name = "tomli", marker = "python_full_version < '3.11'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
]
[[package]]
@@ -398,6 +475,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
+[[package]]
+name = "tomli"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" },
+ { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" },
+ { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" },
+ { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" },
+ { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" },
+ { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" },
+ { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" },
+ { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" },
+ { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" },
+ { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" },
+ { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" },
+ { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" },
+ { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" },
+ { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" },
+ { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" },
+ { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" },
+ { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" },
+ { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" },
+ { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" },
+ { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" },
+ { url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" },
+ { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" },
+ { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" },
+ { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" },
+ { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" },
+ { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" },
+ { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" },
+ { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" },
+ { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" },
+ { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" },
+ { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.15.0"