|
10 | 10 | from redisvl.utils.vectorize import HFTextVectorizer |
11 | 11 |
|
12 | 12 |
|
@pytest.fixture(scope="session")
def worker_id(request):
    """
    Return the pytest-xdist worker ID for this test session.

    Under pytest-xdist, each worker's config exposes a ``workerinput``
    dict containing a "workerid" entry. When xdist is not in use there is
    no ``workerinput`` attribute, so fall back to "master". Centralizing
    this lookup keeps the worker-id logic consistent across all tests.
    """
    return getattr(request.config, "workerinput", {}).get("workerid", "master")
| 24 | + |
| 25 | + |
@pytest.fixture(autouse=True)
def set_tokenizers_parallelism():
    """Force-disable HuggingFace tokenizers parallelism for every test to avoid fork deadlocks."""
    os.environ.update({"TOKENIZERS_PARALLELISM": "false"})
17 | 30 |
|
18 | 31 |
|
19 | 32 | @pytest.fixture(scope="session", autouse=True) |
20 | | -def redis_container(request): |
| 33 | +def redis_container(worker_id): |
21 | 34 | """ |
22 | 35 | If using xdist, create a unique Compose project for each xdist worker by |
23 | 36 | setting COMPOSE_PROJECT_NAME. That prevents collisions on container/volume |
24 | 37 | names. |
25 | 38 | """ |
26 | | - # In xdist, the config has "workerid" in workerinput |
27 | | - workerinput = getattr(request.config, "workerinput", {}) |
28 | | - worker_id = workerinput.get("workerid", "master") |
29 | | - |
30 | 39 | # Set the Compose project name so containers do not clash across workers |
31 | 40 | os.environ["COMPOSE_PROJECT_NAME"] = f"redis_test_{worker_id}" |
32 | 41 | os.environ.setdefault("REDIS_IMAGE", "redis/redis-stack-server:latest") |
@@ -80,6 +89,16 @@ def hf_vectorizer(): |
80 | 89 | ) |
81 | 90 |
|
82 | 91 |
|
@pytest.fixture(scope="session")
def hf_vectorizer_float16():
    """Session-scoped HF text vectorizer configured to emit float16 embeddings."""
    vectorizer = HFTextVectorizer(dtype="float16")
    return vectorizer
| 95 | + |
| 96 | + |
@pytest.fixture(scope="session")
def hf_vectorizer_with_model():
    """Session-scoped HF text vectorizer backed by the all-mpnet-base-v2 model."""
    vectorizer = HFTextVectorizer("sentence-transformers/all-mpnet-base-v2")
    return vectorizer
| 100 | + |
| 101 | + |
83 | 102 | @pytest.fixture |
84 | 103 | def sample_datetimes(): |
85 | 104 | return { |
@@ -196,16 +215,17 @@ def pytest_collection_modifyitems( |
196 | 215 |
|
197 | 216 |
|
198 | 217 | @pytest.fixture |
199 | | -def flat_index(sample_data, redis_url): |
| 218 | +def flat_index(sample_data, redis_url, worker_id): |
200 | 219 | """ |
201 | 220 | A fixture that uses the "flat" algorithm for its vector field. |
202 | 221 | """ |
| 222 | + |
203 | 223 | # construct a search index from the schema |
204 | 224 | index = SearchIndex.from_dict( |
205 | 225 | { |
206 | 226 | "index": { |
207 | | - "name": "user_index", |
208 | | - "prefix": "v1", |
| 227 | + "name": f"user_index_{worker_id}", |
| 228 | + "prefix": f"v1_{worker_id}", |
209 | 229 | "storage_type": "hash", |
210 | 230 | }, |
211 | 231 | "fields": [ |
@@ -250,16 +270,17 @@ def hash_preprocess(item: dict) -> dict: |
250 | 270 |
|
251 | 271 |
|
252 | 272 | @pytest.fixture |
253 | | -async def async_flat_index(sample_data, redis_url): |
| 273 | +async def async_flat_index(sample_data, redis_url, worker_id): |
254 | 274 | """ |
255 | 275 | A fixture that uses the "flat" algorithm for its vector field. |
256 | 276 | """ |
| 277 | + |
257 | 278 | # construct a search index from the schema |
258 | 279 | index = AsyncSearchIndex.from_dict( |
259 | 280 | { |
260 | 281 | "index": { |
261 | | - "name": "user_index", |
262 | | - "prefix": "v1", |
| 282 | + "name": f"user_index_{worker_id}", |
| 283 | + "prefix": f"v1_{worker_id}", |
263 | 284 | "storage_type": "hash", |
264 | 285 | }, |
265 | 286 | "fields": [ |
@@ -304,15 +325,16 @@ def hash_preprocess(item: dict) -> dict: |
304 | 325 |
|
305 | 326 |
|
306 | 327 | @pytest.fixture |
307 | | -async def async_hnsw_index(sample_data, redis_url): |
| 328 | +async def async_hnsw_index(sample_data, redis_url, worker_id): |
308 | 329 | """ |
309 | 330 | A fixture that uses the "hnsw" algorithm for its vector field. |
310 | 331 | """ |
| 332 | + |
311 | 333 | index = AsyncSearchIndex.from_dict( |
312 | 334 | { |
313 | 335 | "index": { |
314 | | - "name": "user_index", |
315 | | - "prefix": "v1", |
| 336 | + "name": f"user_index_{worker_id}", |
| 337 | + "prefix": f"v1_{worker_id}", |
316 | 338 | "storage_type": "hash", |
317 | 339 | }, |
318 | 340 | "fields": [ |
@@ -354,15 +376,16 @@ def hash_preprocess(item: dict) -> dict: |
354 | 376 |
|
355 | 377 |
|
356 | 378 | @pytest.fixture |
357 | | -def hnsw_index(sample_data, redis_url): |
| 379 | +def hnsw_index(sample_data, redis_url, worker_id): |
358 | 380 | """ |
359 | 381 | A fixture that uses the "hnsw" algorithm for its vector field. |
360 | 382 | """ |
| 383 | + |
361 | 384 | index = SearchIndex.from_dict( |
362 | 385 | { |
363 | 386 | "index": { |
364 | | - "name": "user_index", |
365 | | - "prefix": "v1", |
| 387 | + "name": f"user_index_{worker_id}", |
| 388 | + "prefix": f"v1_{worker_id}", |
366 | 389 | "storage_type": "hash", |
367 | 390 | }, |
368 | 391 | "fields": [ |
|
0 commit comments