diff --git a/.gitignore b/.gitignore
index 3824f4c..48bfaab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,4 @@ dist
.envrc
codegen.log
Brewfile.lock.json
+uv.lock
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index d04f223..4208b5c 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.5.1"
+ ".": "0.6.0"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index 778c22b..9a8c935 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 24
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web%2Fparallel-sdk-66ee13c3475d2c76f0956f258f0469903155b83ef02e839641be94cdc2014cf3.yml
-openapi_spec_hash: 88af7b88725bead1f8ccdcaeb436fadb
-config_hash: e17d82e9cb35004e5f9a9d3c4cf51aeb
+configured_endpoints: 38
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/parallel-web/parallel-sdk-b134f034fe11499d713c03d07778aba8a395d7e3cbbc3d8a4bd2891f0aa970ba.yml
+openapi_spec_hash: c4fc5b0cb3bc48076f736a0ad2b2e75e
+config_hash: 27cd9354fb0ac3129cbf269737cece6c
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7c1dd5c..d784261 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,36 @@
# Changelog
+## 0.6.0 (2026-05-06)
+
+Full Changelog: [v0.5.1...v0.6.0](https://github.com/parallel-web/parallel-sdk-python/compare/v0.5.1...v0.6.0)
+
+### Features
+
+* **api:** manual updates ([8cb17ac](https://github.com/parallel-web/parallel-sdk-python/commit/8cb17ac6d6c9a5a9e6443d9ecf5c121250b2eed2))
+* **api:** manual updates ([e4008e4](https://github.com/parallel-web/parallel-sdk-python/commit/e4008e4f8432a6c3e2341d18fee37fcedf8a530e))
+* **api:** manual updates ([174407f](https://github.com/parallel-web/parallel-sdk-python/commit/174407ff7467789e19d3abeffcaccd36fd35e786))
+* **api:** manual updates ([1c47c8b](https://github.com/parallel-web/parallel-sdk-python/commit/1c47c8baa579a403064e59a5fbb75400315951c9))
+* **api:** Task Groups v1 added to SDK ([198e317](https://github.com/parallel-web/parallel-sdk-python/commit/198e317bb3b5e5961e63c1c04bac0eb593385b7b))
+* support setting headers via env ([6dd2b05](https://github.com/parallel-web/parallel-sdk-python/commit/6dd2b05bcf25259ae50e650376c3182913439487))
+
+
+### Bug Fixes
+
+* re-export TaskGroup from parallel.types.beta and silence reportDeprecated ([dc6119f](https://github.com/parallel-web/parallel-sdk-python/commit/dc6119f6dccdef75bdbe4ce838dd8ba51cc2428a))
+* **scripts:** remove unreachable check and redundant type annotation ([f63ad0a](https://github.com/parallel-web/parallel-sdk-python/commit/f63ad0ae48994be8b16c20438bb80c61cd943fec))
+* **scripts:** satisfy pyright in alias resolver ([892f474](https://github.com/parallel-web/parallel-sdk-python/commit/892f4743488ae5d8c4683123193da4f4e61d2464))
+* **types:** preserve back-compat aliases for renamed inline classes ([532ee8f](https://github.com/parallel-web/parallel-sdk-python/commit/532ee8f32ecbaf611f0ff5f6c80a532e2f0bcd66))
+* **types:** use module-level alias instead of import-as ([b1b9858](https://github.com/parallel-web/parallel-sdk-python/commit/b1b9858ffa95d90871ce36b06e510638e3546bde))
+* use correct field name format for multipart file arrays ([1e34228](https://github.com/parallel-web/parallel-sdk-python/commit/1e3422859a25c66804f7f588fd24276417c52057))
+
+
+### Chores
+
+* **internal:** more robust bootstrap script ([d4c7737](https://github.com/parallel-web/parallel-sdk-python/commit/d4c773762e983a657fb4c6f5d39529c3467e0994))
+* **internal:** reformat pyproject.toml ([b3c0639](https://github.com/parallel-web/parallel-sdk-python/commit/b3c063982542886a490679f840ca0fad94695227))
+* **scripts:** follow alias and attribute redirections in breaking-change detection ([89b1495](https://github.com/parallel-web/parallel-sdk-python/commit/89b1495236f280eee8af7c48ecff3ff2453291bd))
+* stop tracking uv.lock (project uses requirements*.lock from rye) ([540471a](https://github.com/parallel-web/parallel-sdk-python/commit/540471afb4da3a6ed485af15554cb548e7086bbd))
+
## 0.5.1 (2026-04-22)
Full Changelog: [v0.5.0...v0.5.1](https://github.com/parallel-web/parallel-sdk-python/compare/v0.5.0...v0.5.1)
diff --git a/api.md b/api.md
index 6462851..c4e0ebd 100644
--- a/api.md
+++ b/api.md
@@ -17,6 +17,7 @@ from parallel.types import (
ExtractResponse,
ExtractResult,
FetchPolicy,
+ FullContentSettings,
SearchResult,
UsageItem,
WebSearchResult,
@@ -40,9 +41,13 @@ from parallel.types import (
JsonSchema,
ParsedTaskRunResult,
RunInput,
+ TaskAdvancedSettings,
TaskRun,
TaskRunJsonOutput,
+ TaskRunProgressMessageEvent,
+ TaskRunProgressStatsEvent,
TaskRunResult,
+ TaskRunSourceStats,
TaskRunTextOutput,
TaskSpec,
TextSchema,
@@ -54,9 +59,70 @@ Methods:
- client.task_run.create(\*\*params) -> TaskRun
- client.task_run.retrieve(run_id) -> TaskRun
- client.task_run.result(run_id, \*\*params) -> TaskRunResult
+- client.task_run.retrieve_input(run_id) -> RunInput
Convenience methods:
- client.task_run.execute(input, processor, output: OutputSchema) -> TaskRunResult
- client.task_run.execute(input, processor, output: Type[OutputT]) -> ParsedTaskRunResult[OutputT]
+# TaskGroup
+
+Types:
+
+```python
+from parallel.types import (
+ TaskGroup,
+ TaskGroupRunResponse,
+ TaskGroupStatus,
+ TaskGroupStatusEvent,
+ TaskGroupEventsResponse,
+ TaskGroupGetRunsResponse,
+)
+```
+
+Methods:
+
+- client.task_group.create(\*\*params) -> TaskGroup
+- client.task_group.retrieve(task_group_id) -> TaskGroup
+- client.task_group.add_runs(task_group_id, \*\*params) -> TaskGroupRunResponse
+- client.task_group.events(task_group_id, \*\*params) -> TaskGroupEventsResponse
+- client.task_group.get_runs(task_group_id, \*\*params) -> TaskGroupGetRunsResponse
+- client.task_group.retrieve_run(run_id, \*, task_group_id) -> TaskRun
+
+# Monitor
+
+Types:
+
+```python
+from parallel.types import (
+ AdvancedMonitorSettings,
+ CreateMonitorRequest,
+ Monitor,
+ MonitorCompletionEvent,
+ MonitorErrorEvent,
+ MonitorEventStreamEvent,
+ MonitorEventStreamResponseSettings,
+ MonitorEventStreamSettings,
+ MonitorSnapshotEvent,
+ MonitorSnapshotOutput,
+ MonitorSnapshotResponseSettings,
+ MonitorSnapshotSettings,
+ MonitorWebhook,
+ PaginatedMonitorEvents,
+ PaginatedMonitorResponse,
+ UpdateMonitorEventStreamSettings,
+ UpdateMonitorRequest,
+)
+```
+
+Methods:
+
+- client.monitor.create(\*\*params) -> Monitor
+- client.monitor.retrieve(monitor_id) -> Monitor
+- client.monitor.update(monitor_id, \*\*params) -> Monitor
+- client.monitor.list(\*\*params) -> PaginatedMonitorResponse
+- client.monitor.cancel(monitor_id) -> Monitor
+- client.monitor.events(monitor_id, \*\*params) -> PaginatedMonitorEvents
+- client.monitor.trigger(monitor_id) -> None
+
# [Beta](src/parallel/resources/beta/api.md)
diff --git a/pyproject.toml b/pyproject.toml
index 621c3da..c56061f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "parallel-web"
-version = "0.5.1"
+version = "0.6.0"
description = "The official Python library for the Parallel API"
dynamic = ["readme"]
license = "MIT"
@@ -158,6 +158,11 @@ reportOverlappingOverload = false
reportImportCycles = false
reportPrivateUsage = false
+# Deprecation is a runtime concern; type-check warnings produce noise when the
+# breaking-change detector compares against pre-deprecation baseline tests.
+# Newly-generated tests already add a per-file `# pyright: reportDeprecated=false`
+# marker for deprecated resources, so this just promotes that to project-level.
+reportDeprecated = false
[tool.mypy]
pretty = true
@@ -169,7 +174,7 @@ show_error_codes = true
#
# We also exclude our `tests` as mypy doesn't always infer
# types correctly and Pyright will still catch any type errors.
-exclude = ['src/parallel/_files.py', '_dev/.*.py', 'tests/.*']
+exclude = ["src/parallel/_files.py", "_dev/.*.py", "tests/.*"]
strict_equality = true
implicit_reexport = true
diff --git a/scripts/bootstrap b/scripts/bootstrap
index b430fee..fe8451e 100755
--- a/scripts/bootstrap
+++ b/scripts/bootstrap
@@ -4,7 +4,7 @@ set -e
cd "$(dirname "$0")/.."
-if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then
+if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "${SKIP_BREW:-}" != "1" ] && [ -t 0 ]; then
brew bundle check >/dev/null 2>&1 || {
echo -n "==> Install Homebrew dependencies? (y/N): "
read -r response
diff --git a/scripts/detect-breaking-changes.py b/scripts/detect-breaking-changes.py
index 4fc5250..4912c05 100644
--- a/scripts/detect-breaking-changes.py
+++ b/scripts/detect-breaking-changes.py
@@ -10,11 +10,67 @@
from rich.style import Style
+def _resolve_redirect(obj: griffe.Object | griffe.Alias) -> griffe.Object | griffe.Alias | None:
+ """Follow back-compat redirections to a real class definition.
+
+ We use two patterns to preserve import paths after a schema rename:
+ * `from .new_path import NewClass` → `griffe.Alias`
+ * `OldName = NewClass` (module-level assignment) → `griffe.Attribute`
+ Both are runtime-equivalent re-exports and should expose the target
+ class's attributes for breaking-change detection. Without resolving them,
+ griffe reports every old attribute as removed.
+ """
+ visited: set[int] = set()
+ current: griffe.Object | griffe.Alias = obj
+ while id(current) not in visited:
+ visited.add(id(current))
+ if isinstance(current, griffe.Alias):
+ try:
+ return current.final_target
+ except Exception:
+ return None
+ if isinstance(current, griffe.Attribute):
+ value = current.value
+ parent = current.parent
+ if parent is None or value is None:
+ return None
+ if isinstance(value, griffe.ExprName):
+ next_obj = parent.members.get(str(value))
+ if next_obj is None:
+ return None
+ current = next_obj
+ continue
+ if isinstance(value, griffe.ExprAttribute):
+ # Qualified path like `task_group_status.TaskGroupStatus`.
+ # Walk segment by segment, resolving any module aliases as we go.
+ next_obj = parent
+ for segment in value.values:
+ if not isinstance(segment, griffe.ExprName):
+ continue
+ if isinstance(next_obj, griffe.Alias):
+ try:
+ next_obj = next_obj.final_target
+ except Exception:
+ return None
+ if next_obj is None or not hasattr(next_obj, "members"):
+ return None
+ next_obj = next_obj.members.get(str(segment))
+ if next_obj is None:
+ return None
+ current = next_obj
+ continue
+ return None
+ return current
+ return None
+
+
def public_members(obj: griffe.Object | griffe.Alias) -> dict[str, griffe.Object | griffe.Alias]:
+ target = _resolve_redirect(obj)
+ if target is not None and target is not obj:
+ obj = target
+
if isinstance(obj, griffe.Alias):
- # ignore imports for now, they're technically part of the public API
- # but we don't have good preventative measures in place to prevent
- # changing them
+ # Truly opaque alias we couldn't resolve.
return {}
return {name: value for name, value in obj.all_members.items() if not name.startswith("_")}
diff --git a/src/parallel/_client.py b/src/parallel/_client.py
index 3ce3e24..321e80e 100644
--- a/src/parallel/_client.py
+++ b/src/parallel/_client.py
@@ -27,6 +27,7 @@
)
from ._utils import (
is_given,
+ is_mapping_t,
maybe_transform,
get_async_library,
async_maybe_transform,
@@ -53,9 +54,11 @@
from .types.advanced_extract_settings_param import AdvancedExtractSettingsParam
if TYPE_CHECKING:
- from .resources import beta, task_run
+ from .resources import beta, monitor, task_run, task_group
+ from .resources.monitor import MonitorResource, AsyncMonitorResource
from .resources.task_run import TaskRunResource, AsyncTaskRunResource
from .resources.beta.beta import BetaResource, AsyncBetaResource
+ from .resources.task_group import TaskGroupResource, AsyncTaskGroupResource
__all__ = [
"Timeout",
@@ -113,6 +116,15 @@ def __init__(
if base_url is None:
base_url = f"https://api.parallel.ai"
+ custom_headers_env = os.environ.get("PARALLEL_CUSTOM_HEADERS")
+ if custom_headers_env is not None:
+ parsed: dict[str, str] = {}
+ for line in custom_headers_env.split("\n"):
+ colon = line.find(":")
+ if colon >= 0:
+ parsed[line[:colon].strip()] = line[colon + 1 :].strip()
+ default_headers = {**parsed, **(default_headers if is_mapping_t(default_headers) else {})}
+
super().__init__(
version=__version__,
base_url=base_url,
@@ -132,7 +144,7 @@ def task_run(self) -> TaskRunResource:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -142,6 +154,38 @@ def task_run(self) -> TaskRunResource:
return TaskRunResource(self)
+ @cached_property
+ def task_group(self) -> TaskGroupResource:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import TaskGroupResource
+
+ return TaskGroupResource(self)
+
+ @cached_property
+ def monitor(self) -> MonitorResource:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import MonitorResource
+
+ return MonitorResource(self)
+
@cached_property
def beta(self) -> BetaResource:
from .resources.beta import BetaResource
@@ -464,6 +508,15 @@ def __init__(
if base_url is None:
base_url = f"https://api.parallel.ai"
+ custom_headers_env = os.environ.get("PARALLEL_CUSTOM_HEADERS")
+ if custom_headers_env is not None:
+ parsed: dict[str, str] = {}
+ for line in custom_headers_env.split("\n"):
+ colon = line.find(":")
+ if colon >= 0:
+ parsed[line[:colon].strip()] = line[colon + 1 :].strip()
+ default_headers = {**parsed, **(default_headers if is_mapping_t(default_headers) else {})}
+
super().__init__(
version=__version__,
base_url=base_url,
@@ -483,7 +536,7 @@ def task_run(self) -> AsyncTaskRunResource:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -493,6 +546,38 @@ def task_run(self) -> AsyncTaskRunResource:
return AsyncTaskRunResource(self)
+ @cached_property
+ def task_group(self) -> AsyncTaskGroupResource:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import AsyncTaskGroupResource
+
+ return AsyncTaskGroupResource(self)
+
+ @cached_property
+ def monitor(self) -> AsyncMonitorResource:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import AsyncMonitorResource
+
+ return AsyncMonitorResource(self)
+
@cached_property
def beta(self) -> AsyncBetaResource:
from .resources.beta import AsyncBetaResource
@@ -792,7 +877,7 @@ def task_run(self) -> task_run.TaskRunResourceWithRawResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -802,6 +887,38 @@ def task_run(self) -> task_run.TaskRunResourceWithRawResponse:
return TaskRunResourceWithRawResponse(self._client.task_run)
+ @cached_property
+ def task_group(self) -> task_group.TaskGroupResourceWithRawResponse:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import TaskGroupResourceWithRawResponse
+
+ return TaskGroupResourceWithRawResponse(self._client.task_group)
+
+ @cached_property
+ def monitor(self) -> monitor.MonitorResourceWithRawResponse:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import MonitorResourceWithRawResponse
+
+ return MonitorResourceWithRawResponse(self._client.monitor)
+
@cached_property
def beta(self) -> beta.BetaResourceWithRawResponse:
from .resources.beta import BetaResourceWithRawResponse
@@ -830,7 +947,7 @@ def task_run(self) -> task_run.AsyncTaskRunResourceWithRawResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -840,6 +957,38 @@ def task_run(self) -> task_run.AsyncTaskRunResourceWithRawResponse:
return AsyncTaskRunResourceWithRawResponse(self._client.task_run)
+ @cached_property
+ def task_group(self) -> task_group.AsyncTaskGroupResourceWithRawResponse:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import AsyncTaskGroupResourceWithRawResponse
+
+ return AsyncTaskGroupResourceWithRawResponse(self._client.task_group)
+
+ @cached_property
+ def monitor(self) -> monitor.AsyncMonitorResourceWithRawResponse:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import AsyncMonitorResourceWithRawResponse
+
+ return AsyncMonitorResourceWithRawResponse(self._client.monitor)
+
@cached_property
def beta(self) -> beta.AsyncBetaResourceWithRawResponse:
from .resources.beta import AsyncBetaResourceWithRawResponse
@@ -868,7 +1017,7 @@ def task_run(self) -> task_run.TaskRunResourceWithStreamingResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -878,6 +1027,38 @@ def task_run(self) -> task_run.TaskRunResourceWithStreamingResponse:
return TaskRunResourceWithStreamingResponse(self._client.task_run)
+ @cached_property
+ def task_group(self) -> task_group.TaskGroupResourceWithStreamingResponse:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import TaskGroupResourceWithStreamingResponse
+
+ return TaskGroupResourceWithStreamingResponse(self._client.task_group)
+
+ @cached_property
+ def monitor(self) -> monitor.MonitorResourceWithStreamingResponse:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import MonitorResourceWithStreamingResponse
+
+ return MonitorResourceWithStreamingResponse(self._client.monitor)
+
@cached_property
def beta(self) -> beta.BetaResourceWithStreamingResponse:
from .resources.beta import BetaResourceWithStreamingResponse
@@ -906,7 +1087,7 @@ def task_run(self) -> task_run.AsyncTaskRunResourceWithStreamingResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -916,6 +1097,38 @@ def task_run(self) -> task_run.AsyncTaskRunResourceWithStreamingResponse:
return AsyncTaskRunResourceWithStreamingResponse(self._client.task_run)
+ @cached_property
+ def task_group(self) -> task_group.AsyncTaskGroupResourceWithStreamingResponse:
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+ from .resources.task_group import AsyncTaskGroupResourceWithStreamingResponse
+
+ return AsyncTaskGroupResourceWithStreamingResponse(self._client.task_group)
+
+ @cached_property
+ def monitor(self) -> monitor.AsyncMonitorResourceWithStreamingResponse:
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+ from .resources.monitor import AsyncMonitorResourceWithStreamingResponse
+
+ return AsyncMonitorResourceWithStreamingResponse(self._client.monitor)
+
@cached_property
def beta(self) -> beta.AsyncBetaResourceWithStreamingResponse:
from .resources.beta import AsyncBetaResourceWithStreamingResponse
diff --git a/src/parallel/_qs.py b/src/parallel/_qs.py
index de8c99b..4127c19 100644
--- a/src/parallel/_qs.py
+++ b/src/parallel/_qs.py
@@ -2,17 +2,13 @@
from typing import Any, List, Tuple, Union, Mapping, TypeVar
from urllib.parse import parse_qs, urlencode
-from typing_extensions import Literal, get_args
+from typing_extensions import get_args
-from ._types import NotGiven, not_given
+from ._types import NotGiven, ArrayFormat, NestedFormat, not_given
from ._utils import flatten
_T = TypeVar("_T")
-
-ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
-NestedFormat = Literal["dots", "brackets"]
-
PrimitiveData = Union[str, int, float, bool, None]
# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"]
# https://github.com/microsoft/pyright/issues/3555
diff --git a/src/parallel/_types.py b/src/parallel/_types.py
index 12af1c0..9680e45 100644
--- a/src/parallel/_types.py
+++ b/src/parallel/_types.py
@@ -47,6 +47,9 @@
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
_T = TypeVar("_T")
+ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
+NestedFormat = Literal["dots", "brackets"]
+
# Approximates httpx internal ProxiesTypes and RequestFiles types
# while adding support for `PathLike` instances
diff --git a/src/parallel/_utils/_utils.py b/src/parallel/_utils/_utils.py
index 463155d..b5596a0 100644
--- a/src/parallel/_utils/_utils.py
+++ b/src/parallel/_utils/_utils.py
@@ -17,11 +17,11 @@
)
from pathlib import Path
from datetime import date, datetime
-from typing_extensions import TypeGuard
+from typing_extensions import TypeGuard, get_args
import sniffio
-from .._types import Omit, NotGiven, FileTypes, HeadersLike
+from .._types import Omit, NotGiven, FileTypes, ArrayFormat, HeadersLike
_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
@@ -40,25 +40,45 @@ def extract_files(
query: Mapping[str, object],
*,
paths: Sequence[Sequence[str]],
+ array_format: ArrayFormat = "brackets",
) -> list[tuple[str, FileTypes]]:
"""Recursively extract files from the given dictionary based on specified paths.
A path may look like this ['foo', 'files', '', 'data'].
+    ``array_format`` controls how empty-string (``''``) segments contribute to the emitted
+ field name. Supported values: ``"brackets"`` (``foo[]``), ``"repeat"`` and
+ ``"comma"`` (``foo``), ``"indices"`` (``foo[0]``, ``foo[1]``).
+
Note: this mutates the given dictionary.
"""
files: list[tuple[str, FileTypes]] = []
for path in paths:
- files.extend(_extract_items(query, path, index=0, flattened_key=None))
+ files.extend(_extract_items(query, path, index=0, flattened_key=None, array_format=array_format))
return files
+def _array_suffix(array_format: ArrayFormat, array_index: int) -> str:
+ if array_format == "brackets":
+ return "[]"
+ if array_format == "indices":
+ return f"[{array_index}]"
+ if array_format == "repeat" or array_format == "comma":
+ # Both repeat the bare field name for each file part; there is no
+ # meaningful way to comma-join binary parts.
+ return ""
+ raise NotImplementedError(
+ f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}"
+ )
+
+
def _extract_items(
obj: object,
path: Sequence[str],
*,
index: int,
flattened_key: str | None,
+ array_format: ArrayFormat,
) -> list[tuple[str, FileTypes]]:
try:
key = path[index]
@@ -75,9 +95,11 @@ def _extract_items(
if is_list(obj):
files: list[tuple[str, FileTypes]] = []
- for entry in obj:
- assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
- files.append((flattened_key + "[]", cast(FileTypes, entry)))
+ for array_index, entry in enumerate(obj):
+ suffix = _array_suffix(array_format, array_index)
+ emitted_key = (flattened_key + suffix) if flattened_key else suffix
+ assert_is_file_content(entry, key=emitted_key)
+ files.append((emitted_key, cast(FileTypes, entry)))
return files
assert_is_file_content(obj, key=flattened_key)
@@ -106,6 +128,7 @@ def _extract_items(
path,
index=index,
flattened_key=flattened_key,
+ array_format=array_format,
)
elif is_list(obj):
if key != "":
@@ -117,9 +140,12 @@ def _extract_items(
item,
path,
index=index,
- flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
+ flattened_key=(
+ (flattened_key if flattened_key is not None else "") + _array_suffix(array_format, array_index)
+ ),
+ array_format=array_format,
)
- for item in obj
+ for array_index, item in enumerate(obj)
]
)
diff --git a/src/parallel/_version.py b/src/parallel/_version.py
index ee42083..a28127b 100644
--- a/src/parallel/_version.py
+++ b/src/parallel/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "parallel"
-__version__ = "0.5.1" # x-release-please-version
+__version__ = "0.6.0" # x-release-please-version
diff --git a/src/parallel/resources/__init__.py b/src/parallel/resources/__init__.py
index 6fc7c06..5097ea0 100644
--- a/src/parallel/resources/__init__.py
+++ b/src/parallel/resources/__init__.py
@@ -1,5 +1,13 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+from .monitor import (
+ MonitorResource,
+ AsyncMonitorResource,
+ MonitorResourceWithRawResponse,
+ AsyncMonitorResourceWithRawResponse,
+ MonitorResourceWithStreamingResponse,
+ AsyncMonitorResourceWithStreamingResponse,
+)
from .task_run import (
TaskRunResource,
AsyncTaskRunResource,
@@ -8,6 +16,14 @@
TaskRunResourceWithStreamingResponse,
AsyncTaskRunResourceWithStreamingResponse,
)
+from .task_group import (
+ TaskGroupResource,
+ AsyncTaskGroupResource,
+ TaskGroupResourceWithRawResponse,
+ AsyncTaskGroupResourceWithRawResponse,
+ TaskGroupResourceWithStreamingResponse,
+ AsyncTaskGroupResourceWithStreamingResponse,
+)
__all__ = [
"TaskRunResource",
@@ -16,4 +32,16 @@
"AsyncTaskRunResourceWithRawResponse",
"TaskRunResourceWithStreamingResponse",
"AsyncTaskRunResourceWithStreamingResponse",
+ "TaskGroupResource",
+ "AsyncTaskGroupResource",
+ "TaskGroupResourceWithRawResponse",
+ "AsyncTaskGroupResourceWithRawResponse",
+ "TaskGroupResourceWithStreamingResponse",
+ "AsyncTaskGroupResourceWithStreamingResponse",
+ "MonitorResource",
+ "AsyncMonitorResource",
+ "MonitorResourceWithRawResponse",
+ "AsyncMonitorResourceWithRawResponse",
+ "MonitorResourceWithStreamingResponse",
+ "AsyncMonitorResourceWithStreamingResponse",
]
diff --git a/src/parallel/resources/beta/api.md b/src/parallel/resources/beta/api.md
index 3ad7b56..ed048b7 100644
--- a/src/parallel/resources/beta/api.md
+++ b/src/parallel/resources/beta/api.md
@@ -11,6 +11,7 @@ from parallel.types.beta import (
WebSearchResult,
ExtractError,
FetchPolicy,
+ FullContentSettings,
UsageItem,
)
```
@@ -50,19 +51,19 @@ Types:
```python
from parallel.types.beta import (
- TaskGroup,
- TaskGroupRunResponse,
- TaskGroupStatus,
TaskGroupEventsResponse,
TaskGroupGetRunsResponse,
+ TaskGroupStatus,
+ TaskGroupStatusEvent,
+ TaskGroupRunResponse,
)
```
Methods:
-- client.beta.task_group.create(\*\*params) -> TaskGroup
-- client.beta.task_group.retrieve(task_group_id) -> TaskGroup
-- client.beta.task_group.add_runs(task_group_id, \*\*params) -> TaskGroupRunResponse
+- client.beta.task_group.create(\*\*params) -> TaskGroup
+- client.beta.task_group.retrieve(task_group_id) -> TaskGroup
+- client.beta.task_group.add_runs(task_group_id, \*\*params) -> TaskGroupRunResponse
- client.beta.task_group.events(task_group_id, \*\*params) -> TaskGroupEventsResponse
- client.beta.task_group.get_runs(task_group_id, \*\*params) -> TaskGroupGetRunsResponse
@@ -72,7 +73,9 @@ Types:
```python
from parallel.types.beta import (
+ FindAllCandidate,
FindAllCandidateMatchStatusEvent,
+ FindAllCandidateMetrics,
FindAllCandidatesRequest,
FindAllCandidatesResponse,
FindAllEnrichInput,
@@ -80,10 +83,12 @@ from parallel.types.beta import (
FindAllRun,
FindAllRunInput,
FindAllRunResult,
+ FindAllRunStatus,
FindAllRunStatusEvent,
FindAllSchema,
FindAllSchemaUpdatedEvent,
IngestInput,
+ MatchCondition,
FindAllEventsResponse,
)
```
@@ -92,7 +97,7 @@ Methods:
- client.beta.findall.create(\*\*params) -> FindAllRun
- client.beta.findall.retrieve(findall_id) -> FindAllRun
-- client.beta.findall.cancel(findall_id) -> object
+- client.beta.findall.cancel(findall_id) -> None
- client.beta.findall.candidates(\*\*params) -> FindAllCandidatesResponse
- client.beta.findall.enrich(findall_id, \*\*params) -> FindAllSchema
- client.beta.findall.events(findall_id, \*\*params) -> FindAllEventsResponse
diff --git a/src/parallel/resources/beta/beta.py b/src/parallel/resources/beta/beta.py
index ef62ac6..4aa87a1 100644
--- a/src/parallel/resources/beta/beta.py
+++ b/src/parallel/resources/beta/beta.py
@@ -64,7 +64,7 @@ def task_run(self) -> TaskRunResource:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -74,18 +74,7 @@ def task_run(self) -> TaskRunResource:
@cached_property
def task_group(self) -> TaskGroupResource:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return TaskGroupResource(self._client)
@cached_property
@@ -331,7 +320,7 @@ def task_run(self) -> AsyncTaskRunResource:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -341,18 +330,7 @@ def task_run(self) -> AsyncTaskRunResource:
@cached_property
def task_group(self) -> AsyncTaskGroupResource:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return AsyncTaskGroupResource(self._client)
@cached_property
@@ -612,7 +590,7 @@ def task_run(self) -> TaskRunResourceWithRawResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -622,18 +600,7 @@ def task_run(self) -> TaskRunResourceWithRawResponse:
@cached_property
def task_group(self) -> TaskGroupResourceWithRawResponse:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return TaskGroupResourceWithRawResponse(self._beta.task_group)
@cached_property
@@ -667,7 +634,7 @@ def task_run(self) -> AsyncTaskRunResourceWithRawResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -677,18 +644,7 @@ def task_run(self) -> AsyncTaskRunResourceWithRawResponse:
@cached_property
def task_group(self) -> AsyncTaskGroupResourceWithRawResponse:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return AsyncTaskGroupResourceWithRawResponse(self._beta.task_group)
@cached_property
@@ -722,7 +678,7 @@ def task_run(self) -> TaskRunResourceWithStreamingResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -732,18 +688,7 @@ def task_run(self) -> TaskRunResourceWithStreamingResponse:
@cached_property
def task_group(self) -> TaskGroupResourceWithStreamingResponse:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return TaskGroupResourceWithStreamingResponse(self._beta.task_group)
@cached_property
@@ -777,7 +722,7 @@ def task_run(self) -> AsyncTaskRunResourceWithStreamingResponse:
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -787,18 +732,7 @@ def task_run(self) -> AsyncTaskRunResourceWithStreamingResponse:
@cached_property
def task_group(self) -> AsyncTaskGroupResourceWithStreamingResponse:
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
return AsyncTaskGroupResourceWithStreamingResponse(self._beta.task_group)
@cached_property
diff --git a/src/parallel/resources/beta/findall.py b/src/parallel/resources/beta/findall.py
index b5d2ab2..ba6ecd4 100644
--- a/src/parallel/resources/beta/findall.py
+++ b/src/parallel/resources/beta/findall.py
@@ -8,7 +8,7 @@
import httpx
-from ..._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
from ..._utils import is_given, path_template, maybe_transform, strip_not_given, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -35,6 +35,7 @@
from ...types.beta.mcp_server_param import McpServerParam
from ...types.beta.findall_run_result import FindAllRunResult
from ...types.beta.parallel_beta_param import ParallelBetaParam
+from ...types.beta.match_condition_param import MatchConditionParam
from ...types.beta.findall_events_response import FindAllEventsResponse
from ...types.beta.findall_candidates_response import FindAllCandidatesResponse
@@ -83,7 +84,7 @@ def create(
*,
entity_type: str,
generator: Literal["base", "core", "pro", "preview"],
- match_conditions: Iterable[findall_create_params.MatchCondition],
+ match_conditions: Iterable[MatchConditionParam],
match_limit: int,
objective: str,
exclude_list: Optional[Iterable[findall_create_params.ExcludeList]] | Omit = omit,
@@ -229,7 +230,7 @@ def cancel(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> object:
+ ) -> None:
"""
Cancel a FindAll run.
@@ -246,6 +247,7 @@ def cancel(
"""
if not findall_id:
raise ValueError(f"Expected a non-empty value for `findall_id` but received {findall_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
extra_headers = {
**strip_not_given(
{
@@ -256,13 +258,13 @@ def cancel(
),
**(extra_headers or {}),
}
- extra_headers = {"parallel-beta": "findall-2025-09-15", **(extra_headers or {})}
+ extra_headers.update({"parallel-beta": "findall-2025-09-15"})
return self._post(
path_template("/v1beta/findall/runs/{findall_id}/cancel", findall_id=findall_id),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=object,
+ cast_to=NoneType,
)
def candidates(
@@ -681,7 +683,7 @@ async def create(
*,
entity_type: str,
generator: Literal["base", "core", "pro", "preview"],
- match_conditions: Iterable[findall_create_params.MatchCondition],
+ match_conditions: Iterable[MatchConditionParam],
match_limit: int,
objective: str,
exclude_list: Optional[Iterable[findall_create_params.ExcludeList]] | Omit = omit,
@@ -827,7 +829,7 @@ async def cancel(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> object:
+ ) -> None:
"""
Cancel a FindAll run.
@@ -844,6 +846,7 @@ async def cancel(
"""
if not findall_id:
raise ValueError(f"Expected a non-empty value for `findall_id` but received {findall_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
extra_headers = {
**strip_not_given(
{
@@ -854,13 +857,13 @@ async def cancel(
),
**(extra_headers or {}),
}
- extra_headers = {"parallel-beta": "findall-2025-09-15", **(extra_headers or {})}
+ extra_headers.update({"parallel-beta": "findall-2025-09-15"})
return await self._post(
path_template("/v1beta/findall/runs/{findall_id}/cancel", findall_id=findall_id),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=object,
+ cast_to=NoneType,
)
async def candidates(
diff --git a/src/parallel/resources/beta/task_group.py b/src/parallel/resources/beta/task_group.py
index 5ccb046..6e4c01f 100644
--- a/src/parallel/resources/beta/task_group.py
+++ b/src/parallel/resources/beta/task_group.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+import typing_extensions
from typing import Any, Dict, List, Union, Iterable, Optional, cast
from itertools import chain
from typing_extensions import Literal
@@ -26,11 +27,11 @@
task_group_get_runs_params,
)
from ..._base_client import make_request_options
-from ...types.beta.task_group import TaskGroup
+from ...types.task_group import TaskGroup
from ...types.task_spec_param import TaskSpecParam
+from ...types.task_group_run_response import TaskGroupRunResponse
from ...types.beta.parallel_beta_param import ParallelBetaParam
from ...types.beta.beta_run_input_param import BetaRunInputParam
-from ...types.beta.task_group_run_response import TaskGroupRunResponse
from ...types.beta.task_group_events_response import TaskGroupEventsResponse
from ...types.beta.task_group_get_runs_response import TaskGroupGetRunsResponse
@@ -38,18 +39,7 @@
class TaskGroupResource(SyncAPIResource):
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
@cached_property
def with_raw_response(self) -> TaskGroupResourceWithRawResponse:
@@ -70,6 +60,7 @@ def with_streaming_response(self) -> TaskGroupResourceWithStreamingResponse:
"""
return TaskGroupResourceWithStreamingResponse(self)
+ @typing_extensions.deprecated("Use GA Task Group instead")
def create(
self,
*,
@@ -105,6 +96,7 @@ def create(
cast_to=TaskGroup,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
def retrieve(
self,
task_group_id: str,
@@ -139,6 +131,7 @@ def retrieve(
cast_to=TaskGroup,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
def add_runs(
self,
task_group_id: str,
@@ -212,6 +205,7 @@ def add_runs(
cast_to=TaskGroupRunResponse,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
def events(
self,
task_group_id: str,
@@ -266,6 +260,7 @@ def events(
stream_cls=Stream[TaskGroupEventsResponse],
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
def get_runs(
self,
task_group_id: str,
@@ -335,18 +330,7 @@ def get_runs(
class AsyncTaskGroupResource(AsyncAPIResource):
- """The Task API executes web research and extraction tasks.
-
- Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade-off between cost, latency, and quality. Each processor supports calibrated confidences.
- - Output metadata: citations, excerpts, reasoning, and confidence per field
-
- Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
- - Observe group progress and receive results as they complete
- - Real-time updates via Server-Sent Events (SSE)
- - Add tasks to an existing group while it is running
- - Group-level retry and error aggregation
- """
+ """Tasks (Beta)"""
@cached_property
def with_raw_response(self) -> AsyncTaskGroupResourceWithRawResponse:
@@ -367,6 +351,7 @@ def with_streaming_response(self) -> AsyncTaskGroupResourceWithStreamingResponse
"""
return AsyncTaskGroupResourceWithStreamingResponse(self)
+ @typing_extensions.deprecated("Use GA Task Group instead")
async def create(
self,
*,
@@ -402,6 +387,7 @@ async def create(
cast_to=TaskGroup,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
async def retrieve(
self,
task_group_id: str,
@@ -436,6 +422,7 @@ async def retrieve(
cast_to=TaskGroup,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
async def add_runs(
self,
task_group_id: str,
@@ -509,6 +496,7 @@ async def add_runs(
cast_to=TaskGroupRunResponse,
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
async def events(
self,
task_group_id: str,
@@ -563,6 +551,7 @@ async def events(
stream_cls=AsyncStream[TaskGroupEventsResponse],
)
+ @typing_extensions.deprecated("Use GA Task Group instead")
async def get_runs(
self,
task_group_id: str,
@@ -635,20 +624,30 @@ class TaskGroupResourceWithRawResponse:
def __init__(self, task_group: TaskGroupResource) -> None:
self._task_group = task_group
- self.create = to_raw_response_wrapper(
- task_group.create,
+ self.create = ( # pyright: ignore[reportDeprecated]
+ to_raw_response_wrapper(
+ task_group.create, # pyright: ignore[reportDeprecated],
+ )
)
- self.retrieve = to_raw_response_wrapper(
- task_group.retrieve,
+ self.retrieve = ( # pyright: ignore[reportDeprecated]
+ to_raw_response_wrapper(
+ task_group.retrieve, # pyright: ignore[reportDeprecated],
+ )
)
- self.add_runs = to_raw_response_wrapper(
- task_group.add_runs,
+ self.add_runs = ( # pyright: ignore[reportDeprecated]
+ to_raw_response_wrapper(
+ task_group.add_runs, # pyright: ignore[reportDeprecated],
+ )
)
- self.events = to_raw_response_wrapper(
- task_group.events,
+ self.events = ( # pyright: ignore[reportDeprecated]
+ to_raw_response_wrapper(
+ task_group.events, # pyright: ignore[reportDeprecated],
+ )
)
- self.get_runs = to_raw_response_wrapper(
- task_group.get_runs,
+ self.get_runs = ( # pyright: ignore[reportDeprecated]
+ to_raw_response_wrapper(
+ task_group.get_runs, # pyright: ignore[reportDeprecated],
+ )
)
@@ -656,20 +655,30 @@ class AsyncTaskGroupResourceWithRawResponse:
def __init__(self, task_group: AsyncTaskGroupResource) -> None:
self._task_group = task_group
- self.create = async_to_raw_response_wrapper(
- task_group.create,
+ self.create = ( # pyright: ignore[reportDeprecated]
+ async_to_raw_response_wrapper(
+ task_group.create, # pyright: ignore[reportDeprecated],
+ )
)
- self.retrieve = async_to_raw_response_wrapper(
- task_group.retrieve,
+ self.retrieve = ( # pyright: ignore[reportDeprecated]
+ async_to_raw_response_wrapper(
+ task_group.retrieve, # pyright: ignore[reportDeprecated],
+ )
)
- self.add_runs = async_to_raw_response_wrapper(
- task_group.add_runs,
+ self.add_runs = ( # pyright: ignore[reportDeprecated]
+ async_to_raw_response_wrapper(
+ task_group.add_runs, # pyright: ignore[reportDeprecated],
+ )
)
- self.events = async_to_raw_response_wrapper(
- task_group.events,
+ self.events = ( # pyright: ignore[reportDeprecated]
+ async_to_raw_response_wrapper(
+ task_group.events, # pyright: ignore[reportDeprecated],
+ )
)
- self.get_runs = async_to_raw_response_wrapper(
- task_group.get_runs,
+ self.get_runs = ( # pyright: ignore[reportDeprecated]
+ async_to_raw_response_wrapper(
+ task_group.get_runs, # pyright: ignore[reportDeprecated],
+ )
)
@@ -677,20 +686,30 @@ class TaskGroupResourceWithStreamingResponse:
def __init__(self, task_group: TaskGroupResource) -> None:
self._task_group = task_group
- self.create = to_streamed_response_wrapper(
- task_group.create,
+ self.create = ( # pyright: ignore[reportDeprecated]
+ to_streamed_response_wrapper(
+ task_group.create, # pyright: ignore[reportDeprecated],
+ )
)
- self.retrieve = to_streamed_response_wrapper(
- task_group.retrieve,
+ self.retrieve = ( # pyright: ignore[reportDeprecated]
+ to_streamed_response_wrapper(
+ task_group.retrieve, # pyright: ignore[reportDeprecated],
+ )
)
- self.add_runs = to_streamed_response_wrapper(
- task_group.add_runs,
+ self.add_runs = ( # pyright: ignore[reportDeprecated]
+ to_streamed_response_wrapper(
+ task_group.add_runs, # pyright: ignore[reportDeprecated],
+ )
)
- self.events = to_streamed_response_wrapper(
- task_group.events,
+ self.events = ( # pyright: ignore[reportDeprecated]
+ to_streamed_response_wrapper(
+ task_group.events, # pyright: ignore[reportDeprecated],
+ )
)
- self.get_runs = to_streamed_response_wrapper(
- task_group.get_runs,
+ self.get_runs = ( # pyright: ignore[reportDeprecated]
+ to_streamed_response_wrapper(
+ task_group.get_runs, # pyright: ignore[reportDeprecated],
+ )
)
@@ -698,18 +717,28 @@ class AsyncTaskGroupResourceWithStreamingResponse:
def __init__(self, task_group: AsyncTaskGroupResource) -> None:
self._task_group = task_group
- self.create = async_to_streamed_response_wrapper(
- task_group.create,
+ self.create = ( # pyright: ignore[reportDeprecated]
+ async_to_streamed_response_wrapper(
+ task_group.create, # pyright: ignore[reportDeprecated],
+ )
)
- self.retrieve = async_to_streamed_response_wrapper(
- task_group.retrieve,
+ self.retrieve = ( # pyright: ignore[reportDeprecated]
+ async_to_streamed_response_wrapper(
+ task_group.retrieve, # pyright: ignore[reportDeprecated],
+ )
)
- self.add_runs = async_to_streamed_response_wrapper(
- task_group.add_runs,
+ self.add_runs = ( # pyright: ignore[reportDeprecated]
+ async_to_streamed_response_wrapper(
+ task_group.add_runs, # pyright: ignore[reportDeprecated],
+ )
)
- self.events = async_to_streamed_response_wrapper(
- task_group.events,
+ self.events = ( # pyright: ignore[reportDeprecated]
+ async_to_streamed_response_wrapper(
+ task_group.events, # pyright: ignore[reportDeprecated],
+ )
)
- self.get_runs = async_to_streamed_response_wrapper(
- task_group.get_runs,
+ self.get_runs = ( # pyright: ignore[reportDeprecated]
+ async_to_streamed_response_wrapper(
+ task_group.get_runs, # pyright: ignore[reportDeprecated],
+ )
)
diff --git a/src/parallel/resources/beta/task_run.py b/src/parallel/resources/beta/task_run.py
index 9b3a65d..c26b219 100644
--- a/src/parallel/resources/beta/task_run.py
+++ b/src/parallel/resources/beta/task_run.py
@@ -28,6 +28,7 @@
from ...types.mcp_server_param import McpServerParam
from ...types.beta.parallel_beta_param import ParallelBetaParam
from ...types.shared_params.source_policy import SourcePolicy
+from ...types.task_advanced_settings_param import TaskAdvancedSettingsParam
from ...types.beta.task_run_events_response import TaskRunEventsResponse
__all__ = ["TaskRunResource", "AsyncTaskRunResource"]
@@ -40,7 +41,7 @@ class TaskRunResource(SyncAPIResource):
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -72,7 +73,7 @@ def create(
*,
input: Union[str, Dict[str, object]],
processor: str,
- advanced_settings: Optional[task_run_create_params.AdvancedSettings] | Omit = omit,
+ advanced_settings: Optional[TaskAdvancedSettingsParam] | Omit = omit,
enable_events: Optional[bool] | Omit = omit,
mcp_servers: Optional[Iterable[McpServerParam]] | Omit = omit,
metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
@@ -278,7 +279,7 @@ class AsyncTaskRunResource(AsyncAPIResource):
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -310,7 +311,7 @@ async def create(
*,
input: Union[str, Dict[str, object]],
processor: str,
- advanced_settings: Optional[task_run_create_params.AdvancedSettings] | Omit = omit,
+ advanced_settings: Optional[TaskAdvancedSettingsParam] | Omit = omit,
enable_events: Optional[bool] | Omit = omit,
mcp_servers: Optional[Iterable[McpServerParam]] | Omit = omit,
metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
diff --git a/src/parallel/resources/monitor.py b/src/parallel/resources/monitor.py
new file mode 100644
index 0000000..a51201f
--- /dev/null
+++ b/src/parallel/resources/monitor.py
@@ -0,0 +1,965 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+from typing_extensions import Literal
+
+import httpx
+
+from ..types import (
+ monitor_list_params,
+ monitor_create_params,
+ monitor_events_params,
+ monitor_update_params,
+)
+from .._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
+from .._utils import path_template, maybe_transform, async_maybe_transform
+from .._compat import cached_property
+from .._resource import SyncAPIResource, AsyncAPIResource
+from .._response import (
+ to_raw_response_wrapper,
+ to_streamed_response_wrapper,
+ async_to_raw_response_wrapper,
+ async_to_streamed_response_wrapper,
+)
+from .._base_client import make_request_options
+from ..types.monitor import Monitor
+from ..types.monitor_webhook_param import MonitorWebhookParam
+from ..types.paginated_monitor_events import PaginatedMonitorEvents
+from ..types.paginated_monitor_response import PaginatedMonitorResponse
+from ..types.update_monitor_event_stream_settings_param import UpdateMonitorEventStreamSettingsParam
+
+__all__ = ["MonitorResource", "AsyncMonitorResource"]
+
+
+class MonitorResource(SyncAPIResource):
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+
+ @cached_property
+ def with_raw_response(self) -> MonitorResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers
+ """
+ return MonitorResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> MonitorResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response
+ """
+ return MonitorResourceWithStreamingResponse(self)
+
+ def create(
+ self,
+ *,
+ frequency: str,
+ settings: monitor_create_params.Settings,
+ type: Literal["event_stream", "snapshot"],
+ metadata: Optional[Dict[str, str]] | Omit = omit,
+ processor: Literal["lite", "base"] | Omit = omit,
+ webhook: Optional[MonitorWebhookParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """
+ Create a monitor.
+
+ Monitors run on a fixed frequency to detect material changes in web content. Set
+ `type=event_stream` to monitor a search query, or `type=snapshot` to monitor a
+ specific task run's output. The monitor runs once immediately at creation, then
+ continues on the configured schedule.
+
+ Args:
+          frequency: Frequency of the monitor. Format: '<number><unit>' where unit is 'h' (hours),
+ 'd' (days), or 'w' (weeks). Must be between 1h and 30d (inclusive).
+
+ settings: Type-specific settings for the monitor. The expected shape is determined by the
+ root `type` field: pass `MonitorEventStreamSettings` when `type` is
+ `event_stream`, and `MonitorSnapshotSettings` when `type` is `snapshot`.
+
+ type: Type of monitor to create. `event_stream` monitors a search query for material
+ changes; `snapshot` monitors a specific task run's output. Determines the
+ expected shape of `settings`.
+
+ metadata: User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+
+ processor: Processor to use for the monitor. `lite` is faster and cheaper; `base` performs
+ more thorough analysis at higher cost and latency. Defaults to `lite`.
+
+ webhook: Webhook configuration for a monitor.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/v1/monitors",
+ body=maybe_transform(
+ {
+ "frequency": frequency,
+ "settings": settings,
+ "type": type,
+ "metadata": metadata,
+ "processor": processor,
+ "webhook": webhook,
+ },
+ monitor_create_params.MonitorCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ def retrieve(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """Retrieve a monitor.
+
+ Retrieves a specific monitor by `monitor_id`.
+
+ Returns the monitor configuration
+ including status, frequency, query, and webhook settings.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return self._get(
+ path_template("/v1/monitors/{monitor_id}", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ def update(
+ self,
+ monitor_id: str,
+ *,
+ frequency: Optional[str] | Omit = omit,
+ metadata: Optional[Dict[str, str]] | Omit = omit,
+ settings: Optional[UpdateMonitorEventStreamSettingsParam] | Omit = omit,
+ type: Optional[Literal["event_stream", "snapshot"]] | Omit = omit,
+ webhook: Optional[MonitorWebhookParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """
+ Update a monitor.
+
+ Only fields explicitly included in the request body are changed. Pass `null` for
+ `webhook` or `metadata` to clear those fields. Pass `type` and `settings` to
+ update type-specific settings on an `event_stream` monitor. At least one field
+ must be provided. Cancelled monitors cannot be updated.
+
+ Args:
+          frequency: Frequency of the monitor. Format: '<number><unit>' where unit is 'h' (hours),
+ 'd' (days), or 'w' (weeks). Must be between 1h and 30d (inclusive).
+
+ metadata: User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+
+ settings: Type-specific update settings for an `event_stream` monitor.
+
+ type: Type of the monitor being updated. Required when `settings` is provided; must be
+ `event_stream` (snapshot monitors have no updatable type-specific settings).
+
+ webhook: Webhook configuration for a monitor.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return self._post(
+ path_template("/v1/monitors/{monitor_id}/update", monitor_id=monitor_id),
+ body=maybe_transform(
+ {
+ "frequency": frequency,
+ "metadata": metadata,
+ "settings": settings,
+ "type": type,
+ "webhook": webhook,
+ },
+ monitor_update_params.MonitorUpdateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ def list(
+ self,
+ *,
+ cursor: Optional[str] | Omit = omit,
+ limit: Optional[int] | Omit = omit,
+ status: Optional[List[Literal["active", "cancelled"]]] | Omit = omit,
+ type: Optional[List[Literal["event_stream", "snapshot"]]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> PaginatedMonitorResponse:
+ """
+ List monitors ordered by creation time, newest first.
+
+ Monitors are sorted by `created_at` descending. `limit` defaults to 100. Use
+ `next_cursor` from the response and pass it as `cursor` to fetch the next page.
+ Pagination ends when `next_cursor` is absent.
+
+ By default only `active` monitors are returned. Pass `status=cancelled` or both
+ values to include cancelled monitors.
+
+ The legacy Monitor API (`/v1alpha/monitors` endpoints) is documented under the
+ `Monitor (Alpha)` tag.
+
+ Args:
+ cursor: Pagination token from `next_cursor` in a previous response. Omit to start from
+ the most recently created monitor.
+
+ limit: Maximum number of monitors to return. Defaults to 100. Between 1 and 10000.
+
+ status: Filter by monitor status. Pass multiple times to filter by multiple values.
+ Defaults to `active` only.
+
+ type: Filter by monitor type. Pass multiple times to filter by multiple values. Omit
+ to return all types.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._get(
+ "/v1/monitors",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "cursor": cursor,
+ "limit": limit,
+ "status": status,
+ "type": type,
+ },
+ monitor_list_params.MonitorListParams,
+ ),
+ ),
+ cast_to=PaginatedMonitorResponse,
+ )
+
+ def cancel(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """Cancel a monitor.
+
+ Permanently stops the monitor from running.
+
+ Cancellation is irreversible —
+ create a new monitor to resume monitoring. Cancelling an already-cancelled
+ monitor is a no-op.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return self._post(
+ path_template("/v1/monitors/{monitor_id}/cancel", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ def events(
+ self,
+ monitor_id: str,
+ *,
+ cursor: Optional[str] | Omit = omit,
+ event_group_id: Optional[str] | Omit = omit,
+ include_completions: bool | Omit = omit,
+ limit: Optional[int] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> PaginatedMonitorEvents:
+ """
+ List events for a monitor, newest first.
+
+ Pass `event_group_id` to narrow results to a single execution. Otherwise returns
+ all executions newest-first; use `next_cursor` to paginate. Set
+ `include_completions=true` to also include no-change executions.
+
+ Args:
+ cursor: Pass `next_cursor` from a previous response to retrieve more events.
+
+ event_group_id: Filter to a single execution. Values come from `event_group_id` in webhook
+ events and listed events. Pagination params are ignored when set.
+
+ include_completions: When true, include completion events for executions that ran but detected no
+ material changes. Useful for auditing execution history.
+
+ limit: Maximum number of events to return. Defaults to 20. Between 1 and 100.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return self._get(
+ path_template("/v1/monitors/{monitor_id}/events", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "cursor": cursor,
+ "event_group_id": event_group_id,
+ "include_completions": include_completions,
+ "limit": limit,
+ },
+ monitor_events_params.MonitorEventsParams,
+ ),
+ ),
+ cast_to=PaginatedMonitorEvents,
+ )
+
+ def trigger(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> None:
+ """
+ Trigger an immediate monitor run.
+
+ Enqueues a one-off execution of the monitor outside its normal schedule. The
+ monitor's regular schedule is not affected. An event is only emitted if the
+ execution detects a material change. Cancelled monitors cannot be triggered.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return self._post(
+ path_template("/v1/monitors/{monitor_id}/trigger", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
+
+class AsyncMonitorResource(AsyncAPIResource):
+ """The Monitor API watches the web for material changes on a fixed frequency.
+
+ Each monitor runs once on creation and then on its configured schedule, emitting events when meaningful changes are detected.
+ - `event_stream` monitors track a search query and emit an event for each new material change.
+ - `snapshot` monitors track a specific task run's output and emit an event when the output changes.
+
+ Results can be polled via the events endpoint or delivered via webhooks.
+ """
+
+ @cached_property
+ def with_raw_response(self) -> AsyncMonitorResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers
+ """
+ return AsyncMonitorResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> AsyncMonitorResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response
+ """
+ return AsyncMonitorResourceWithStreamingResponse(self)
+
+ async def create(
+ self,
+ *,
+ frequency: str,
+ settings: monitor_create_params.Settings,
+ type: Literal["event_stream", "snapshot"],
+ metadata: Optional[Dict[str, str]] | Omit = omit,
+ processor: Literal["lite", "base"] | Omit = omit,
+ webhook: Optional[MonitorWebhookParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """
+ Create a monitor.
+
+ Monitors run on a fixed frequency to detect material changes in web content. Set
+ `type=event_stream` to monitor a search query, or `type=snapshot` to monitor a
+ specific task run's output. The monitor runs once immediately at creation, then
+ continues on the configured schedule.
+
+ Args:
+          frequency: Frequency of the monitor. Format: '<number><unit>' where unit is 'h' (hours),
+ 'd' (days), or 'w' (weeks). Must be between 1h and 30d (inclusive).
+
+ settings: Type-specific settings for the monitor. The expected shape is determined by the
+ root `type` field: pass `MonitorEventStreamSettings` when `type` is
+ `event_stream`, and `MonitorSnapshotSettings` when `type` is `snapshot`.
+
+ type: Type of monitor to create. `event_stream` monitors a search query for material
+ changes; `snapshot` monitors a specific task run's output. Determines the
+ expected shape of `settings`.
+
+ metadata: User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+
+ processor: Processor to use for the monitor. `lite` is faster and cheaper; `base` performs
+ more thorough analysis at higher cost and latency. Defaults to `lite`.
+
+ webhook: Webhook configuration for a monitor.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/v1/monitors",
+ body=await async_maybe_transform(
+ {
+ "frequency": frequency,
+ "settings": settings,
+ "type": type,
+ "metadata": metadata,
+ "processor": processor,
+ "webhook": webhook,
+ },
+ monitor_create_params.MonitorCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ async def retrieve(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """Retrieve a monitor.
+
+ Retrieves a specific monitor by `monitor_id`.
+
+ Returns the monitor configuration
+ including status, frequency, query, and webhook settings.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return await self._get(
+ path_template("/v1/monitors/{monitor_id}", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ async def update(
+ self,
+ monitor_id: str,
+ *,
+ frequency: Optional[str] | Omit = omit,
+ metadata: Optional[Dict[str, str]] | Omit = omit,
+ settings: Optional[UpdateMonitorEventStreamSettingsParam] | Omit = omit,
+ type: Optional[Literal["event_stream", "snapshot"]] | Omit = omit,
+ webhook: Optional[MonitorWebhookParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """
+ Update a monitor.
+
+ Only fields explicitly included in the request body are changed. Pass `null` for
+ `webhook` or `metadata` to clear those fields. Pass `type` and `settings` to
+ update type-specific settings on an `event_stream` monitor. At least one field
+ must be provided. Cancelled monitors cannot be updated.
+
+ Args:
+          frequency: Frequency of the monitor. Format: '<number><unit>' where unit is 'h' (hours),
+ 'd' (days), or 'w' (weeks). Must be between 1h and 30d (inclusive).
+
+ metadata: User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+
+ settings: Type-specific update settings for an `event_stream` monitor.
+
+ type: Type of the monitor being updated. Required when `settings` is provided; must be
+ `event_stream` (snapshot monitors have no updatable type-specific settings).
+
+ webhook: Webhook configuration for a monitor.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return await self._post(
+ path_template("/v1/monitors/{monitor_id}/update", monitor_id=monitor_id),
+ body=await async_maybe_transform(
+ {
+ "frequency": frequency,
+ "metadata": metadata,
+ "settings": settings,
+ "type": type,
+ "webhook": webhook,
+ },
+ monitor_update_params.MonitorUpdateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ async def list(
+ self,
+ *,
+ cursor: Optional[str] | Omit = omit,
+ limit: Optional[int] | Omit = omit,
+ status: Optional[List[Literal["active", "cancelled"]]] | Omit = omit,
+ type: Optional[List[Literal["event_stream", "snapshot"]]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> PaginatedMonitorResponse:
+ """
+ List monitors ordered by creation time, newest first.
+
+ Monitors are sorted by `created_at` descending. `limit` defaults to 100. Use
+ `next_cursor` from the response and pass it as `cursor` to fetch the next page.
+ Pagination ends when `next_cursor` is absent.
+
+ By default only `active` monitors are returned. Pass `status=cancelled` or both
+ values to include cancelled monitors.
+
+ The legacy Monitor API (`/v1alpha/monitors` endpoints) is documented under the
+ `Monitor (Alpha)` tag.
+
+ Args:
+ cursor: Pagination token from `next_cursor` in a previous response. Omit to start from
+ the most recently created monitor.
+
+ limit: Maximum number of monitors to return. Defaults to 100. Between 1 and 10000.
+
+ status: Filter by monitor status. Pass multiple times to filter by multiple values.
+ Defaults to `active` only.
+
+ type: Filter by monitor type. Pass multiple times to filter by multiple values. Omit
+ to return all types.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._get(
+ "/v1/monitors",
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "cursor": cursor,
+ "limit": limit,
+ "status": status,
+ "type": type,
+ },
+ monitor_list_params.MonitorListParams,
+ ),
+ ),
+ cast_to=PaginatedMonitorResponse,
+ )
+
+ async def cancel(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Monitor:
+ """Cancel a monitor.
+
+ Permanently stops the monitor from running.
+
+ Cancellation is irreversible —
+ create a new monitor to resume monitoring. Cancelling an already-cancelled
+ monitor is a no-op.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return await self._post(
+ path_template("/v1/monitors/{monitor_id}/cancel", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=Monitor,
+ )
+
+ async def events(
+ self,
+ monitor_id: str,
+ *,
+ cursor: Optional[str] | Omit = omit,
+ event_group_id: Optional[str] | Omit = omit,
+ include_completions: bool | Omit = omit,
+ limit: Optional[int] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> PaginatedMonitorEvents:
+ """
+ List events for a monitor, newest first.
+
+ Pass `event_group_id` to narrow results to a single execution. Otherwise returns
+ all executions newest-first; use `next_cursor` to paginate. Set
+ `include_completions=true` to also include no-change executions.
+
+ Args:
+ cursor: Pass `next_cursor` from a previous response to retrieve more events.
+
+ event_group_id: Filter to a single execution. Values come from `event_group_id` in webhook
+ events and listed events. Pagination params are ignored when set.
+
+ include_completions: When true, include completion events for executions that ran but detected no
+ material changes. Useful for auditing execution history.
+
+ limit: Maximum number of events to return. Defaults to 20. Between 1 and 100.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ return await self._get(
+ path_template("/v1/monitors/{monitor_id}/events", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "cursor": cursor,
+ "event_group_id": event_group_id,
+ "include_completions": include_completions,
+ "limit": limit,
+ },
+ monitor_events_params.MonitorEventsParams,
+ ),
+ ),
+ cast_to=PaginatedMonitorEvents,
+ )
+
+ async def trigger(
+ self,
+ monitor_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> None:
+ """
+ Trigger an immediate monitor run.
+
+ Enqueues a one-off execution of the monitor outside its normal schedule. The
+ monitor's regular schedule is not affected. An event is only emitted if the
+ execution detects a material change. Cancelled monitors cannot be triggered.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not monitor_id:
+ raise ValueError(f"Expected a non-empty value for `monitor_id` but received {monitor_id!r}")
+ extra_headers = {"Accept": "*/*", **(extra_headers or {})}
+ return await self._post(
+ path_template("/v1/monitors/{monitor_id}/trigger", monitor_id=monitor_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=NoneType,
+ )
+
+
+class MonitorResourceWithRawResponse:
+ def __init__(self, monitor: MonitorResource) -> None:
+ self._monitor = monitor
+
+ self.create = to_raw_response_wrapper(
+ monitor.create,
+ )
+ self.retrieve = to_raw_response_wrapper(
+ monitor.retrieve,
+ )
+ self.update = to_raw_response_wrapper(
+ monitor.update,
+ )
+ self.list = to_raw_response_wrapper(
+ monitor.list,
+ )
+ self.cancel = to_raw_response_wrapper(
+ monitor.cancel,
+ )
+ self.events = to_raw_response_wrapper(
+ monitor.events,
+ )
+ self.trigger = to_raw_response_wrapper(
+ monitor.trigger,
+ )
+
+
+class AsyncMonitorResourceWithRawResponse:
+ def __init__(self, monitor: AsyncMonitorResource) -> None:
+ self._monitor = monitor
+
+ self.create = async_to_raw_response_wrapper(
+ monitor.create,
+ )
+ self.retrieve = async_to_raw_response_wrapper(
+ monitor.retrieve,
+ )
+ self.update = async_to_raw_response_wrapper(
+ monitor.update,
+ )
+ self.list = async_to_raw_response_wrapper(
+ monitor.list,
+ )
+ self.cancel = async_to_raw_response_wrapper(
+ monitor.cancel,
+ )
+ self.events = async_to_raw_response_wrapper(
+ monitor.events,
+ )
+ self.trigger = async_to_raw_response_wrapper(
+ monitor.trigger,
+ )
+
+
+class MonitorResourceWithStreamingResponse:
+ def __init__(self, monitor: MonitorResource) -> None:
+ self._monitor = monitor
+
+ self.create = to_streamed_response_wrapper(
+ monitor.create,
+ )
+ self.retrieve = to_streamed_response_wrapper(
+ monitor.retrieve,
+ )
+ self.update = to_streamed_response_wrapper(
+ monitor.update,
+ )
+ self.list = to_streamed_response_wrapper(
+ monitor.list,
+ )
+ self.cancel = to_streamed_response_wrapper(
+ monitor.cancel,
+ )
+ self.events = to_streamed_response_wrapper(
+ monitor.events,
+ )
+ self.trigger = to_streamed_response_wrapper(
+ monitor.trigger,
+ )
+
+
+class AsyncMonitorResourceWithStreamingResponse:
+ def __init__(self, monitor: AsyncMonitorResource) -> None:
+ self._monitor = monitor
+
+ self.create = async_to_streamed_response_wrapper(
+ monitor.create,
+ )
+ self.retrieve = async_to_streamed_response_wrapper(
+ monitor.retrieve,
+ )
+ self.update = async_to_streamed_response_wrapper(
+ monitor.update,
+ )
+ self.list = async_to_streamed_response_wrapper(
+ monitor.list,
+ )
+ self.cancel = async_to_streamed_response_wrapper(
+ monitor.cancel,
+ )
+ self.events = async_to_streamed_response_wrapper(
+ monitor.events,
+ )
+ self.trigger = async_to_streamed_response_wrapper(
+ monitor.trigger,
+ )
diff --git a/src/parallel/resources/task_group.py b/src/parallel/resources/task_group.py
new file mode 100644
index 0000000..4a0bce9
--- /dev/null
+++ b/src/parallel/resources/task_group.py
@@ -0,0 +1,787 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Union, Iterable, Optional, cast
+from typing_extensions import Literal
+
+import httpx
+
+from ..types import (
+ task_group_create_params,
+ task_group_events_params,
+ task_group_add_runs_params,
+ task_group_get_runs_params,
+)
+from .._types import Body, Omit, Query, Headers, NotGiven, omit, not_given
+from .._utils import is_given, path_template, maybe_transform, strip_not_given, async_maybe_transform
+from .._compat import cached_property
+from .._resource import SyncAPIResource, AsyncAPIResource
+from .._response import (
+ to_raw_response_wrapper,
+ to_streamed_response_wrapper,
+ async_to_raw_response_wrapper,
+ async_to_streamed_response_wrapper,
+)
+from .._streaming import Stream, AsyncStream
+from .._base_client import make_request_options
+from ..types.task_run import TaskRun
+from ..types.task_group import TaskGroup
+from ..types.run_input_param import RunInputParam
+from ..types.task_spec_param import TaskSpecParam
+from ..types.task_group_run_response import TaskGroupRunResponse
+from ..types.beta.parallel_beta_param import ParallelBetaParam
+from ..types.task_group_events_response import TaskGroupEventsResponse
+from ..types.task_group_get_runs_response import TaskGroupGetRunsResponse
+
+__all__ = ["TaskGroupResource", "AsyncTaskGroupResource"]
+
+
+class TaskGroupResource(SyncAPIResource):
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+
+ @cached_property
+ def with_raw_response(self) -> TaskGroupResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers
+ """
+ return TaskGroupResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> TaskGroupResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response
+ """
+ return TaskGroupResourceWithStreamingResponse(self)
+
+ def create(
+ self,
+ *,
+ metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroup:
+ """
+ Initiates a TaskGroup to group and track multiple runs.
+
+ Args:
+ metadata: User-provided metadata stored with the task group.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/v1/tasks/groups",
+ body=maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskGroup,
+ )
+
+ def retrieve(
+ self,
+ task_group_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroup:
+ """
+ Retrieves aggregated status across runs in a TaskGroup.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ return self._get(
+ path_template("/v1/tasks/groups/{task_group_id}", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskGroup,
+ )
+
+ def add_runs(
+ self,
+ task_group_id: str,
+ *,
+ inputs: Iterable[RunInputParam],
+ refresh_status: bool | Omit = omit,
+ default_task_spec: Optional[TaskSpecParam] | Omit = omit,
+ betas: List[ParallelBetaParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroupRunResponse:
+ """
+ Initiates multiple task runs within a TaskGroup.
+
+ Args:
+ inputs: List of task runs to execute. Up to 1,000 runs can be specified per request. If
+ you'd like to add more runs, split them across multiple TaskGroup POST requests.
+
+ default_task_spec: Specification for a task.
+
+ Auto output schemas can be specified by setting `output_schema={"type":"auto"}`.
+ Not specifying a TaskSpec is the same as setting an auto output schema.
+
+ For convenience, bare strings are also accepted as input or output schemas.
+
+ betas: Optional header to specify the beta version(s) to enable.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {
+ **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else not_given}),
+ **(extra_headers or {}),
+ }
+ return self._post(
+ path_template("/v1/tasks/groups/{task_group_id}/runs", task_group_id=task_group_id),
+ body=maybe_transform(
+ {
+ "inputs": inputs,
+ "default_task_spec": default_task_spec,
+ },
+ task_group_add_runs_params.TaskGroupAddRunsParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {"refresh_status": refresh_status}, task_group_add_runs_params.TaskGroupAddRunsParams
+ ),
+ ),
+ cast_to=TaskGroupRunResponse,
+ )
+
+ def events(
+ self,
+ task_group_id: str,
+ *,
+ last_event_id: Optional[str] | Omit = omit,
+ api_timeout: Optional[float] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Stream[TaskGroupEventsResponse]:
+ """
+ Streams events from a TaskGroup: status updates and run completions.
+
+ The connection will remain open for up to an hour as long as at least one run in
+ the group is still active.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ return self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/events", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "last_event_id": last_event_id,
+ "api_timeout": api_timeout,
+ },
+ task_group_events_params.TaskGroupEventsParams,
+ ),
+ ),
+ cast_to=cast(
+ Any, TaskGroupEventsResponse
+ ), # Union types cannot be passed in as arguments in the type system
+ stream=True,
+ stream_cls=Stream[TaskGroupEventsResponse],
+ )
+
+ def get_runs(
+ self,
+ task_group_id: str,
+ *,
+ include_input: bool | Omit = omit,
+ include_output: bool | Omit = omit,
+ last_event_id: Optional[str] | Omit = omit,
+ status: Optional[
+ Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]
+ ]
+ | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> Stream[TaskGroupGetRunsResponse]:
+ """
+ Retrieves task runs in a TaskGroup and optionally their inputs and outputs.
+
+ All runs within a TaskGroup are returned as a stream. To get the inputs and/or
+ outputs back in the stream, set the corresponding `include_input` and
+ `include_output` parameters to `true`.
+
+ The stream is resumable using the `event_id` as the cursor. To resume a stream,
+ specify the `last_event_id` parameter with the `event_id` of the last event in
+ the stream. The stream will resume from the next event after the
+ `last_event_id`.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ return self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/runs", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "include_input": include_input,
+ "include_output": include_output,
+ "last_event_id": last_event_id,
+ "status": status,
+ },
+ task_group_get_runs_params.TaskGroupGetRunsParams,
+ ),
+ ),
+ cast_to=cast(
+ Any, TaskGroupGetRunsResponse
+ ), # Union types cannot be passed in as arguments in the type system
+ stream=True,
+ stream_cls=Stream[TaskGroupGetRunsResponse],
+ )
+
+ def retrieve_run(
+ self,
+ run_id: str,
+ *,
+ task_group_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskRun:
+ """
+ Retrieves run status by run_id.
+
+ This endpoint is equivalent to fetching run status directly using the
+ `retrieve()` method or the `tasks/runs` GET endpoint.
+
+ The run result is available from the `/result` endpoint.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ if not run_id:
+ raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
+ return self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/runs/{run_id}", task_group_id=task_group_id, run_id=run_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskRun,
+ )
+
+
+class AsyncTaskGroupResource(AsyncAPIResource):
+ """The Task API executes web research and extraction tasks.
+
+ Clients submit a natural-language objective with an optional input schema; the service plans retrieval, fetches relevant URLs, and returns outputs that conform to a provided or inferred JSON schema. Supports deep research style queries and can return rich structured JSON outputs. Processors trade off between cost, latency, and quality. Each processor supports calibrated confidences.
+ - Output metadata: citations, excerpts, reasoning, and confidence per field
+
+ Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
+ - Submit hundreds or thousands of Tasks as a single group
+ - Observe group progress and receive results as they complete
+ - Real-time updates via Server-Sent Events (SSE)
+ - Add tasks to an existing group while it is running
+ - Group-level retry and error aggregation
+ """
+
+ @cached_property
+ def with_raw_response(self) -> AsyncTaskGroupResourceWithRawResponse:
+ """
+ This property can be used as a prefix for any HTTP method call to return
+ the raw response object instead of the parsed content.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#accessing-raw-response-data-eg-headers
+ """
+ return AsyncTaskGroupResourceWithRawResponse(self)
+
+ @cached_property
+ def with_streaming_response(self) -> AsyncTaskGroupResourceWithStreamingResponse:
+ """
+ An alternative to `.with_raw_response` that doesn't eagerly read the response body.
+
+ For more information, see https://www.github.com/parallel-web/parallel-sdk-python#with_streaming_response
+ """
+ return AsyncTaskGroupResourceWithStreamingResponse(self)
+
+ async def create(
+ self,
+ *,
+ metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroup:
+ """
+ Initiates a TaskGroup to group and track multiple runs.
+
+ Args:
+ metadata: User-provided metadata stored with the task group.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/v1/tasks/groups",
+ body=await async_maybe_transform({"metadata": metadata}, task_group_create_params.TaskGroupCreateParams),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskGroup,
+ )
+
+ async def retrieve(
+ self,
+ task_group_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroup:
+ """
+ Retrieves aggregated status across runs in a TaskGroup.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ return await self._get(
+ path_template("/v1/tasks/groups/{task_group_id}", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskGroup,
+ )
+
+ async def add_runs(
+ self,
+ task_group_id: str,
+ *,
+ inputs: Iterable[RunInputParam],
+ refresh_status: bool | Omit = omit,
+ default_task_spec: Optional[TaskSpecParam] | Omit = omit,
+ betas: List[ParallelBetaParam] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskGroupRunResponse:
+ """
+ Initiates multiple task runs within a TaskGroup.
+
+ Args:
+ inputs: List of task runs to execute. Up to 1,000 runs can be specified per request. If
+ you'd like to add more runs, split them across multiple TaskGroup POST requests.
+
+ default_task_spec: Specification for a task.
+
+ Auto output schemas can be specified by setting `output_schema={"type":"auto"}`.
+ Not specifying a TaskSpec is the same as setting an auto output schema.
+
+ For convenience, bare strings are also accepted as input or output schemas.
+
+ betas: Optional header to specify the beta version(s) to enable.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {
+ **strip_not_given({"parallel-beta": ",".join(str(e) for e in betas) if is_given(betas) else not_given}),
+ **(extra_headers or {}),
+ }
+ return await self._post(
+ path_template("/v1/tasks/groups/{task_group_id}/runs", task_group_id=task_group_id),
+ body=await async_maybe_transform(
+ {
+ "inputs": inputs,
+ "default_task_spec": default_task_spec,
+ },
+ task_group_add_runs_params.TaskGroupAddRunsParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {"refresh_status": refresh_status}, task_group_add_runs_params.TaskGroupAddRunsParams
+ ),
+ ),
+ cast_to=TaskGroupRunResponse,
+ )
+
+ async def events(
+ self,
+ task_group_id: str,
+ *,
+ last_event_id: Optional[str] | Omit = omit,
+ api_timeout: Optional[float] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> AsyncStream[TaskGroupEventsResponse]:
+ """
+ Streams events from a TaskGroup: status updates and run completions.
+
+ The connection will remain open for up to an hour as long as at least one run in
+ the group is still active.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ return await self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/events", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "last_event_id": last_event_id,
+ "api_timeout": api_timeout,
+ },
+ task_group_events_params.TaskGroupEventsParams,
+ ),
+ ),
+ cast_to=cast(
+ Any, TaskGroupEventsResponse
+ ), # Union types cannot be passed in as arguments in the type system
+ stream=True,
+ stream_cls=AsyncStream[TaskGroupEventsResponse],
+ )
+
+ async def get_runs(
+ self,
+ task_group_id: str,
+ *,
+ include_input: bool | Omit = omit,
+ include_output: bool | Omit = omit,
+ last_event_id: Optional[str] | Omit = omit,
+ status: Optional[
+ Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]
+ ]
+ | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> AsyncStream[TaskGroupGetRunsResponse]:
+ """
+ Retrieves task runs in a TaskGroup and optionally their inputs and outputs.
+
+ All runs within a TaskGroup are returned as a stream. To get the inputs and/or
+ outputs back in the stream, set the corresponding `include_input` and
+ `include_output` parameters to `true`.
+
+ The stream is resumable using the `event_id` as the cursor. To resume a stream,
+ specify the `last_event_id` parameter with the `event_id` of the last event in
+ the stream. The stream will resume from the next event after the
+ `last_event_id`.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ extra_headers = {"Accept": "text/event-stream", **(extra_headers or {})}
+ return await self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/runs", task_group_id=task_group_id),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "include_input": include_input,
+ "include_output": include_output,
+ "last_event_id": last_event_id,
+ "status": status,
+ },
+ task_group_get_runs_params.TaskGroupGetRunsParams,
+ ),
+ ),
+ cast_to=cast(
+ Any, TaskGroupGetRunsResponse
+ ), # Union types cannot be passed in as arguments in the type system
+ stream=True,
+ stream_cls=AsyncStream[TaskGroupGetRunsResponse],
+ )
+
+ async def retrieve_run(
+ self,
+ run_id: str,
+ *,
+ task_group_id: str,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> TaskRun:
+ """
+ Retrieves run status by run_id.
+
+ This endpoint is equivalent to fetching run status directly using the
+ `retrieve()` method or the `tasks/runs` GET endpoint.
+
+ The run result is available from the `/result` endpoint.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not task_group_id:
+ raise ValueError(f"Expected a non-empty value for `task_group_id` but received {task_group_id!r}")
+ if not run_id:
+ raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
+ return await self._get(
+ path_template("/v1/tasks/groups/{task_group_id}/runs/{run_id}", task_group_id=task_group_id, run_id=run_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=TaskRun,
+ )
+
+
+class TaskGroupResourceWithRawResponse:
+ def __init__(self, task_group: TaskGroupResource) -> None:
+ self._task_group = task_group
+
+ self.create = to_raw_response_wrapper(
+ task_group.create,
+ )
+ self.retrieve = to_raw_response_wrapper(
+ task_group.retrieve,
+ )
+ self.add_runs = to_raw_response_wrapper(
+ task_group.add_runs,
+ )
+ self.events = to_raw_response_wrapper(
+ task_group.events,
+ )
+ self.get_runs = to_raw_response_wrapper(
+ task_group.get_runs,
+ )
+ self.retrieve_run = to_raw_response_wrapper(
+ task_group.retrieve_run,
+ )
+
+
+class AsyncTaskGroupResourceWithRawResponse:
+ def __init__(self, task_group: AsyncTaskGroupResource) -> None:
+ self._task_group = task_group
+
+ self.create = async_to_raw_response_wrapper(
+ task_group.create,
+ )
+ self.retrieve = async_to_raw_response_wrapper(
+ task_group.retrieve,
+ )
+ self.add_runs = async_to_raw_response_wrapper(
+ task_group.add_runs,
+ )
+ self.events = async_to_raw_response_wrapper(
+ task_group.events,
+ )
+ self.get_runs = async_to_raw_response_wrapper(
+ task_group.get_runs,
+ )
+ self.retrieve_run = async_to_raw_response_wrapper(
+ task_group.retrieve_run,
+ )
+
+
+class TaskGroupResourceWithStreamingResponse:
+ def __init__(self, task_group: TaskGroupResource) -> None:
+ self._task_group = task_group
+
+ self.create = to_streamed_response_wrapper(
+ task_group.create,
+ )
+ self.retrieve = to_streamed_response_wrapper(
+ task_group.retrieve,
+ )
+ self.add_runs = to_streamed_response_wrapper(
+ task_group.add_runs,
+ )
+ self.events = to_streamed_response_wrapper(
+ task_group.events,
+ )
+ self.get_runs = to_streamed_response_wrapper(
+ task_group.get_runs,
+ )
+ self.retrieve_run = to_streamed_response_wrapper(
+ task_group.retrieve_run,
+ )
+
+
+class AsyncTaskGroupResourceWithStreamingResponse:
+ def __init__(self, task_group: AsyncTaskGroupResource) -> None:
+ self._task_group = task_group
+
+ self.create = async_to_streamed_response_wrapper(
+ task_group.create,
+ )
+ self.retrieve = async_to_streamed_response_wrapper(
+ task_group.retrieve,
+ )
+ self.add_runs = async_to_streamed_response_wrapper(
+ task_group.add_runs,
+ )
+ self.events = async_to_streamed_response_wrapper(
+ task_group.events,
+ )
+ self.get_runs = async_to_streamed_response_wrapper(
+ task_group.get_runs,
+ )
+ self.retrieve_run = async_to_streamed_response_wrapper(
+ task_group.retrieve_run,
+ )
diff --git a/src/parallel/resources/task_run.py b/src/parallel/resources/task_run.py
index 4f8e2f5..2a73374 100644
--- a/src/parallel/resources/task_run.py
+++ b/src/parallel/resources/task_run.py
@@ -23,6 +23,7 @@
from .._streaming import Stream, AsyncStream
from .._base_client import make_request_options
from ..types.task_run import TaskRun
+from ..types.run_input import RunInput
from ..types.webhook_param import WebhookParam
from ..types.task_run_result import TaskRunResult
from ..types.task_spec_param import OutputT, OutputSchema, TaskSpecParam
@@ -37,6 +38,7 @@
from ..types.beta.parallel_beta_param import ParallelBetaParam
from ..types.task_run_events_response import TaskRunEventsResponse
from ..types.shared_params.source_policy import SourcePolicy
+from ..types.task_advanced_settings_param import TaskAdvancedSettingsParam
__all__ = ["TaskRunResource", "AsyncTaskRunResource"]
@@ -48,7 +50,7 @@ class TaskRunResource(SyncAPIResource):
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -79,7 +81,7 @@ def create(
*,
input: Union[str, Dict[str, object]],
processor: str,
- advanced_settings: Optional[task_run_create_params.AdvancedSettings] | Omit = omit,
+ advanced_settings: Optional[TaskAdvancedSettingsParam] | Omit = omit,
enable_events: Optional[bool] | Omit = omit,
mcp_servers: Optional[Iterable[McpServerParam]] | Omit = omit,
metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
@@ -295,6 +297,39 @@ def result(
cast_to=TaskRunResult,
)
+ def retrieve_input(
+ self,
+ run_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> RunInput:
+ """
+ Retrieves the input of a run by run_id.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not run_id:
+ raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
+ return self._get(
+ path_template("/v1/tasks/runs/{run_id}/input", run_id=run_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=RunInput,
+ )
+
def _wait_for_result(
self,
*,
@@ -423,7 +458,6 @@ def execute(
extra_body=extra_body,
)
-
class AsyncTaskRunResource(AsyncAPIResource):
"""The Task API executes web research and extraction tasks.
@@ -431,7 +465,7 @@ class AsyncTaskRunResource(AsyncAPIResource):
- Output metadata: citations, excerpts, reasoning, and confidence per field
Task Groups enable batch execution of many independent Task runs with group-level monitoring and failure handling.
- - Submit hundreds or thousands of Tasks as a single group
+ - Submit hundreds or thousands of Tasks as a single group
- Observe group progress and receive results as they complete
- Real-time updates via Server-Sent Events (SSE)
- Add tasks to an existing group while it is running
@@ -462,7 +496,7 @@ async def create(
*,
input: Union[str, Dict[str, object]],
processor: str,
- advanced_settings: Optional[task_run_create_params.AdvancedSettings] | Omit = omit,
+ advanced_settings: Optional[TaskAdvancedSettingsParam] | Omit = omit,
enable_events: Optional[bool] | Omit = omit,
mcp_servers: Optional[Iterable[McpServerParam]] | Omit = omit,
metadata: Optional[Dict[str, Union[str, float, bool]]] | Omit = omit,
@@ -680,6 +714,39 @@ async def result(
cast_to=TaskRunResult,
)
+ async def retrieve_input(
+ self,
+ run_id: str,
+ *,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> RunInput:
+ """
+ Retrieves the input of a run by run_id.
+
+ Args:
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not run_id:
+ raise ValueError(f"Expected a non-empty value for `run_id` but received {run_id!r}")
+ return await self._get(
+ path_template("/v1/tasks/runs/{run_id}/input", run_id=run_id),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=RunInput,
+ )
+
async def _wait_for_result(
self,
*,
@@ -804,7 +871,6 @@ async def execute(
extra_body=extra_body,
)
-
class TaskRunResourceWithRawResponse:
def __init__(self, task_run: TaskRunResource) -> None:
self._task_run = task_run
@@ -821,6 +887,9 @@ def __init__(self, task_run: TaskRunResource) -> None:
self.result = to_raw_response_wrapper(
task_run.result,
)
+ self.retrieve_input = to_raw_response_wrapper(
+ task_run.retrieve_input,
+ )
class AsyncTaskRunResourceWithRawResponse:
@@ -839,6 +908,9 @@ def __init__(self, task_run: AsyncTaskRunResource) -> None:
self.result = async_to_raw_response_wrapper(
task_run.result,
)
+ self.retrieve_input = async_to_raw_response_wrapper(
+ task_run.retrieve_input,
+ )
class TaskRunResourceWithStreamingResponse:
@@ -857,6 +929,9 @@ def __init__(self, task_run: TaskRunResource) -> None:
self.result = to_streamed_response_wrapper(
task_run.result,
)
+ self.retrieve_input = to_streamed_response_wrapper(
+ task_run.retrieve_input,
+ )
class AsyncTaskRunResourceWithStreamingResponse:
@@ -875,3 +950,6 @@ def __init__(self, task_run: AsyncTaskRunResource) -> None:
self.result = async_to_streamed_response_wrapper(
task_run.result,
)
+ self.retrieve_input = async_to_streamed_response_wrapper(
+ task_run.retrieve_input,
+ )
diff --git a/src/parallel/types/__init__.py b/src/parallel/types/__init__.py
index 570116a..2a7d4a1 100644
--- a/src/parallel/types/__init__.py
+++ b/src/parallel/types/__init__.py
@@ -8,12 +8,14 @@
SourcePolicy as SourcePolicy,
ErrorResponse as ErrorResponse,
)
+from .monitor import Monitor as Monitor
from .webhook import Webhook as Webhook
from .citation import Citation as Citation
from .task_run import TaskRun as TaskRun
from .run_input import RunInput as RunInput
from .task_spec import TaskSpec as TaskSpec
from .mcp_server import McpServer as McpServer
+from .task_group import TaskGroup as TaskGroup
from .usage_item import UsageItem as UsageItem
from .auto_schema import AutoSchema as AutoSchema
from .error_event import ErrorEvent as ErrorEvent
@@ -26,6 +28,7 @@
from .webhook_param import WebhookParam as WebhookParam
from .extract_result import ExtractResult as ExtractResult
from .task_run_event import TaskRunEvent as TaskRunEvent
+from .monitor_webhook import MonitorWebhook as MonitorWebhook
from .run_input_param import RunInputParam as RunInputParam
from .task_run_result import TaskRunResult as TaskRunResult
from .task_spec_param import TaskSpecParam as TaskSpecParam
@@ -33,17 +36,55 @@
from .mcp_server_param import McpServerParam as McpServerParam
from .auto_schema_param import AutoSchemaParam as AutoSchemaParam
from .json_schema_param import JsonSchemaParam as JsonSchemaParam
+from .task_group_status import TaskGroupStatus as TaskGroupStatus
from .text_schema_param import TextSchemaParam as TextSchemaParam
from .web_search_result import WebSearchResult as WebSearchResult
from .fetch_policy_param import FetchPolicyParam as FetchPolicyParam
+from .monitor_error_event import MonitorErrorEvent as MonitorErrorEvent
+from .monitor_list_params import MonitorListParams as MonitorListParams
from .client_search_params import ClientSearchParams as ClientSearchParams
from .task_run_json_output import TaskRunJsonOutput as TaskRunJsonOutput
from .task_run_text_output import TaskRunTextOutput as TaskRunTextOutput
from .client_extract_params import ClientExtractParams as ClientExtractParams
+from .monitor_create_params import MonitorCreateParams as MonitorCreateParams
+from .monitor_events_params import MonitorEventsParams as MonitorEventsParams
+from .monitor_update_params import MonitorUpdateParams as MonitorUpdateParams
+from .monitor_webhook_param import MonitorWebhookParam as MonitorWebhookParam
+from .task_run_source_stats import TaskRunSourceStats as TaskRunSourceStats
from .excerpt_settings_param import ExcerptSettingsParam as ExcerptSettingsParam
+from .monitor_snapshot_event import MonitorSnapshotEvent as MonitorSnapshotEvent
from .parsed_task_run_result import ParsedTaskRunResult as ParsedTaskRunResult
+from .task_advanced_settings import TaskAdvancedSettings as TaskAdvancedSettings
from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams
from .task_run_result_params import TaskRunResultParams as TaskRunResultParams
+from .monitor_snapshot_output import MonitorSnapshotOutput as MonitorSnapshotOutput
+from .task_group_run_response import TaskGroupRunResponse as TaskGroupRunResponse
+from .task_group_status_event import TaskGroupStatusEvent as TaskGroupStatusEvent
+from .monitor_completion_event import MonitorCompletionEvent as MonitorCompletionEvent
+from .paginated_monitor_events import PaginatedMonitorEvents as PaginatedMonitorEvents
+from .task_group_create_params import TaskGroupCreateParams as TaskGroupCreateParams
+from .task_group_events_params import TaskGroupEventsParams as TaskGroupEventsParams
from .task_run_events_response import TaskRunEventsResponse as TaskRunEventsResponse
+from .advanced_monitor_settings import AdvancedMonitorSettings as AdvancedMonitorSettings
+from .monitor_event_stream_event import MonitorEventStreamEvent as MonitorEventStreamEvent
+from .paginated_monitor_response import PaginatedMonitorResponse as PaginatedMonitorResponse
+from .task_group_add_runs_params import TaskGroupAddRunsParams as TaskGroupAddRunsParams
+from .task_group_events_response import TaskGroupEventsResponse as TaskGroupEventsResponse
+from .task_group_get_runs_params import TaskGroupGetRunsParams as TaskGroupGetRunsParams
+from .full_content_settings_param import FullContentSettingsParam as FullContentSettingsParam
+from .task_advanced_settings_param import TaskAdvancedSettingsParam as TaskAdvancedSettingsParam
+from .task_group_get_runs_response import TaskGroupGetRunsResponse as TaskGroupGetRunsResponse
+from .task_run_progress_stats_event import TaskRunProgressStatsEvent as TaskRunProgressStatsEvent
from .advanced_search_settings_param import AdvancedSearchSettingsParam as AdvancedSearchSettingsParam
from .advanced_extract_settings_param import AdvancedExtractSettingsParam as AdvancedExtractSettingsParam
+from .advanced_monitor_settings_param import AdvancedMonitorSettingsParam as AdvancedMonitorSettingsParam
+from .monitor_snapshot_settings_param import MonitorSnapshotSettingsParam as MonitorSnapshotSettingsParam
+from .task_run_progress_message_event import TaskRunProgressMessageEvent as TaskRunProgressMessageEvent
+from .monitor_snapshot_response_settings import MonitorSnapshotResponseSettings as MonitorSnapshotResponseSettings
+from .monitor_event_stream_settings_param import MonitorEventStreamSettingsParam as MonitorEventStreamSettingsParam
+from .monitor_event_stream_response_settings import (
+ MonitorEventStreamResponseSettings as MonitorEventStreamResponseSettings,
+)
+from .update_monitor_event_stream_settings_param import (
+ UpdateMonitorEventStreamSettingsParam as UpdateMonitorEventStreamSettingsParam,
+)
diff --git a/src/parallel/types/advanced_extract_settings_param.py b/src/parallel/types/advanced_extract_settings_param.py
index 1328c26..351cae3 100644
--- a/src/parallel/types/advanced_extract_settings_param.py
+++ b/src/parallel/types/advanced_extract_settings_param.py
@@ -7,22 +7,11 @@
from .fetch_policy_param import FetchPolicyParam
from .excerpt_settings_param import ExcerptSettingsParam
+from .full_content_settings_param import FullContentSettingsParam
-__all__ = ["AdvancedExtractSettingsParam", "FullContent", "FullContentFullContentSettings"]
+__all__ = ["AdvancedExtractSettingsParam", "FullContent"]
-
-class FullContentFullContentSettings(TypedDict, total=False):
- """Optional settings for returning full content."""
-
- max_chars_per_result: Optional[int]
- """
- Optional limit on the number of characters to include in the full content for
- each url. Full content always starts at the beginning of the page and is
- truncated at the limit if necessary.
- """
-
-
-FullContent: TypeAlias = Union[FullContentFullContentSettings, bool]
+FullContent: TypeAlias = Union[FullContentSettingsParam, bool]
class AdvancedExtractSettingsParam(TypedDict, total=False):
@@ -44,3 +33,9 @@ class AdvancedExtractSettingsParam(TypedDict, total=False):
Set to true to enable with defaults, false to disable, or provide
FullContentSettings for fine-grained control.
"""
+
+
+# Backwards-compat alias (deprecated). `FullContentFullContentSettings` was the
+# auto-generated nested-class name in this module; it now lives as the top-level
+# `FullContentSettingsParam`.
+FullContentFullContentSettings = FullContentSettingsParam
diff --git a/src/parallel/types/advanced_monitor_settings.py b/src/parallel/types/advanced_monitor_settings.py
new file mode 100644
index 0000000..bde7b54
--- /dev/null
+++ b/src/parallel/types/advanced_monitor_settings.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from .._models import BaseModel
+from .shared.source_policy import SourcePolicy
+
+__all__ = ["AdvancedMonitorSettings"]
+
+
+class AdvancedMonitorSettings(BaseModel):
+ """Advanced monitor configuration."""
+
+ location: Optional[str] = None
+ """ISO 3166-1 alpha-2 country code for geo-targeted monitor results."""
+
+ source_policy: Optional[SourcePolicy] = None
+ """Source policy for web search results.
+
+ This policy governs which sources are allowed/disallowed in results.
+ """
diff --git a/src/parallel/types/advanced_monitor_settings_param.py b/src/parallel/types/advanced_monitor_settings_param.py
new file mode 100644
index 0000000..82c748e
--- /dev/null
+++ b/src/parallel/types/advanced_monitor_settings_param.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import TypedDict
+
+from .shared_params.source_policy import SourcePolicy
+
+__all__ = ["AdvancedMonitorSettingsParam"]
+
+
+class AdvancedMonitorSettingsParam(TypedDict, total=False):
+ """Advanced monitor configuration."""
+
+ location: Optional[str]
+ """ISO 3166-1 alpha-2 country code for geo-targeted monitor results."""
+
+ source_policy: Optional[SourcePolicy]
+ """Source policy for web search results.
+
+ This policy governs which sources are allowed/disallowed in results.
+ """
diff --git a/src/parallel/types/beta/__init__.py b/src/parallel/types/beta/__init__.py
index 91b7229..4a3103d 100644
--- a/src/parallel/types/beta/__init__.py
+++ b/src/parallel/types/beta/__init__.py
@@ -16,13 +16,16 @@
from .extract_result import ExtractResult as ExtractResult
from .findall_schema import FindAllSchema as FindAllSchema, FindallSchema as FindallSchema
from .task_run_event import TaskRunEvent as TaskRunEvent
+from .match_condition import MatchCondition as MatchCondition
from .extract_response import ExtractResponse as ExtractResponse
from .mcp_server_param import McpServerParam as McpServerParam
+from .findall_candidate import FindAllCandidate as FindAllCandidate
from .task_group_status import TaskGroupStatus as TaskGroupStatus
from .web_search_result import WebSearchResult as WebSearchResult
from .beta_search_params import BetaSearchParams as BetaSearchParams
from .fetch_policy_param import FetchPolicyParam as FetchPolicyParam
from .findall_run_result import FindAllRunResult as FindAllRunResult, FindallRunResult as FindallRunResult
+from .findall_run_status import FindAllRunStatus as FindAllRunStatus
from .beta_extract_params import BetaExtractParams as BetaExtractParams
from .parallel_beta_param import ParallelBetaParam as ParallelBetaParam
from .beta_run_input_param import BetaRunInputParam as BetaRunInputParam
@@ -48,6 +51,7 @@
FindAllIngestParams as FindAllIngestParams,
FindallIngestParams as FindallIngestParams,
)
+from .match_condition_param import MatchConditionParam as MatchConditionParam
from .excerpt_settings_param import ExcerptSettingsParam as ExcerptSettingsParam
from .task_run_create_params import TaskRunCreateParams as TaskRunCreateParams
from .task_run_result_params import TaskRunResultParams as TaskRunResultParams
@@ -56,6 +60,7 @@
FindallEventsResponse as FindallEventsResponse,
)
from .task_group_run_response import TaskGroupRunResponse as TaskGroupRunResponse
+from .task_group_status_event import TaskGroupStatusEvent as TaskGroupStatusEvent
from .findall_run_status_event import (
FindAllRunStatusEvent as FindAllRunStatusEvent,
FindallRunStatusEvent as FindallRunStatusEvent,
@@ -63,11 +68,13 @@
from .task_group_create_params import TaskGroupCreateParams as TaskGroupCreateParams
from .task_group_events_params import TaskGroupEventsParams as TaskGroupEventsParams
from .task_run_events_response import TaskRunEventsResponse as TaskRunEventsResponse
+from .findall_candidate_metrics import FindAllCandidateMetrics as FindAllCandidateMetrics
from .findall_candidates_params import FindAllCandidatesParams as FindAllCandidatesParams
from .task_group_add_runs_params import TaskGroupAddRunsParams as TaskGroupAddRunsParams
from .task_group_events_response import TaskGroupEventsResponse as TaskGroupEventsResponse
from .task_group_get_runs_params import TaskGroupGetRunsParams as TaskGroupGetRunsParams
from .findall_candidates_response import FindAllCandidatesResponse as FindAllCandidatesResponse
+from .full_content_settings_param import FullContentSettingsParam as FullContentSettingsParam
from .findall_schema_updated_event import (
FindAllSchemaUpdatedEvent as FindAllSchemaUpdatedEvent,
FindallSchemaUpdatedEvent as FindallSchemaUpdatedEvent,
diff --git a/src/parallel/types/beta/beta_extract_params.py b/src/parallel/types/beta/beta_extract_params.py
index d78d06c..f1dba96 100644
--- a/src/parallel/types/beta/beta_extract_params.py
+++ b/src/parallel/types/beta/beta_extract_params.py
@@ -10,8 +10,9 @@
from ..fetch_policy_param import FetchPolicyParam
from .parallel_beta_param import ParallelBetaParam
from .excerpt_settings_param import ExcerptSettingsParam
+from ..full_content_settings_param import FullContentSettingsParam
-__all__ = ["BetaExtractParams", "Excerpts", "FullContent", "FullContentFullContentSettings"]
+__all__ = ["BetaExtractParams", "Excerpts", "FullContent"]
class BetaExtractParams(TypedDict, total=False):
@@ -59,16 +60,9 @@ class BetaExtractParams(TypedDict, total=False):
Excerpts: TypeAlias = Union[bool, ExcerptSettingsParam]
+FullContent: TypeAlias = Union[bool, FullContentSettingsParam]
-class FullContentFullContentSettings(TypedDict, total=False):
- """Optional settings for returning full content."""
-
- max_chars_per_result: Optional[int]
- """
- Optional limit on the number of characters to include in the full content for
- each url. Full content always starts at the beginning of the page and is
- truncated at the limit if necessary.
- """
-
-
-FullContent: TypeAlias = Union[bool, FullContentFullContentSettings]
+# Backwards-compat alias (deprecated). `FullContentFullContentSettings` was an
+# inline TypedDict in this module; it now lives as the top-level
+# `FullContentSettingsParam`.
+FullContentFullContentSettings = FullContentSettingsParam
diff --git a/src/parallel/types/beta/findall_candidate.py b/src/parallel/types/beta/findall_candidate.py
new file mode 100644
index 0000000..798077e
--- /dev/null
+++ b/src/parallel/types/beta/findall_candidate.py
@@ -0,0 +1,45 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List, Optional
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+from ..field_basis import FieldBasis
+
+__all__ = ["FindAllCandidate"]
+
+
+class FindAllCandidate(BaseModel):
+ """Candidate for a find all run that may end up as a match.
+
+ Contains all the candidate's metadata and the output of the match conditions.
+ A candidate is a match if all match conditions are satisfied.
+ """
+
+ candidate_id: str
+ """ID of the candidate."""
+
+ match_status: Literal["generated", "matched", "unmatched", "discarded"]
+ """Status of the candidate. One of generated, matched, unmatched, discarded."""
+
+ name: str
+ """Name of the candidate."""
+
+ url: str
+ """URL that provides context or details of the entity for disambiguation."""
+
+ basis: Optional[List[FieldBasis]] = None
+ """List of FieldBasis objects supporting the output."""
+
+ description: Optional[str] = None
+ """
+ Brief description of the entity that can help answer whether entity satisfies
+ the query.
+ """
+
+ output: Optional[Dict[str, object]] = None
+ """Results of the match condition evaluations for this candidate.
+
+ This object contains the structured output that determines whether the candidate
+ matches the overall FindAll objective.
+ """
diff --git a/src/parallel/types/beta/findall_candidate_match_status_event.py b/src/parallel/types/beta/findall_candidate_match_status_event.py
index 01ebf20..456784a 100644
--- a/src/parallel/types/beta/findall_candidate_match_status_event.py
+++ b/src/parallel/types/beta/findall_candidate_match_status_event.py
@@ -1,51 +1,18 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, List, Optional
from datetime import datetime
from typing_extensions import Literal
from ..._models import BaseModel
-from ..field_basis import FieldBasis
+from .findall_candidate import FindAllCandidate
-__all__ = ["FindAllCandidateMatchStatusEvent", "FindallCandidateMatchStatusEvent", "Data"]
-
-
-class Data(BaseModel):
- """The candidate whose match status has been updated."""
-
- candidate_id: str
- """ID of the candidate."""
-
- match_status: Literal["generated", "matched", "unmatched", "discarded"]
- """Status of the candidate. One of generated, matched, unmatched, discarded."""
-
- name: str
- """Name of the candidate."""
-
- url: str
- """URL that provides context or details of the entity for disambiguation."""
-
- basis: Optional[List[FieldBasis]] = None
- """List of FieldBasis objects supporting the output."""
-
- description: Optional[str] = None
- """
- Brief description of the entity that can help answer whether entity satisfies
- the query.
- """
-
- output: Optional[Dict[str, object]] = None
- """Results of the match condition evaluations for this candidate.
-
- This object contains the structured output that determines whether the candidate
- matches the overall FindAll objective.
- """
+__all__ = [ "FindallCandidateMatchStatusEvent", "FindAllCandidateMatchStatusEvent"]
class FindAllCandidateMatchStatusEvent(BaseModel):
"""Event containing a candidate whose match status has changed."""
- data: Data
+ data: FindAllCandidate
"""The candidate whose match status has been updated."""
event_id: str
@@ -70,3 +37,7 @@ class FindAllCandidateMatchStatusEvent(BaseModel):
FindallCandidateMatchStatusEvent = FindAllCandidateMatchStatusEvent # for backwards compatibility with v0.3.4
"""This is deprecated, `FindAllCandidateMatchStatusEvent` should be used instead"""
+
+# Backwards-compat alias (deprecated). `Data` was an inline class in earlier
+# releases; it is now the top-level `FindAllCandidate` model.
+Data = FindAllCandidate
diff --git a/src/parallel/types/beta/findall_candidate_metrics.py b/src/parallel/types/beta/findall_candidate_metrics.py
new file mode 100644
index 0000000..95ab6d2
--- /dev/null
+++ b/src/parallel/types/beta/findall_candidate_metrics.py
@@ -0,0 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from ..._models import BaseModel
+
+__all__ = ["FindAllCandidateMetrics"]
+
+
+class FindAllCandidateMetrics(BaseModel):
+ """Metrics object for FindAll run."""
+
+ generated_candidates_count: Optional[int] = None
+ """Number of candidates that were selected."""
+
+ matched_candidates_count: Optional[int] = None
+ """Number of candidates that evaluated to matched."""
diff --git a/src/parallel/types/beta/findall_create_params.py b/src/parallel/types/beta/findall_create_params.py
index 89a8715..f5a700f 100644
--- a/src/parallel/types/beta/findall_create_params.py
+++ b/src/parallel/types/beta/findall_create_params.py
@@ -8,8 +8,9 @@
from ..._utils import PropertyInfo
from ..webhook_param import WebhookParam
from .parallel_beta_param import ParallelBetaParam
+from .match_condition_param import MatchConditionParam
-__all__ = ["FindAllCreateParams", "FindallCreateParams", "MatchCondition", "ExcludeList"]
+__all__ = ["FindAllCreateParams", "FindallCreateParams", "ExcludeList"]
class FindAllCreateParams(TypedDict, total=False):
@@ -19,7 +20,7 @@ class FindAllCreateParams(TypedDict, total=False):
generator: Required[Literal["base", "core", "pro", "preview"]]
"""Generator for the FindAll run. One of base, core, pro, preview."""
- match_conditions: Required[Iterable[MatchCondition]]
+ match_conditions: Required[Iterable[MatchConditionParam]]
"""List of match conditions for the FindAll run."""
match_limit: Required[int]
@@ -44,20 +45,6 @@ class FindAllCreateParams(TypedDict, total=False):
"""Optional header to specify the beta version(s) to enable."""
-class MatchCondition(TypedDict, total=False):
- """Match condition model for FindAll ingest."""
-
- description: Required[str]
- """Detailed description of the match condition.
-
- Include as much specific information as possible to help improve the quality and
- accuracy of Find All run results.
- """
-
- name: Required[str]
- """Name of the match condition."""
-
-
class ExcludeList(TypedDict, total=False):
"""Exclude candidate input model for FindAll run."""
@@ -70,3 +57,7 @@ class ExcludeList(TypedDict, total=False):
FindallCreateParams = FindAllCreateParams # for backwards compatibility with v0.3.4
"""This is deprecated, `FindAllCreateParams` should be used instead"""
+
+# Backwards-compat alias (deprecated). `MatchCondition` was an inline TypedDict
+# in this module; it now lives as the top-level `MatchConditionParam`.
+MatchCondition = MatchConditionParam
diff --git a/src/parallel/types/beta/findall_run.py b/src/parallel/types/beta/findall_run.py
index ad55025..927c048 100644
--- a/src/parallel/types/beta/findall_run.py
+++ b/src/parallel/types/beta/findall_run.py
@@ -4,44 +4,9 @@
from typing_extensions import Literal
from ..._models import BaseModel
+from .findall_run_status import FindAllRunStatus
-__all__ = ["FindAllRun", "FindallRun", "Status", "StatusMetrics"]
-
-
-class StatusMetrics(BaseModel):
- """Candidate metrics for the FindAll run."""
-
- generated_candidates_count: Optional[int] = None
- """Number of candidates that were selected."""
-
- matched_candidates_count: Optional[int] = None
- """Number of candidates that evaluated to matched."""
-
-
-class Status(BaseModel):
- """Status object for the FindAll run."""
-
- is_active: bool
- """Whether the FindAll run is active"""
-
- metrics: StatusMetrics
- """Candidate metrics for the FindAll run."""
-
- status: Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]
- """Status of the FindAll run."""
-
- termination_reason: Optional[
- Literal[
- "low_match_rate",
- "match_limit_met",
- "candidates_exhausted",
- "user_cancelled",
- "error_occurred",
- "timeout",
- "insufficient_funds",
- ]
- ] = None
- """Reason for termination when FindAll run is in terminal status."""
+__all__ = [ "FindallRun", "FindAllRun"]
class FindAllRun(BaseModel):
@@ -53,7 +18,7 @@ class FindAllRun(BaseModel):
generator: Literal["base", "core", "pro", "preview"]
"""Generator for the FindAll run."""
- status: Status
+ status: FindAllRunStatus
"""Status object for the FindAll run."""
created_at: Optional[str] = None
@@ -71,3 +36,11 @@ class FindAllRun(BaseModel):
FindallRun = FindAllRun # for backwards compatibility with v0.3.4
"""This is deprecated, `FindAllRun` should be used instead"""
+
+# Backwards-compat aliases (deprecated). `Status` and `StatusMetrics` were
+# inline classes in this module; they now live as top-level `FindAllRunStatus`
+# and `FindAllCandidateMetrics` models.
+from .findall_candidate_metrics import FindAllCandidateMetrics # noqa: E402
+
+Status = FindAllRunStatus
+StatusMetrics = FindAllCandidateMetrics
diff --git a/src/parallel/types/beta/findall_run_result.py b/src/parallel/types/beta/findall_run_result.py
index d1851a9..aa908c8 100644
--- a/src/parallel/types/beta/findall_run_result.py
+++ b/src/parallel/types/beta/findall_run_result.py
@@ -1,49 +1,12 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, List, Optional
-from typing_extensions import Literal
+from typing import List, Optional
from ..._models import BaseModel
from .findall_run import FindAllRun
-from ..field_basis import FieldBasis
+from .findall_candidate import FindAllCandidate
-__all__ = ["FindAllRunResult", "FindallRunResult", "Candidate"]
-
-
-class Candidate(BaseModel):
- """Candidate for a find all run that may end up as a match.
-
- Contains all the candidate's metadata and the output of the match conditions.
- A candidate is a match if all match conditions are satisfied.
- """
-
- candidate_id: str
- """ID of the candidate."""
-
- match_status: Literal["generated", "matched", "unmatched", "discarded"]
- """Status of the candidate. One of generated, matched, unmatched, discarded."""
-
- name: str
- """Name of the candidate."""
-
- url: str
- """URL that provides context or details of the entity for disambiguation."""
-
- basis: Optional[List[FieldBasis]] = None
- """List of FieldBasis objects supporting the output."""
-
- description: Optional[str] = None
- """
- Brief description of the entity that can help answer whether entity satisfies
- the query.
- """
-
- output: Optional[Dict[str, object]] = None
- """Results of the match condition evaluations for this candidate.
-
- This object contains the structured output that determines whether the candidate
- matches the overall FindAll objective.
- """
+__all__ = [ "FindallRunResult", "FindAllRunResult"]
class FindAllRunResult(BaseModel):
@@ -54,7 +17,7 @@ class FindAllRunResult(BaseModel):
taken.
"""
- candidates: List[Candidate]
+ candidates: List[FindAllCandidate]
"""All evaluated candidates at the time of the snapshot."""
run: FindAllRun
@@ -69,3 +32,7 @@ class FindAllRunResult(BaseModel):
FindallRunResult = FindAllRunResult # for backwards compatibility with v0.3.4
"""This is deprecated, `FindAllRunResult` should be used instead"""
+
+# Backwards-compat alias (deprecated). `Candidate` was an inline class in this
+# module; it now lives as the top-level `FindAllCandidate` model.
+Candidate = FindAllCandidate
diff --git a/src/parallel/types/beta/findall_run_status.py b/src/parallel/types/beta/findall_run_status.py
new file mode 100644
index 0000000..55f7a75
--- /dev/null
+++ b/src/parallel/types/beta/findall_run_status.py
@@ -0,0 +1,35 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from ..._models import BaseModel
+from .findall_candidate_metrics import FindAllCandidateMetrics
+
+__all__ = ["FindAllRunStatus"]
+
+
+class FindAllRunStatus(BaseModel):
+ """Status object for FindAll run."""
+
+ is_active: bool
+ """Whether the FindAll run is active"""
+
+ metrics: FindAllCandidateMetrics
+ """Candidate metrics for the FindAll run."""
+
+ status: Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]
+ """Status of the FindAll run."""
+
+ termination_reason: Optional[
+ Literal[
+ "low_match_rate",
+ "match_limit_met",
+ "candidates_exhausted",
+ "user_cancelled",
+ "error_occurred",
+ "timeout",
+ "insufficient_funds",
+ ]
+ ] = None
+ """Reason for termination when FindAll run is in terminal status."""
diff --git a/src/parallel/types/beta/findall_schema.py b/src/parallel/types/beta/findall_schema.py
index d214db7..3c0e4f6 100644
--- a/src/parallel/types/beta/findall_schema.py
+++ b/src/parallel/types/beta/findall_schema.py
@@ -4,23 +4,10 @@
from typing_extensions import Literal
from ..._models import BaseModel
+from .match_condition import MatchCondition
from .findall_enrich_input import FindAllEnrichInput
-__all__ = ["FindAllSchema", "FindallSchema", "MatchCondition"]
-
-
-class MatchCondition(BaseModel):
- """Match condition model for FindAll ingest."""
-
- description: str
- """Detailed description of the match condition.
-
- Include as much specific information as possible to help improve the quality and
- accuracy of Find All run results.
- """
-
- name: str
- """Name of the match condition."""
+__all__ = ["FindAllSchema", "FindallSchema"]
class FindAllSchema(BaseModel):
diff --git a/src/parallel/types/beta/full_content_settings_param.py b/src/parallel/types/beta/full_content_settings_param.py
new file mode 100644
index 0000000..7805bf4
--- /dev/null
+++ b/src/parallel/types/beta/full_content_settings_param.py
@@ -0,0 +1,7 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .. import full_content_settings_param
+
+FullContentSettingsParam = full_content_settings_param.FullContentSettingsParam
diff --git a/src/parallel/types/beta/match_condition.py b/src/parallel/types/beta/match_condition.py
new file mode 100644
index 0000000..53fa311
--- /dev/null
+++ b/src/parallel/types/beta/match_condition.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from ..._models import BaseModel
+
+__all__ = ["MatchCondition"]
+
+
+class MatchCondition(BaseModel):
+ """Match condition model for FindAll ingest."""
+
+ description: str
+ """Detailed description of the match condition.
+
+ Include as much specific information as possible to help improve the quality and
+ accuracy of Find All run results.
+ """
+
+ name: str
+ """Name of the match condition."""
diff --git a/src/parallel/types/beta/match_condition_param.py b/src/parallel/types/beta/match_condition_param.py
new file mode 100644
index 0000000..26d16a9
--- /dev/null
+++ b/src/parallel/types/beta/match_condition_param.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import Required, TypedDict
+
+__all__ = ["MatchConditionParam"]
+
+
+class MatchConditionParam(TypedDict, total=False):
+ """Match condition model for FindAll ingest."""
+
+ description: Required[str]
+ """Detailed description of the match condition.
+
+ Include as much specific information as possible to help improve the quality and
+ accuracy of Find All run results.
+ """
+
+ name: Required[str]
+ """Name of the match condition."""
diff --git a/src/parallel/types/beta/task_group.py b/src/parallel/types/beta/task_group.py
index ba452dc..d454351 100644
--- a/src/parallel/types/beta/task_group.py
+++ b/src/parallel/types/beta/task_group.py
@@ -1,26 +1,14 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+# Backwards-compatible re-export module (deprecated).
+#
+# Historically this module exposed `TaskGroup` as the data shape for a beta
+# task group. The Stainless config no longer aliases the GA `TaskGroup` model
+# under `beta` (to avoid a TypeScript class+type collision). Importers that
+# still reference `parallel.types.beta.task_group.TaskGroup` should switch to
+# `parallel.types.task_group.TaskGroup` instead. This module preserves the old
+# import path as a deprecated shim.
-from typing import Dict, Union, Optional
-
-from pydantic import Field as FieldInfo
-
-from ..._models import BaseModel
-from .task_group_status import TaskGroupStatus
+from .. import task_group
__all__ = ["TaskGroup"]
-
-class TaskGroup(BaseModel):
- """Response object for a task group, including its status and metadata."""
-
- created_at: Optional[str] = None
- """Timestamp of the creation of the group, as an RFC 3339 string."""
-
- status: TaskGroupStatus
- """Status of the group."""
-
- task_group_id: str = FieldInfo(alias="taskgroup_id")
- """ID of the group."""
-
- metadata: Optional[Dict[str, Union[str, float, bool]]] = None
- """User-provided metadata stored with the group."""
+TaskGroup = task_group.TaskGroup
diff --git a/src/parallel/types/beta/task_group_events_response.py b/src/parallel/types/beta/task_group_events_response.py
index 9728390..90d4403 100644
--- a/src/parallel/types/beta/task_group_events_response.py
+++ b/src/parallel/types/beta/task_group_events_response.py
@@ -1,29 +1,14 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from typing import Union
-from typing_extensions import Literal, Annotated, TypeAlias
+from typing_extensions import Annotated, TypeAlias
from ..._utils import PropertyInfo
-from ..._models import BaseModel
from ..error_event import ErrorEvent
from ..task_run_event import TaskRunEvent
-from .task_group_status import TaskGroupStatus
-
-__all__ = ["TaskGroupEventsResponse", "TaskGroupStatusEvent"]
-
-
-class TaskGroupStatusEvent(BaseModel):
- """Event indicating an update to group status."""
-
- event_id: str
- """Cursor to resume the event stream."""
-
- status: TaskGroupStatus
- """Task group status object."""
-
- type: Literal["task_group_status"]
- """Event type; always 'task_group_status'."""
+from ..task_group_status_event import TaskGroupStatusEvent
+__all__ = ["TaskGroupEventsResponse"]
TaskGroupEventsResponse: TypeAlias = Annotated[
Union[TaskGroupStatusEvent, TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")
diff --git a/src/parallel/types/beta/task_group_run_response.py b/src/parallel/types/beta/task_group_run_response.py
index 59acedf..ed74941 100644
--- a/src/parallel/types/beta/task_group_run_response.py
+++ b/src/parallel/types/beta/task_group_run_response.py
@@ -1,32 +1,7 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List, Optional
-
-from ..._models import BaseModel
-from .task_group_status import TaskGroupStatus
+from .. import task_group_run_response
__all__ = ["TaskGroupRunResponse"]
-
-class TaskGroupRunResponse(BaseModel):
- """Response from adding new task runs to a task group."""
-
- event_cursor: Optional[str] = None
- """
- Cursor for these runs in the event stream at
- taskgroup/events?last_event_id=. Empty for the first runs in the
- group.
- """
-
- run_cursor: Optional[str] = None
- """
- Cursor for these runs in the run stream at
- taskgroup/runs?last_event_id=. Empty for the first runs in the
- group.
- """
-
- run_ids: List[str]
- """IDs of the newly created runs."""
-
- status: TaskGroupStatus
- """Status of the group."""
+TaskGroupRunResponse = task_group_run_response.TaskGroupRunResponse
diff --git a/src/parallel/types/beta/task_group_status.py b/src/parallel/types/beta/task_group_status.py
index 0628c5d..349a034 100644
--- a/src/parallel/types/beta/task_group_status.py
+++ b/src/parallel/types/beta/task_group_status.py
@@ -1,29 +1,7 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Dict, Optional
-
-from ..._models import BaseModel
+from .. import task_group_status
__all__ = ["TaskGroupStatus"]
-
-class TaskGroupStatus(BaseModel):
- """Status of a task group."""
-
- is_active: bool
- """True if at least one run in the group is currently active, i.e.
-
- status is one of {'cancelling', 'queued', 'running'}.
- """
-
- modified_at: Optional[str] = None
- """Timestamp of the last status update to the group, as an RFC 3339 string."""
-
- num_task_runs: int
- """Number of task runs in the group."""
-
- status_message: Optional[str] = None
- """Human-readable status message for the group."""
-
- task_run_status_counts: Dict[str, int]
- """Number of task runs with each status."""
+TaskGroupStatus = task_group_status.TaskGroupStatus
diff --git a/src/parallel/types/beta/task_group_status_event.py b/src/parallel/types/beta/task_group_status_event.py
new file mode 100644
index 0000000..dfb764c
--- /dev/null
+++ b/src/parallel/types/beta/task_group_status_event.py
@@ -0,0 +1,7 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .. import task_group_status_event
+
+__all__ = ["TaskGroupStatusEvent"]
+
+TaskGroupStatusEvent = task_group_status_event.TaskGroupStatusEvent
diff --git a/src/parallel/types/beta/task_run_create_params.py b/src/parallel/types/beta/task_run_create_params.py
index 5f6f4c9..caafb05 100644
--- a/src/parallel/types/beta/task_run_create_params.py
+++ b/src/parallel/types/beta/task_run_create_params.py
@@ -11,8 +11,9 @@
from ..mcp_server_param import McpServerParam
from .parallel_beta_param import ParallelBetaParam
from ..shared_params.source_policy import SourcePolicy
+from ..task_advanced_settings_param import TaskAdvancedSettingsParam
-__all__ = ["TaskRunCreateParams", "AdvancedSettings"]
+__all__ = ["TaskRunCreateParams"]
class TaskRunCreateParams(TypedDict, total=False):
@@ -22,7 +23,7 @@ class TaskRunCreateParams(TypedDict, total=False):
processor: Required[str]
"""Processor to use for the task."""
- advanced_settings: Optional[AdvancedSettings]
+ advanced_settings: Optional[TaskAdvancedSettingsParam]
"""Advanced search configuration for a task run."""
enable_events: Optional[bool]
@@ -70,8 +71,6 @@ class TaskRunCreateParams(TypedDict, total=False):
"""Optional header to specify the beta version(s) to enable."""
-class AdvancedSettings(TypedDict, total=False):
- """Advanced search configuration for a task run."""
-
- location: Optional[str]
- """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
+# Backwards-compat alias (deprecated). `AdvancedSettings` was an inline TypedDict
+# in this module; it now lives as the top-level `TaskAdvancedSettingsParam`.
+AdvancedSettings = TaskAdvancedSettingsParam
diff --git a/src/parallel/types/beta/task_run_events_response.py b/src/parallel/types/beta/task_run_events_response.py
index 1516f91..6ef4012 100644
--- a/src/parallel/types/beta/task_run_events_response.py
+++ b/src/parallel/types/beta/task_run_events_response.py
@@ -1,70 +1,24 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List, Union, Optional
-from typing_extensions import Literal, Annotated, TypeAlias
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
from ..._utils import PropertyInfo
-from ..._models import BaseModel
from ..error_event import ErrorEvent
from ..task_run_event import TaskRunEvent
+from ..task_run_progress_stats_event import TaskRunProgressStatsEvent
+from ..task_run_progress_message_event import TaskRunProgressMessageEvent
-__all__ = [
- "TaskRunEventsResponse",
- "TaskRunProgressStatsEvent",
- "TaskRunProgressStatsEventSourceStats",
- "TaskRunProgressMessageEvent",
-]
-
-
-class TaskRunProgressStatsEventSourceStats(BaseModel):
- """Source stats describing progress so far."""
-
- num_sources_considered: Optional[int] = None
- """Number of sources considered in processing the task."""
-
- num_sources_read: Optional[int] = None
- """Number of sources read in processing the task."""
-
- sources_read_sample: Optional[List[str]] = None
- """A sample of URLs of sources read in processing the task."""
-
-
-class TaskRunProgressStatsEvent(BaseModel):
- """A progress update for a task run."""
-
- progress_meter: float
- """Completion percentage of the task run.
-
- Ranges from 0 to 100 where 0 indicates no progress and 100 indicates completion.
- """
-
- source_stats: TaskRunProgressStatsEventSourceStats
- """Source stats describing progress so far."""
-
- type: Literal["task_run.progress_stats"]
- """Event type; always 'task_run.progress_stats'."""
-
-
-class TaskRunProgressMessageEvent(BaseModel):
- """A message for a task run progress update."""
-
- message: str
- """Progress update message."""
-
- timestamp: Optional[str] = None
- """Timestamp of the message."""
-
- type: Literal[
- "task_run.progress_msg.plan",
- "task_run.progress_msg.search",
- "task_run.progress_msg.result",
- "task_run.progress_msg.tool_call",
- "task_run.progress_msg.exec_status",
- ]
- """Event type; always starts with 'task_run.progress_msg'."""
-
+__all__ = ["TaskRunEventsResponse"]
TaskRunEventsResponse: TypeAlias = Annotated[
Union[TaskRunProgressStatsEvent, TaskRunProgressMessageEvent, TaskRunEvent, ErrorEvent],
PropertyInfo(discriminator="type"),
]
+
+# Backwards-compat alias (deprecated). `TaskRunProgressStatsEventSourceStats`
+# was the auto-generated nested-class name for the source-stats payload; it now
+# lives as the top-level `TaskRunSourceStats` model.
+from ..task_run_source_stats import TaskRunSourceStats # noqa: E402
+
+TaskRunProgressStatsEventSourceStats = TaskRunSourceStats
diff --git a/src/parallel/types/full_content_settings_param.py b/src/parallel/types/full_content_settings_param.py
new file mode 100644
index 0000000..4d0146c
--- /dev/null
+++ b/src/parallel/types/full_content_settings_param.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import TypedDict
+
+__all__ = ["FullContentSettingsParam"]
+
+
+class FullContentSettingsParam(TypedDict, total=False):
+ """Optional settings for returning full content."""
+
+ max_chars_per_result: Optional[int]
+ """
+ Optional limit on the number of characters to include in the full content for
+ each url. Full content always starts at the beginning of the page and is
+ truncated at the limit if necessary.
+ """
diff --git a/src/parallel/types/monitor.py b/src/parallel/types/monitor.py
new file mode 100644
index 0000000..edf65e6
--- /dev/null
+++ b/src/parallel/types/monitor.py
@@ -0,0 +1,74 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, Union, Optional
+from datetime import datetime
+from typing_extensions import Literal, TypeAlias
+
+from .._models import BaseModel
+from .monitor_webhook import MonitorWebhook
+from .monitor_snapshot_output import MonitorSnapshotOutput
+from .monitor_snapshot_response_settings import MonitorSnapshotResponseSettings
+from .monitor_event_stream_response_settings import MonitorEventStreamResponseSettings
+
+__all__ = ["Monitor", "Settings"]
+
+Settings: TypeAlias = Union[MonitorEventStreamResponseSettings, MonitorSnapshotResponseSettings]
+
+
+class Monitor(BaseModel):
+ """Response object for a monitor.
+
+ The `type` field at the root determines the concrete shape of `settings`:
+ `event_stream` uses `MonitorEventStreamResponseSettings`, and `snapshot`
+ uses `MonitorSnapshotResponseSettings`. Snapshot monitors also carry an
+ `output` field (`MonitorSnapshotOutput`) with the latest computed state.
+ """
+
+ created_at: datetime
+ """Timestamp of the creation of the monitor, as an RFC 3339 string."""
+
+ frequency: str
+ """Frequency of the monitor.
+
+ Format: '<number><unit>' where unit is 'h' (hours), 'd' (days), or 'w' (weeks).
+ Must be between 1h and 30d (inclusive).
+ """
+
+ monitor_id: str
+ """ID of the monitor."""
+
+ processor: Literal["lite", "base"]
+ """Processor to use for the monitor.
+
+ `lite` is faster and cheaper; `base` performs more thorough analysis at higher
+ cost and latency. Defaults to `lite`.
+ """
+
+ settings: Settings
+ """Type-specific configuration.
+
+ Shape is determined by `type`: `MonitorEventStreamResponseSettings` for
+ `event_stream`, `MonitorSnapshotResponseSettings` for `snapshot`.
+ """
+
+ status: Literal["active", "cancelled"]
+ """Status of the monitor."""
+
+ type: Literal["event_stream", "snapshot"]
+ """The type of monitor."""
+
+ last_run_at: Optional[str] = None
+ """Timestamp of the last run for the monitor, as an RFC 3339 string."""
+
+ metadata: Optional[Dict[str, str]] = None
+ """
+ User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+ """
+
+ output: Optional[MonitorSnapshotOutput] = None
+ """Runtime output state for a `snapshot` monitor."""
+
+ webhook: Optional[MonitorWebhook] = None
+ """Webhook configuration for a monitor."""
diff --git a/src/parallel/types/monitor_completion_event.py b/src/parallel/types/monitor_completion_event.py
new file mode 100644
index 0000000..02f9ea7
--- /dev/null
+++ b/src/parallel/types/monitor_completion_event.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["MonitorCompletionEvent"]
+
+
+class MonitorCompletionEvent(BaseModel):
+ """Emitted when a monitor execution ran but detected no material changes.
+
+ Only returned when `include_completions=true` is passed to the list events
+ endpoint. Useful for auditing execution history alongside content events.
+ """
+
+ timestamp: datetime
+ """Timestamp of when the monitor execution completed, as an RFC 3339 string."""
+
+ event_type: Optional[Literal["completion"]] = None
+ """Discriminant for the completion event variant."""
diff --git a/src/parallel/types/monitor_create_params.py b/src/parallel/types/monitor_create_params.py
new file mode 100644
index 0000000..769d5e9
--- /dev/null
+++ b/src/parallel/types/monitor_create_params.py
@@ -0,0 +1,56 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, Union, Optional
+from typing_extensions import Literal, Required, TypeAlias, TypedDict
+
+from .monitor_webhook_param import MonitorWebhookParam
+from .monitor_snapshot_settings_param import MonitorSnapshotSettingsParam
+from .monitor_event_stream_settings_param import MonitorEventStreamSettingsParam
+
+__all__ = ["MonitorCreateParams", "Settings"]
+
+
+class MonitorCreateParams(TypedDict, total=False):
+ frequency: Required[str]
+ """Frequency of the monitor.
+
+ Format: '<number><unit>' where unit is 'h' (hours), 'd' (days), or 'w' (weeks).
+ Must be between 1h and 30d (inclusive).
+ """
+
+ settings: Required[Settings]
+ """Type-specific settings for the monitor.
+
+ The expected shape is determined by the root `type` field: pass
+ `MonitorEventStreamSettings` when `type` is `event_stream`, and
+ `MonitorSnapshotSettings` when `type` is `snapshot`.
+ """
+
+ type: Required[Literal["event_stream", "snapshot"]]
+ """Type of monitor to create.
+
+ `event_stream` monitors a search query for material changes; `snapshot` monitors
+ a specific task run's output. Determines the expected shape of `settings`.
+ """
+
+ metadata: Optional[Dict[str, str]]
+ """
+ User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+ """
+
+ processor: Literal["lite", "base"]
+ """Processor to use for the monitor.
+
+ `lite` is faster and cheaper; `base` performs more thorough analysis at higher
+ cost and latency. Defaults to `lite`.
+ """
+
+ webhook: Optional[MonitorWebhookParam]
+ """Webhook configuration for a monitor."""
+
+
+Settings: TypeAlias = Union[MonitorEventStreamSettingsParam, MonitorSnapshotSettingsParam]
diff --git a/src/parallel/types/monitor_error_event.py b/src/parallel/types/monitor_error_event.py
new file mode 100644
index 0000000..112e0fb
--- /dev/null
+++ b/src/parallel/types/monitor_error_event.py
@@ -0,0 +1,25 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["MonitorErrorEvent"]
+
+
+class MonitorErrorEvent(BaseModel):
+ """Emitted when a monitor execution failed (e.g. payment or quota error).
+
+ Always included in the events list regardless of `include_completions`.
+ """
+
+ error_message: str
+ """Human-readable description of the failure."""
+
+ timestamp: datetime
+ """Timestamp of when the monitor execution failed, as an RFC 3339 string."""
+
+ event_type: Optional[Literal["error"]] = None
+ """Discriminant for the error event variant."""
diff --git a/src/parallel/types/monitor_event_stream_event.py b/src/parallel/types/monitor_event_stream_event.py
new file mode 100644
index 0000000..54a4825
--- /dev/null
+++ b/src/parallel/types/monitor_event_stream_event.py
@@ -0,0 +1,43 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union, Optional
+from typing_extensions import Literal, Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .._models import BaseModel
+from .task_run_json_output import TaskRunJsonOutput
+from .task_run_text_output import TaskRunTextOutput
+
+__all__ = ["MonitorEventStreamEvent", "Output"]
+
+Output: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")]
+
+
+class MonitorEventStreamEvent(BaseModel):
+ """Append-only event from an event_stream monitor.
+
+ Each event represents a distinct material change detected since the
+ previous execution. Events are net-new relative to the cursor; clients
+ should treat them as an append-only log.
+ """
+
+ event_date: Optional[str] = None
+ """Date when this event was produced.
+
+ ISO 8601 date (YYYY-MM-DD) or partial (YYYY-MM or YYYY).
+ """
+
+ event_group_id: str
+ """ID of the event group that owns this event."""
+
+ event_id: str
+ """Stable identifier for this event.
+
+ Safe to use for client-side deduplication across pagination and retries.
+ """
+
+ output: Output
+ """Text or JSON output describing the detected change."""
+
+ event_type: Optional[Literal["event_stream"]] = None
+ """Discriminant for the event_stream event variant."""
diff --git a/src/parallel/types/monitor_event_stream_response_settings.py b/src/parallel/types/monitor_event_stream_response_settings.py
new file mode 100644
index 0000000..630f85b
--- /dev/null
+++ b/src/parallel/types/monitor_event_stream_response_settings.py
@@ -0,0 +1,30 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from .._models import BaseModel
+from .json_schema import JsonSchema
+from .advanced_monitor_settings import AdvancedMonitorSettings
+
+__all__ = ["MonitorEventStreamResponseSettings"]
+
+
+class MonitorEventStreamResponseSettings(BaseModel):
+ """Type-specific response fields for an `event_stream` monitor."""
+
+ query: str
+ """The search query being monitored."""
+
+ advanced_settings: Optional[AdvancedMonitorSettings] = None
+ """Advanced monitor configuration."""
+
+ include_backfill: Optional[bool] = None
+ """
+ If true, the first execution returns a sample of recent historical events
+ matching the query (preview only — not exhaustive). If false or omitted, only
+ events from the monitor's creation date onward are returned. Subsequent
+ executions are always incremental.
+ """
+
+ output_schema: Optional[JsonSchema] = None
+ """JSON schema for a task input or output."""
diff --git a/src/parallel/types/monitor_event_stream_settings_param.py b/src/parallel/types/monitor_event_stream_settings_param.py
new file mode 100644
index 0000000..187abb6
--- /dev/null
+++ b/src/parallel/types/monitor_event_stream_settings_param.py
@@ -0,0 +1,32 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Required, TypedDict
+
+from .json_schema_param import JsonSchemaParam
+from .advanced_monitor_settings_param import AdvancedMonitorSettingsParam
+
+__all__ = ["MonitorEventStreamSettingsParam"]
+
+
+class MonitorEventStreamSettingsParam(TypedDict, total=False):
+ """Type-specific settings for an `event_stream` monitor."""
+
+ query: Required[str]
+ """Search query to monitor for material changes."""
+
+ advanced_settings: Optional[AdvancedMonitorSettingsParam]
+ """Advanced monitor configuration."""
+
+ include_backfill: Optional[bool]
+ """
+ If true, the first execution returns a sample of recent historical events
+ matching the query (preview only — not exhaustive). If false or omitted, only
+ events from the monitor's creation date onward are returned. Subsequent
+ executions are always incremental.
+ """
+
+ output_schema: Optional[JsonSchemaParam]
+ """JSON schema for a task input or output."""
diff --git a/src/parallel/types/monitor_events_params.py b/src/parallel/types/monitor_events_params.py
new file mode 100644
index 0000000..44aba97
--- /dev/null
+++ b/src/parallel/types/monitor_events_params.py
@@ -0,0 +1,29 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import TypedDict
+
+__all__ = ["MonitorEventsParams"]
+
+
+class MonitorEventsParams(TypedDict, total=False):
+ cursor: Optional[str]
+ """Pass `next_cursor` from a previous response to retrieve more events."""
+
+ event_group_id: Optional[str]
+ """Filter to a single execution.
+
+ Values come from `event_group_id` in webhook events and listed events.
+ Pagination params are ignored when set.
+ """
+
+ include_completions: bool
+ """
+ When true, include completion events for executions that ran but detected no
+ material changes. Useful for auditing execution history.
+ """
+
+ limit: Optional[int]
+ """Maximum number of events to return. Defaults to 20. Between 1 and 100."""
diff --git a/src/parallel/types/monitor_list_params.py b/src/parallel/types/monitor_list_params.py
new file mode 100644
index 0000000..56279e7
--- /dev/null
+++ b/src/parallel/types/monitor_list_params.py
@@ -0,0 +1,31 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import List, Optional
+from typing_extensions import Literal, TypedDict
+
+__all__ = ["MonitorListParams"]
+
+
+class MonitorListParams(TypedDict, total=False):
+ cursor: Optional[str]
+ """Pagination token from `next_cursor` in a previous response.
+
+ Omit to start from the most recently created monitor.
+ """
+
+ limit: Optional[int]
+ """Maximum number of monitors to return. Defaults to 100. Between 1 and 10000."""
+
+ status: Optional[List[Literal["active", "cancelled"]]]
+ """Filter by monitor status.
+
+ Pass multiple times to filter by multiple values. Defaults to `active` only.
+ """
+
+ type: Optional[List[Literal["event_stream", "snapshot"]]]
+ """Filter by monitor type.
+
+ Pass multiple times to filter by multiple values. Omit to return all types.
+ """
diff --git a/src/parallel/types/monitor_snapshot_event.py b/src/parallel/types/monitor_snapshot_event.py
new file mode 100644
index 0000000..776ebb7
--- /dev/null
+++ b/src/parallel/types/monitor_snapshot_event.py
@@ -0,0 +1,51 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union, Optional
+from typing_extensions import Literal, Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .._models import BaseModel
+from .task_run_json_output import TaskRunJsonOutput
+from .task_run_text_output import TaskRunTextOutput
+
+__all__ = ["MonitorSnapshotEvent", "ChangedOutput", "PreviousOutput"]
+
+ChangedOutput: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")]
+
+PreviousOutput: TypeAlias = Annotated[Union[TaskRunTextOutput, TaskRunJsonOutput], PropertyInfo(discriminator="type")]
+
+
+class MonitorSnapshotEvent(BaseModel):
+ """Snapshot diff event emitted when a monitored task run's output changes.
+
+ `changed_output` contains only the fields that changed since the previous execution,
+ along with their `basis` (reasoning + citations). `previous_output` holds
+ the complete output from the prior run for comparison.
+ """
+
+ changed_output: ChangedOutput
+ """
+ Partial output containing only the fields that changed since the previous
+ execution, each with its `basis` (reasoning and citations).
+ """
+
+ event_date: Optional[str] = None
+ """Date when this event was produced.
+
+ ISO 8601 date (YYYY-MM-DD) or partial (YYYY-MM or YYYY).
+ """
+
+ event_group_id: str
+ """ID of the event group that owns this event."""
+
+ event_id: str
+ """Stable identifier for this event.
+
+ Safe to use for client-side deduplication across pagination and retries.
+ """
+
+ previous_output: PreviousOutput
+ """The full output from the prior run, including all fields and basis."""
+
+ event_type: Optional[Literal["snapshot"]] = None
+ """Discriminant for the snapshot event variant."""
diff --git a/src/parallel/types/monitor_snapshot_output.py b/src/parallel/types/monitor_snapshot_output.py
new file mode 100644
index 0000000..e6c7219
--- /dev/null
+++ b/src/parallel/types/monitor_snapshot_output.py
@@ -0,0 +1,26 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union, Optional
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .._models import BaseModel
+from .task_run_json_output import TaskRunJsonOutput
+from .task_run_text_output import TaskRunTextOutput
+
+__all__ = ["MonitorSnapshotOutput", "LatestSnapshot"]
+
+LatestSnapshot: TypeAlias = Annotated[
+ Union[TaskRunTextOutput, TaskRunJsonOutput, None], PropertyInfo(discriminator="type")
+]
+
+
+class MonitorSnapshotOutput(BaseModel):
+ """Runtime output state for a `snapshot` monitor."""
+
+ latest_snapshot: Optional[LatestSnapshot] = None
+ """
+ Task run output from the most recent completed execution of this snapshot
+ monitor — same structure as the output of the original task run the monitor was
+ created from. `null` until the first run completes.
+ """
diff --git a/src/parallel/types/monitor_snapshot_response_settings.py b/src/parallel/types/monitor_snapshot_response_settings.py
new file mode 100644
index 0000000..85a5bc3
--- /dev/null
+++ b/src/parallel/types/monitor_snapshot_response_settings.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from .._models import BaseModel
+from .json_schema import JsonSchema
+
+__all__ = ["MonitorSnapshotResponseSettings"]
+
+
+class MonitorSnapshotResponseSettings(BaseModel):
+ """Configuration settings for a `snapshot` monitor."""
+
+ query: str
+ """The original task input from the baseline task run that this monitor tracks."""
+
+ task_run_id: str
+ """ID of the task run used as the monitoring baseline."""
+
+ output_schema: Optional[JsonSchema] = None
+ """JSON schema for a task input or output."""
diff --git a/src/parallel/types/monitor_snapshot_settings_param.py b/src/parallel/types/monitor_snapshot_settings_param.py
new file mode 100644
index 0000000..d0d3ad7
--- /dev/null
+++ b/src/parallel/types/monitor_snapshot_settings_param.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing_extensions import Required, TypedDict
+
+__all__ = ["MonitorSnapshotSettingsParam"]
+
+
+class MonitorSnapshotSettingsParam(TypedDict, total=False):
+ """Type-specific settings for a `snapshot` monitor."""
+
+ task_run_id: Required[str]
+ """Task run ID whose output becomes the data and schema for the monitor."""
diff --git a/src/parallel/types/monitor_update_params.py b/src/parallel/types/monitor_update_params.py
new file mode 100644
index 0000000..b2a8f34
--- /dev/null
+++ b/src/parallel/types/monitor_update_params.py
@@ -0,0 +1,40 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+from typing_extensions import Literal, TypedDict
+
+from .monitor_webhook_param import MonitorWebhookParam
+from .update_monitor_event_stream_settings_param import UpdateMonitorEventStreamSettingsParam
+
+__all__ = ["MonitorUpdateParams"]
+
+
+class MonitorUpdateParams(TypedDict, total=False):
+ frequency: Optional[str]
+ """Frequency of the monitor.
+
+ Format: '<number><unit>' where unit is 'h' (hours), 'd' (days), or 'w' (weeks).
+ Must be between 1h and 30d (inclusive).
+ """
+
+ metadata: Optional[Dict[str, str]]
+ """
+ User-provided metadata stored with the monitor and echoed back in webhook
+ notifications and GET responses, so you can map events to objects in your
+ application. Keys: max 16 chars; values: max 512 chars.
+ """
+
+ settings: Optional[UpdateMonitorEventStreamSettingsParam]
+ """Type-specific update settings for an `event_stream` monitor."""
+
+ type: Optional[Literal["event_stream", "snapshot"]]
+ """Type of the monitor being updated.
+
+ Required when `settings` is provided; must be `event_stream` (snapshot monitors
+ have no updatable type-specific settings).
+ """
+
+ webhook: Optional[MonitorWebhookParam]
+ """Webhook configuration for a monitor."""
diff --git a/src/parallel/types/monitor_webhook.py b/src/parallel/types/monitor_webhook.py
new file mode 100644
index 0000000..abfedfd
--- /dev/null
+++ b/src/parallel/types/monitor_webhook.py
@@ -0,0 +1,20 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["MonitorWebhook"]
+
+
+class MonitorWebhook(BaseModel):
+ """Webhook configuration for a monitor."""
+
+ url: str
+ """URL for the webhook."""
+
+ event_types: Optional[
+ List[Literal["monitor.event.detected", "monitor.execution.completed", "monitor.execution.failed"]]
+ ] = None
+ """Event types to send the webhook notifications for."""
diff --git a/src/parallel/types/monitor_webhook_param.py b/src/parallel/types/monitor_webhook_param.py
new file mode 100644
index 0000000..879924d
--- /dev/null
+++ b/src/parallel/types/monitor_webhook_param.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import List
+from typing_extensions import Literal, Required, TypedDict
+
+__all__ = ["MonitorWebhookParam"]
+
+
+class MonitorWebhookParam(TypedDict, total=False):
+ """Webhook configuration for a monitor."""
+
+ url: Required[str]
+ """URL for the webhook."""
+
+ event_types: List[Literal["monitor.event.detected", "monitor.execution.completed", "monitor.execution.failed"]]
+ """Event types to send the webhook notifications for."""
diff --git a/src/parallel/types/paginated_monitor_events.py b/src/parallel/types/paginated_monitor_events.py
new file mode 100644
index 0000000..586329f
--- /dev/null
+++ b/src/parallel/types/paginated_monitor_events.py
@@ -0,0 +1,32 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Union, Optional
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .._models import BaseModel
+from .shared.warning import Warning
+from .monitor_error_event import MonitorErrorEvent
+from .monitor_snapshot_event import MonitorSnapshotEvent
+from .monitor_completion_event import MonitorCompletionEvent
+from .monitor_event_stream_event import MonitorEventStreamEvent
+
+__all__ = ["PaginatedMonitorEvents", "Event"]
+
+Event: TypeAlias = Annotated[
+ Union[MonitorEventStreamEvent, MonitorSnapshotEvent, MonitorCompletionEvent, MonitorErrorEvent],
+ PropertyInfo(discriminator="event_type"),
+]
+
+
+class PaginatedMonitorEvents(BaseModel):
+ """Paginated list of monitor events, newest first."""
+
+ events: List[Event]
+ """Monitor events returned by this request, ordered newest first."""
+
+ next_cursor: Optional[str] = None
+ """Pass as `cursor` to retrieve more events. Absent when there are no more events."""
+
+ warnings: Optional[List[Warning]] = None
+ """Execution caveats for this page of events, e.g. compute limits."""
diff --git a/src/parallel/types/paginated_monitor_response.py b/src/parallel/types/paginated_monitor_response.py
new file mode 100644
index 0000000..a6e6ad6
--- /dev/null
+++ b/src/parallel/types/paginated_monitor_response.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from .monitor import Monitor
+from .._models import BaseModel
+
+__all__ = ["PaginatedMonitorResponse"]
+
+
+class PaginatedMonitorResponse(BaseModel):
+ """Paginated list of monitors."""
+
+ monitors: List[Monitor]
+ """List of monitors for the current page."""
+
+ next_cursor: Optional[str] = None
+ """Opaque pagination token.
+
+ Pass as `cursor` to retrieve the next page. Absent when there are no more pages.
+ """
diff --git a/src/parallel/types/run_input.py b/src/parallel/types/run_input.py
index 8c302d6..1997632 100644
--- a/src/parallel/types/run_input.py
+++ b/src/parallel/types/run_input.py
@@ -7,15 +7,9 @@
from .task_spec import TaskSpec
from .mcp_server import McpServer
from .shared.source_policy import SourcePolicy
+from .task_advanced_settings import TaskAdvancedSettings
-__all__ = ["RunInput", "AdvancedSettings"]
-
-
-class AdvancedSettings(BaseModel):
- """Advanced search configuration for a task run."""
-
- location: Optional[str] = None
- """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
+__all__ = ["RunInput"]
class RunInput(BaseModel):
@@ -27,7 +21,7 @@ class RunInput(BaseModel):
processor: str
"""Processor to use for the task."""
- advanced_settings: Optional[AdvancedSettings] = None
+ advanced_settings: Optional[TaskAdvancedSettings] = None
"""Advanced search configuration for a task run."""
enable_events: Optional[bool] = None
@@ -70,3 +64,8 @@ class RunInput(BaseModel):
webhook: Optional[Webhook] = None
"""Webhooks for Task Runs."""
+
+
+# Backwards-compat alias (deprecated). `AdvancedSettings` was an inline class in
+# this module; it now lives as the top-level `TaskAdvancedSettings` model.
+AdvancedSettings = TaskAdvancedSettings
diff --git a/src/parallel/types/run_input_param.py b/src/parallel/types/run_input_param.py
index 9fc2605..85e8830 100644
--- a/src/parallel/types/run_input_param.py
+++ b/src/parallel/types/run_input_param.py
@@ -9,15 +9,9 @@
from .task_spec_param import TaskSpecParam
from .mcp_server_param import McpServerParam
from .shared_params.source_policy import SourcePolicy
+from .task_advanced_settings_param import TaskAdvancedSettingsParam
-__all__ = ["RunInputParam", "AdvancedSettings"]
-
-
-class AdvancedSettings(TypedDict, total=False):
- """Advanced search configuration for a task run."""
-
- location: Optional[str]
- """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
+__all__ = ["RunInputParam"]
class RunInputParam(TypedDict, total=False):
@@ -29,7 +23,7 @@ class RunInputParam(TypedDict, total=False):
processor: Required[str]
"""Processor to use for the task."""
- advanced_settings: Optional[AdvancedSettings]
+ advanced_settings: Optional[TaskAdvancedSettingsParam]
"""Advanced search configuration for a task run."""
enable_events: Optional[bool]
@@ -72,3 +66,8 @@ class RunInputParam(TypedDict, total=False):
webhook: Optional[WebhookParam]
"""Webhooks for Task Runs."""
+
+
+# Backwards-compat alias (deprecated). `AdvancedSettings` was an inline TypedDict
+# in this module; it now lives as the top-level `TaskAdvancedSettingsParam`.
+AdvancedSettings = TaskAdvancedSettingsParam
diff --git a/src/parallel/types/task_advanced_settings.py b/src/parallel/types/task_advanced_settings.py
new file mode 100644
index 0000000..a1556c4
--- /dev/null
+++ b/src/parallel/types/task_advanced_settings.py
@@ -0,0 +1,14 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from .._models import BaseModel
+
+__all__ = ["TaskAdvancedSettings"]
+
+
+class TaskAdvancedSettings(BaseModel):
+ """Advanced search configuration for a task run."""
+
+ location: Optional[str] = None
+ """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
diff --git a/src/parallel/types/task_advanced_settings_param.py b/src/parallel/types/task_advanced_settings_param.py
new file mode 100644
index 0000000..00dbe03
--- /dev/null
+++ b/src/parallel/types/task_advanced_settings_param.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import TypedDict
+
+__all__ = ["TaskAdvancedSettingsParam"]
+
+
+class TaskAdvancedSettingsParam(TypedDict, total=False):
+ """Advanced search configuration for a task run."""
+
+ location: Optional[str]
+ """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
diff --git a/src/parallel/types/task_group.py b/src/parallel/types/task_group.py
new file mode 100644
index 0000000..b40fa65
--- /dev/null
+++ b/src/parallel/types/task_group.py
@@ -0,0 +1,26 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, Union, Optional
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+from .task_group_status import TaskGroupStatus
+
+__all__ = ["TaskGroup"]
+
+
+class TaskGroup(BaseModel):
+ """Response object for a task group, including its status and metadata."""
+
+ created_at: Optional[str] = None
+ """Timestamp of the creation of the group, as an RFC 3339 string."""
+
+ status: TaskGroupStatus
+ """Status of the group."""
+
+ task_group_id: str = FieldInfo(alias="taskgroup_id")
+ """ID of the group."""
+
+ metadata: Optional[Dict[str, Union[str, float, bool]]] = None
+ """User-provided metadata stored with the group."""
diff --git a/src/parallel/types/task_group_add_runs_params.py b/src/parallel/types/task_group_add_runs_params.py
new file mode 100644
index 0000000..75812f6
--- /dev/null
+++ b/src/parallel/types/task_group_add_runs_params.py
@@ -0,0 +1,36 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import List, Iterable, Optional
+from typing_extensions import Required, Annotated, TypedDict
+
+from .._utils import PropertyInfo
+from .run_input_param import RunInputParam
+from .task_spec_param import TaskSpecParam
+from .beta.parallel_beta_param import ParallelBetaParam
+
+__all__ = ["TaskGroupAddRunsParams"]
+
+
+class TaskGroupAddRunsParams(TypedDict, total=False):
+ inputs: Required[Iterable[RunInputParam]]
+ """List of task runs to execute.
+
+ Up to 1,000 runs can be specified per request. If you'd like to add more runs,
+ split them across multiple TaskGroup POST requests.
+ """
+
+ refresh_status: bool
+
+ default_task_spec: Optional[TaskSpecParam]
+ """Specification for a task.
+
+ Auto output schemas can be specified by setting `output_schema={"type":"auto"}`.
+ Not specifying a TaskSpec is the same as setting an auto output schema.
+
+ For convenience bare strings are also accepted as input or output schemas.
+ """
+
+ betas: Annotated[List[ParallelBetaParam], PropertyInfo(alias="parallel-beta")]
+ """Optional header to specify the beta version(s) to enable."""
diff --git a/src/parallel/types/task_group_create_params.py b/src/parallel/types/task_group_create_params.py
new file mode 100644
index 0000000..2b5cc73
--- /dev/null
+++ b/src/parallel/types/task_group_create_params.py
@@ -0,0 +1,13 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Dict, Union, Optional
+from typing_extensions import TypedDict
+
+__all__ = ["TaskGroupCreateParams"]
+
+
+class TaskGroupCreateParams(TypedDict, total=False):
+ metadata: Optional[Dict[str, Union[str, float, bool]]]
+ """User-provided metadata stored with the task group."""
diff --git a/src/parallel/types/task_group_events_params.py b/src/parallel/types/task_group_events_params.py
new file mode 100644
index 0000000..435e52a
--- /dev/null
+++ b/src/parallel/types/task_group_events_params.py
@@ -0,0 +1,16 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Annotated, TypedDict
+
+from .._utils import PropertyInfo
+
+__all__ = ["TaskGroupEventsParams"]
+
+
+class TaskGroupEventsParams(TypedDict, total=False):
+ last_event_id: Optional[str]
+
+ api_timeout: Annotated[Optional[float], PropertyInfo(alias="timeout")]
diff --git a/src/parallel/types/task_group_events_response.py b/src/parallel/types/task_group_events_response.py
new file mode 100644
index 0000000..c8a7b09
--- /dev/null
+++ b/src/parallel/types/task_group_events_response.py
@@ -0,0 +1,15 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .error_event import ErrorEvent
+from .task_run_event import TaskRunEvent
+from .task_group_status_event import TaskGroupStatusEvent
+
+__all__ = ["TaskGroupEventsResponse"]
+
+TaskGroupEventsResponse: TypeAlias = Annotated[
+ Union[TaskGroupStatusEvent, TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")
+]
diff --git a/src/parallel/types/task_group_get_runs_params.py b/src/parallel/types/task_group_get_runs_params.py
new file mode 100644
index 0000000..b6b1ef7
--- /dev/null
+++ b/src/parallel/types/task_group_get_runs_params.py
@@ -0,0 +1,18 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal, TypedDict
+
+__all__ = ["TaskGroupGetRunsParams"]
+
+
+class TaskGroupGetRunsParams(TypedDict, total=False):
+ include_input: bool
+
+ include_output: bool
+
+ last_event_id: Optional[str]
+
+ status: Optional[Literal["queued", "action_required", "running", "completed", "failed", "cancelling", "cancelled"]]
diff --git a/src/parallel/types/task_group_get_runs_response.py b/src/parallel/types/task_group_get_runs_response.py
new file mode 100644
index 0000000..932625d
--- /dev/null
+++ b/src/parallel/types/task_group_get_runs_response.py
@@ -0,0 +1,12 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
+
+from .._utils import PropertyInfo
+from .error_event import ErrorEvent
+from .task_run_event import TaskRunEvent
+
+__all__ = ["TaskGroupGetRunsResponse"]
+
+TaskGroupGetRunsResponse: TypeAlias = Annotated[Union[TaskRunEvent, ErrorEvent], PropertyInfo(discriminator="type")]
diff --git a/src/parallel/types/task_group_run_response.py b/src/parallel/types/task_group_run_response.py
new file mode 100644
index 0000000..2aa9d9e
--- /dev/null
+++ b/src/parallel/types/task_group_run_response.py
@@ -0,0 +1,32 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from .._models import BaseModel
+from .task_group_status import TaskGroupStatus
+
+__all__ = ["TaskGroupRunResponse"]
+
+
+class TaskGroupRunResponse(BaseModel):
+ """Response from adding new task runs to a task group."""
+
+ event_cursor: Optional[str] = None
+ """
+ Cursor for these runs in the event stream at
+ taskgroup/&lt;taskgroup_id&gt;/events?last_event_id=&lt;event_cursor&gt;. Empty for the
+ first runs in the group.
+ """
+
+ run_cursor: Optional[str] = None
+ """
+ Cursor for these runs in the run stream at
+ taskgroup/&lt;taskgroup_id&gt;/runs?last_event_id=&lt;run_cursor&gt;. Empty for the
+ first runs in the group.
+ """
+
+ run_ids: List[str]
+ """IDs of the newly created runs."""
+
+ status: TaskGroupStatus
+ """Status of the group."""
diff --git a/src/parallel/types/task_group_status.py b/src/parallel/types/task_group_status.py
new file mode 100644
index 0000000..25b573d
--- /dev/null
+++ b/src/parallel/types/task_group_status.py
@@ -0,0 +1,29 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, Optional
+
+from .._models import BaseModel
+
+__all__ = ["TaskGroupStatus"]
+
+
+class TaskGroupStatus(BaseModel):
+ """Status of a task group."""
+
+ is_active: bool
+ """True if at least one run in the group is currently active, i.e.
+
+ status is one of {'cancelling', 'queued', 'running'}.
+ """
+
+ modified_at: Optional[str] = None
+ """Timestamp of the last status update to the group, as an RFC 3339 string."""
+
+ num_task_runs: int
+ """Number of task runs in the group."""
+
+ status_message: Optional[str] = None
+ """Human-readable status message for the group."""
+
+ task_run_status_counts: Dict[str, int]
+ """Number of task runs with each status."""
diff --git a/src/parallel/types/task_group_status_event.py b/src/parallel/types/task_group_status_event.py
new file mode 100644
index 0000000..be76e81
--- /dev/null
+++ b/src/parallel/types/task_group_status_event.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from .._models import BaseModel
+from .task_group_status import TaskGroupStatus
+
+__all__ = ["TaskGroupStatusEvent"]
+
+
+class TaskGroupStatusEvent(BaseModel):
+ """Event indicating an update to group status."""
+
+ event_id: str
+ """Cursor to resume the event stream."""
+
+ status: TaskGroupStatus
+ """Task group status object."""
+
+ type: Literal["task_group_status"]
+ """Event type; always 'task_group_status'."""
diff --git a/src/parallel/types/task_run_create_params.py b/src/parallel/types/task_run_create_params.py
index be5d695..c70b241 100644
--- a/src/parallel/types/task_run_create_params.py
+++ b/src/parallel/types/task_run_create_params.py
@@ -11,8 +11,9 @@
from .mcp_server_param import McpServerParam
from .beta.parallel_beta_param import ParallelBetaParam
from .shared_params.source_policy import SourcePolicy
+from .task_advanced_settings_param import TaskAdvancedSettingsParam
-__all__ = ["TaskRunCreateParams", "AdvancedSettings"]
+__all__ = ["TaskRunCreateParams"]
class TaskRunCreateParams(TypedDict, total=False):
@@ -22,7 +23,7 @@ class TaskRunCreateParams(TypedDict, total=False):
processor: Required[str]
"""Processor to use for the task."""
- advanced_settings: Optional[AdvancedSettings]
+ advanced_settings: Optional[TaskAdvancedSettingsParam]
"""Advanced search configuration for a task run."""
enable_events: Optional[bool]
@@ -70,8 +71,6 @@ class TaskRunCreateParams(TypedDict, total=False):
"""Optional header to specify the beta version(s) to enable."""
-class AdvancedSettings(TypedDict, total=False):
- """Advanced search configuration for a task run."""
-
- location: Optional[str]
- """ISO 3166-1 alpha-2 country code for geo-targeted search results."""
+# Backwards-compat alias (deprecated). `AdvancedSettings` was an inline TypedDict
+# in this module; it now lives as the top-level `TaskAdvancedSettingsParam`.
+AdvancedSettings = TaskAdvancedSettingsParam
diff --git a/src/parallel/types/task_run_events_response.py b/src/parallel/types/task_run_events_response.py
index 20ded6e..a754b51 100644
--- a/src/parallel/types/task_run_events_response.py
+++ b/src/parallel/types/task_run_events_response.py
@@ -1,70 +1,24 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List, Union, Optional
-from typing_extensions import Literal, Annotated, TypeAlias
+from typing import Union
+from typing_extensions import Annotated, TypeAlias
from .._utils import PropertyInfo
-from .._models import BaseModel
from .error_event import ErrorEvent
from .task_run_event import TaskRunEvent
+from .task_run_progress_stats_event import TaskRunProgressStatsEvent
+from .task_run_progress_message_event import TaskRunProgressMessageEvent
-__all__ = [
- "TaskRunEventsResponse",
- "TaskRunProgressStatsEvent",
- "TaskRunProgressStatsEventSourceStats",
- "TaskRunProgressMessageEvent",
-]
-
-
-class TaskRunProgressStatsEventSourceStats(BaseModel):
- """Source stats describing progress so far."""
-
- num_sources_considered: Optional[int] = None
- """Number of sources considered in processing the task."""
-
- num_sources_read: Optional[int] = None
- """Number of sources read in processing the task."""
-
- sources_read_sample: Optional[List[str]] = None
- """A sample of URLs of sources read in processing the task."""
-
-
-class TaskRunProgressStatsEvent(BaseModel):
- """A progress update for a task run."""
-
- progress_meter: float
- """Completion percentage of the task run.
-
- Ranges from 0 to 100 where 0 indicates no progress and 100 indicates completion.
- """
-
- source_stats: TaskRunProgressStatsEventSourceStats
- """Source stats describing progress so far."""
-
- type: Literal["task_run.progress_stats"]
- """Event type; always 'task_run.progress_stats'."""
-
-
-class TaskRunProgressMessageEvent(BaseModel):
- """A message for a task run progress update."""
-
- message: str
- """Progress update message."""
-
- timestamp: Optional[str] = None
- """Timestamp of the message."""
-
- type: Literal[
- "task_run.progress_msg.plan",
- "task_run.progress_msg.search",
- "task_run.progress_msg.result",
- "task_run.progress_msg.tool_call",
- "task_run.progress_msg.exec_status",
- ]
- """Event type; always starts with 'task_run.progress_msg'."""
-
+__all__ = ["TaskRunEventsResponse"]
TaskRunEventsResponse: TypeAlias = Annotated[
Union[TaskRunProgressStatsEvent, TaskRunProgressMessageEvent, TaskRunEvent, ErrorEvent],
PropertyInfo(discriminator="type"),
]
+
+# Backwards-compat alias (deprecated). `TaskRunProgressStatsEventSourceStats`
+# was the auto-generated nested-class name for the source-stats payload; it now
+# lives as the top-level `TaskRunSourceStats` model.
+from .task_run_source_stats import TaskRunSourceStats # noqa: E402
+
+TaskRunProgressStatsEventSourceStats = TaskRunSourceStats
diff --git a/src/parallel/types/task_run_progress_message_event.py b/src/parallel/types/task_run_progress_message_event.py
new file mode 100644
index 0000000..5238e2d
--- /dev/null
+++ b/src/parallel/types/task_run_progress_message_event.py
@@ -0,0 +1,27 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["TaskRunProgressMessageEvent"]
+
+
+class TaskRunProgressMessageEvent(BaseModel):
+ """A message for a task run progress update."""
+
+ message: str
+ """Progress update message."""
+
+ timestamp: Optional[str] = None
+ """Timestamp of the message."""
+
+ type: Literal[
+ "task_run.progress_msg.plan",
+ "task_run.progress_msg.search",
+ "task_run.progress_msg.result",
+ "task_run.progress_msg.tool_call",
+ "task_run.progress_msg.exec_status",
+ ]
+ """Event type; always starts with 'task_run.progress_msg'."""
diff --git a/src/parallel/types/task_run_progress_stats_event.py b/src/parallel/types/task_run_progress_stats_event.py
new file mode 100644
index 0000000..aec2a73
--- /dev/null
+++ b/src/parallel/types/task_run_progress_stats_event.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing_extensions import Literal
+
+from .._models import BaseModel
+from .task_run_source_stats import TaskRunSourceStats
+
+__all__ = ["TaskRunProgressStatsEvent"]
+
+
+class TaskRunProgressStatsEvent(BaseModel):
+ """A progress update for a task run."""
+
+ progress_meter: float
+ """Completion percentage of the task run.
+
+ Ranges from 0 to 100 where 0 indicates no progress and 100 indicates completion.
+ """
+
+ source_stats: TaskRunSourceStats
+ """Source stats describing progress so far."""
+
+ type: Literal["task_run.progress_stats"]
+ """Event type; always 'task_run.progress_stats'."""
diff --git a/src/parallel/types/task_run_source_stats.py b/src/parallel/types/task_run_source_stats.py
new file mode 100644
index 0000000..e324ad8
--- /dev/null
+++ b/src/parallel/types/task_run_source_stats.py
@@ -0,0 +1,20 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from .._models import BaseModel
+
+__all__ = ["TaskRunSourceStats"]
+
+
+class TaskRunSourceStats(BaseModel):
+ """Source stats for a task run."""
+
+ num_sources_considered: Optional[int] = None
+ """Number of sources considered in processing the task."""
+
+ num_sources_read: Optional[int] = None
+ """Number of sources read in processing the task."""
+
+ sources_read_sample: Optional[List[str]] = None
+ """A sample of URLs of sources read in processing the task."""
diff --git a/src/parallel/types/update_monitor_event_stream_settings_param.py b/src/parallel/types/update_monitor_event_stream_settings_param.py
new file mode 100644
index 0000000..9b4566a
--- /dev/null
+++ b/src/parallel/types/update_monitor_event_stream_settings_param.py
@@ -0,0 +1,17 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import TypedDict
+
+from .advanced_monitor_settings_param import AdvancedMonitorSettingsParam
+
+__all__ = ["UpdateMonitorEventStreamSettingsParam"]
+
+
+class UpdateMonitorEventStreamSettingsParam(TypedDict, total=False):
+ """Type-specific update settings for an `event_stream` monitor."""
+
+ advanced_settings: Optional[AdvancedMonitorSettingsParam]
+ """Advanced monitor configuration."""
diff --git a/tests/api_resources/beta/test_findall.py b/tests/api_resources/beta/test_findall.py
index 3d73d48..a5daf07 100644
--- a/tests/api_resources/beta/test_findall.py
+++ b/tests/api_resources/beta/test_findall.py
@@ -159,7 +159,7 @@ def test_method_cancel(self, client: Parallel) -> None:
findall = client.beta.findall.cancel(
findall_id="findall_id",
)
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
def test_method_cancel_with_all_params(self, client: Parallel) -> None:
@@ -167,7 +167,7 @@ def test_method_cancel_with_all_params(self, client: Parallel) -> None:
findall_id="findall_id",
betas=["mcp-server-2025-07-17"],
)
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
def test_raw_response_cancel(self, client: Parallel) -> None:
@@ -178,7 +178,7 @@ def test_raw_response_cancel(self, client: Parallel) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
findall = response.parse()
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
def test_streaming_response_cancel(self, client: Parallel) -> None:
@@ -189,7 +189,7 @@ def test_streaming_response_cancel(self, client: Parallel) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
findall = response.parse()
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
assert cast(Any, response.is_closed) is True
@@ -712,7 +712,7 @@ async def test_method_cancel(self, async_client: AsyncParallel) -> None:
findall = await async_client.beta.findall.cancel(
findall_id="findall_id",
)
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
async def test_method_cancel_with_all_params(self, async_client: AsyncParallel) -> None:
@@ -720,7 +720,7 @@ async def test_method_cancel_with_all_params(self, async_client: AsyncParallel)
findall_id="findall_id",
betas=["mcp-server-2025-07-17"],
)
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
async def test_raw_response_cancel(self, async_client: AsyncParallel) -> None:
@@ -731,7 +731,7 @@ async def test_raw_response_cancel(self, async_client: AsyncParallel) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
findall = await response.parse()
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
@parametrize
async def test_streaming_response_cancel(self, async_client: AsyncParallel) -> None:
@@ -742,7 +742,7 @@ async def test_streaming_response_cancel(self, async_client: AsyncParallel) -> N
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
findall = await response.parse()
- assert_matches_type(object, findall, path=["response"])
+ assert findall is None
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/beta/test_task_group.py b/tests/api_resources/beta/test_task_group.py
index 9f30f48..add56d0 100644
--- a/tests/api_resources/beta/test_task_group.py
+++ b/tests/api_resources/beta/test_task_group.py
@@ -9,11 +9,10 @@
from parallel import Parallel, AsyncParallel
from tests.utils import assert_matches_type
+from parallel.types import TaskGroup, TaskGroupRunResponse
from parallel._utils import parse_date
-from parallel.types.beta import (
- TaskGroup,
- TaskGroupRunResponse,
-)
+
+# pyright: reportDeprecated=false
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -23,19 +22,24 @@ class TestTaskGroup:
@parametrize
def test_method_create(self, client: Parallel) -> None:
- task_group = client.beta.task_group.create()
+ with pytest.warns(DeprecationWarning):
+ task_group = client.beta.task_group.create()
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
def test_method_create_with_all_params(self, client: Parallel) -> None:
- task_group = client.beta.task_group.create(
- metadata={"foo": "string"},
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = client.beta.task_group.create(
+ metadata={"foo": "string"},
+ )
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
def test_raw_response_create(self, client: Parallel) -> None:
- response = client.beta.task_group.with_raw_response.create()
+ with pytest.warns(DeprecationWarning):
+ response = client.beta.task_group.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -44,27 +48,31 @@ def test_raw_response_create(self, client: Parallel) -> None:
@parametrize
def test_streaming_response_create(self, client: Parallel) -> None:
- with client.beta.task_group.with_streaming_response.create() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ with client.beta.task_group.with_streaming_response.create() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- task_group = response.parse()
- assert_matches_type(TaskGroup, task_group, path=["response"])
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_method_retrieve(self, client: Parallel) -> None:
- task_group = client.beta.task_group.retrieve(
- "taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = client.beta.task_group.retrieve(
+ "taskgroup_id",
+ )
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
def test_raw_response_retrieve(self, client: Parallel) -> None:
- response = client.beta.task_group.with_raw_response.retrieve(
- "taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = client.beta.task_group.with_raw_response.retrieve(
+ "taskgroup_id",
+ )
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -73,109 +81,116 @@ def test_raw_response_retrieve(self, client: Parallel) -> None:
@parametrize
def test_streaming_response_retrieve(self, client: Parallel) -> None:
- with client.beta.task_group.with_streaming_response.retrieve(
- "taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ with client.beta.task_group.with_streaming_response.retrieve(
+ "taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- task_group = response.parse()
- assert_matches_type(TaskGroup, task_group, path=["response"])
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
def test_path_params_retrieve(self, client: Parallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- client.beta.task_group.with_raw_response.retrieve(
- "",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.beta.task_group.with_raw_response.retrieve(
+ "",
+ )
@parametrize
def test_method_add_runs(self, client: Parallel) -> None:
- task_group = client.beta.task_group.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = client.beta.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
@parametrize
def test_method_add_runs_with_all_params(self, client: Parallel) -> None:
- task_group = client.beta.task_group.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- "advanced_settings": {"location": "us"},
- "enable_events": True,
- "mcp_servers": [
- {
- "name": "name",
- "url": "url",
- "allowed_tools": ["string"],
- "headers": {"foo": "string"},
- "type": "url",
- }
- ],
- "metadata": {"foo": "string"},
- "previous_interaction_id": "previous_interaction_id",
- "source_policy": {
- "after_date": parse_date("2024-01-01"),
- "exclude_domains": ["reddit.com", "x.com", ".ai"],
- "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
- },
- "task_spec": {
- "output_schema": {
- "json_schema": {
- "additionalProperties": "bar",
- "properties": "bar",
- "required": "bar",
- "type": "bar",
+ with pytest.warns(DeprecationWarning):
+ task_group = client.beta.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ "advanced_settings": {"location": "us"},
+ "enable_events": True,
+ "mcp_servers": [
+ {
+ "name": "name",
+ "url": "url",
+ "allowed_tools": ["string"],
+ "headers": {"foo": "string"},
+ "type": "url",
+ }
+ ],
+ "metadata": {"foo": "string"},
+ "previous_interaction_id": "previous_interaction_id",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ "task_spec": {
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
},
- "type": "json",
+ "input_schema": "string",
},
- "input_schema": "string",
- },
- "webhook": {
- "url": "url",
- "event_types": ["task_run.status"],
- },
- }
- ],
- refresh_status=True,
- default_task_spec={
- "output_schema": {
- "json_schema": {
- "additionalProperties": "bar",
- "properties": "bar",
- "required": "bar",
- "type": "bar",
+ "webhook": {
+ "url": "url",
+ "event_types": ["task_run.status"],
+ },
+ }
+ ],
+ refresh_status=True,
+ default_task_spec={
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
},
- "type": "json",
+ "input_schema": "string",
},
- "input_schema": "string",
- },
- betas=["mcp-server-2025-07-17"],
- )
+ betas=["mcp-server-2025-07-17"],
+ )
+
assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
@parametrize
def test_raw_response_add_runs(self, client: Parallel) -> None:
- response = client.beta.task_group.with_raw_response.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- )
+ with pytest.warns(DeprecationWarning):
+ response = client.beta.task_group.with_raw_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -184,57 +199,64 @@ def test_raw_response_add_runs(self, client: Parallel) -> None:
@parametrize
def test_streaming_response_add_runs(self, client: Parallel) -> None:
- with client.beta.task_group.with_streaming_response.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- task_group = response.parse()
- assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- def test_path_params_add_runs(self, client: Parallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- client.beta.task_group.with_raw_response.add_runs(
- task_group_id="",
+ with pytest.warns(DeprecationWarning):
+ with client.beta.task_group.with_streaming_response.add_runs(
+ task_group_id="taskgroup_id",
inputs=[
{
"input": "What was the GDP of France in 2023?",
"processor": "base",
}
],
- )
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_add_runs(self, client: Parallel) -> None:
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.beta.task_group.with_raw_response.add_runs(
+ task_group_id="",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
@parametrize
def test_method_events(self, client: Parallel) -> None:
- task_group_stream = client.beta.task_group.events(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = client.beta.task_group.events(
+ task_group_id="taskgroup_id",
+ )
+
task_group_stream.response.close()
@parametrize
def test_method_events_with_all_params(self, client: Parallel) -> None:
- task_group_stream = client.beta.task_group.events(
- task_group_id="taskgroup_id",
- last_event_id="last_event_id",
- api_timeout=0,
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = client.beta.task_group.events(
+ task_group_id="taskgroup_id",
+ last_event_id="last_event_id",
+ api_timeout=0,
+ )
+
task_group_stream.response.close()
@parametrize
def test_raw_response_events(self, client: Parallel) -> None:
- response = client.beta.task_group.with_raw_response.events(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = client.beta.task_group.with_raw_response.events(
+ task_group_id="taskgroup_id",
+ )
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
stream = response.parse()
@@ -242,47 +264,54 @@ def test_raw_response_events(self, client: Parallel) -> None:
@parametrize
def test_streaming_response_events(self, client: Parallel) -> None:
- with client.beta.task_group.with_streaming_response.events(
- task_group_id="taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ with client.beta.task_group.with_streaming_response.events(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = response.parse()
- stream.close()
+ stream = response.parse()
+ stream.close()
assert cast(Any, response.is_closed) is True
@parametrize
def test_path_params_events(self, client: Parallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- client.beta.task_group.with_raw_response.events(
- task_group_id="",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.beta.task_group.with_raw_response.events(
+ task_group_id="",
+ )
@parametrize
def test_method_get_runs(self, client: Parallel) -> None:
- task_group_stream = client.beta.task_group.get_runs(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = client.beta.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ )
+
task_group_stream.response.close()
@parametrize
def test_method_get_runs_with_all_params(self, client: Parallel) -> None:
- task_group_stream = client.beta.task_group.get_runs(
- task_group_id="taskgroup_id",
- include_input=True,
- include_output=True,
- last_event_id="last_event_id",
- status="queued",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = client.beta.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ include_input=True,
+ include_output=True,
+ last_event_id="last_event_id",
+ status="queued",
+ )
+
task_group_stream.response.close()
@parametrize
def test_raw_response_get_runs(self, client: Parallel) -> None:
- response = client.beta.task_group.with_raw_response.get_runs(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = client.beta.task_group.with_raw_response.get_runs(
+ task_group_id="taskgroup_id",
+ )
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
stream = response.parse()
@@ -290,23 +319,25 @@ def test_raw_response_get_runs(self, client: Parallel) -> None:
@parametrize
def test_streaming_response_get_runs(self, client: Parallel) -> None:
- with client.beta.task_group.with_streaming_response.get_runs(
- task_group_id="taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ with client.beta.task_group.with_streaming_response.get_runs(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = response.parse()
- stream.close()
+ stream = response.parse()
+ stream.close()
assert cast(Any, response.is_closed) is True
@parametrize
def test_path_params_get_runs(self, client: Parallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- client.beta.task_group.with_raw_response.get_runs(
- task_group_id="",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.beta.task_group.with_raw_response.get_runs(
+ task_group_id="",
+ )
class TestAsyncTaskGroup:
@@ -316,19 +347,24 @@ class TestAsyncTaskGroup:
@parametrize
async def test_method_create(self, async_client: AsyncParallel) -> None:
- task_group = await async_client.beta.task_group.create()
+ with pytest.warns(DeprecationWarning):
+ task_group = await async_client.beta.task_group.create()
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None:
- task_group = await async_client.beta.task_group.create(
- metadata={"foo": "string"},
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = await async_client.beta.task_group.create(
+ metadata={"foo": "string"},
+ )
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
async def test_raw_response_create(self, async_client: AsyncParallel) -> None:
- response = await async_client.beta.task_group.with_raw_response.create()
+ with pytest.warns(DeprecationWarning):
+ response = await async_client.beta.task_group.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -337,27 +373,31 @@ async def test_raw_response_create(self, async_client: AsyncParallel) -> None:
@parametrize
async def test_streaming_response_create(self, async_client: AsyncParallel) -> None:
- async with async_client.beta.task_group.with_streaming_response.create() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ async with async_client.beta.task_group.with_streaming_response.create() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- task_group = await response.parse()
- assert_matches_type(TaskGroup, task_group, path=["response"])
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_method_retrieve(self, async_client: AsyncParallel) -> None:
- task_group = await async_client.beta.task_group.retrieve(
- "taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = await async_client.beta.task_group.retrieve(
+ "taskgroup_id",
+ )
+
assert_matches_type(TaskGroup, task_group, path=["response"])
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None:
- response = await async_client.beta.task_group.with_raw_response.retrieve(
- "taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = await async_client.beta.task_group.with_raw_response.retrieve(
+ "taskgroup_id",
+ )
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -366,109 +406,116 @@ async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None:
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None:
- async with async_client.beta.task_group.with_streaming_response.retrieve(
- "taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ async with async_client.beta.task_group.with_streaming_response.retrieve(
+ "taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- task_group = await response.parse()
- assert_matches_type(TaskGroup, task_group, path=["response"])
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- await async_client.beta.task_group.with_raw_response.retrieve(
- "",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.beta.task_group.with_raw_response.retrieve(
+ "",
+ )
@parametrize
async def test_method_add_runs(self, async_client: AsyncParallel) -> None:
- task_group = await async_client.beta.task_group.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- )
+ with pytest.warns(DeprecationWarning):
+ task_group = await async_client.beta.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
@parametrize
async def test_method_add_runs_with_all_params(self, async_client: AsyncParallel) -> None:
- task_group = await async_client.beta.task_group.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- "advanced_settings": {"location": "us"},
- "enable_events": True,
- "mcp_servers": [
- {
- "name": "name",
- "url": "url",
- "allowed_tools": ["string"],
- "headers": {"foo": "string"},
- "type": "url",
- }
- ],
- "metadata": {"foo": "string"},
- "previous_interaction_id": "previous_interaction_id",
- "source_policy": {
- "after_date": parse_date("2024-01-01"),
- "exclude_domains": ["reddit.com", "x.com", ".ai"],
- "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
- },
- "task_spec": {
- "output_schema": {
- "json_schema": {
- "additionalProperties": "bar",
- "properties": "bar",
- "required": "bar",
- "type": "bar",
+ with pytest.warns(DeprecationWarning):
+ task_group = await async_client.beta.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ "advanced_settings": {"location": "us"},
+ "enable_events": True,
+ "mcp_servers": [
+ {
+ "name": "name",
+ "url": "url",
+ "allowed_tools": ["string"],
+ "headers": {"foo": "string"},
+ "type": "url",
+ }
+ ],
+ "metadata": {"foo": "string"},
+ "previous_interaction_id": "previous_interaction_id",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ "task_spec": {
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
},
- "type": "json",
+ "input_schema": "string",
},
- "input_schema": "string",
- },
- "webhook": {
- "url": "url",
- "event_types": ["task_run.status"],
- },
- }
- ],
- refresh_status=True,
- default_task_spec={
- "output_schema": {
- "json_schema": {
- "additionalProperties": "bar",
- "properties": "bar",
- "required": "bar",
- "type": "bar",
+ "webhook": {
+ "url": "url",
+ "event_types": ["task_run.status"],
+ },
+ }
+ ],
+ refresh_status=True,
+ default_task_spec={
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
},
- "type": "json",
+ "input_schema": "string",
},
- "input_schema": "string",
- },
- betas=["mcp-server-2025-07-17"],
- )
+ betas=["mcp-server-2025-07-17"],
+ )
+
assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
@parametrize
async def test_raw_response_add_runs(self, async_client: AsyncParallel) -> None:
- response = await async_client.beta.task_group.with_raw_response.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- )
+ with pytest.warns(DeprecationWarning):
+ response = await async_client.beta.task_group.with_raw_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -477,57 +524,64 @@ async def test_raw_response_add_runs(self, async_client: AsyncParallel) -> None:
@parametrize
async def test_streaming_response_add_runs(self, async_client: AsyncParallel) -> None:
- async with async_client.beta.task_group.with_streaming_response.add_runs(
- task_group_id="taskgroup_id",
- inputs=[
- {
- "input": "What was the GDP of France in 2023?",
- "processor": "base",
- }
- ],
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- task_group = await response.parse()
- assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
- @parametrize
- async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- await async_client.beta.task_group.with_raw_response.add_runs(
- task_group_id="",
+ with pytest.warns(DeprecationWarning):
+ async with async_client.beta.task_group.with_streaming_response.add_runs(
+ task_group_id="taskgroup_id",
inputs=[
{
"input": "What was the GDP of France in 2023?",
"processor": "base",
}
],
- )
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = await response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None:
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.beta.task_group.with_raw_response.add_runs(
+ task_group_id="",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
@parametrize
async def test_method_events(self, async_client: AsyncParallel) -> None:
- task_group_stream = await async_client.beta.task_group.events(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = await async_client.beta.task_group.events(
+ task_group_id="taskgroup_id",
+ )
+
await task_group_stream.response.aclose()
@parametrize
async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None:
- task_group_stream = await async_client.beta.task_group.events(
- task_group_id="taskgroup_id",
- last_event_id="last_event_id",
- api_timeout=0,
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = await async_client.beta.task_group.events(
+ task_group_id="taskgroup_id",
+ last_event_id="last_event_id",
+ api_timeout=0,
+ )
+
await task_group_stream.response.aclose()
@parametrize
async def test_raw_response_events(self, async_client: AsyncParallel) -> None:
- response = await async_client.beta.task_group.with_raw_response.events(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = await async_client.beta.task_group.with_raw_response.events(
+ task_group_id="taskgroup_id",
+ )
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
stream = await response.parse()
@@ -535,47 +589,54 @@ async def test_raw_response_events(self, async_client: AsyncParallel) -> None:
@parametrize
async def test_streaming_response_events(self, async_client: AsyncParallel) -> None:
- async with async_client.beta.task_group.with_streaming_response.events(
- task_group_id="taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ async with async_client.beta.task_group.with_streaming_response.events(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = await response.parse()
- await stream.close()
+ stream = await response.parse()
+ await stream.close()
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_events(self, async_client: AsyncParallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- await async_client.beta.task_group.with_raw_response.events(
- task_group_id="",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.beta.task_group.with_raw_response.events(
+ task_group_id="",
+ )
@parametrize
async def test_method_get_runs(self, async_client: AsyncParallel) -> None:
- task_group_stream = await async_client.beta.task_group.get_runs(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = await async_client.beta.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ )
+
await task_group_stream.response.aclose()
@parametrize
async def test_method_get_runs_with_all_params(self, async_client: AsyncParallel) -> None:
- task_group_stream = await async_client.beta.task_group.get_runs(
- task_group_id="taskgroup_id",
- include_input=True,
- include_output=True,
- last_event_id="last_event_id",
- status="queued",
- )
+ with pytest.warns(DeprecationWarning):
+ task_group_stream = await async_client.beta.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ include_input=True,
+ include_output=True,
+ last_event_id="last_event_id",
+ status="queued",
+ )
+
await task_group_stream.response.aclose()
@parametrize
async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None:
- response = await async_client.beta.task_group.with_raw_response.get_runs(
- task_group_id="taskgroup_id",
- )
+ with pytest.warns(DeprecationWarning):
+ response = await async_client.beta.task_group.with_raw_response.get_runs(
+ task_group_id="taskgroup_id",
+ )
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
stream = await response.parse()
@@ -583,20 +644,22 @@ async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None:
@parametrize
async def test_streaming_response_get_runs(self, async_client: AsyncParallel) -> None:
- async with async_client.beta.task_group.with_streaming_response.get_runs(
- task_group_id="taskgroup_id",
- ) as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ with pytest.warns(DeprecationWarning):
+ async with async_client.beta.task_group.with_streaming_response.get_runs(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- stream = await response.parse()
- await stream.close()
+ stream = await response.parse()
+ await stream.close()
assert cast(Any, response.is_closed) is True
@parametrize
async def test_path_params_get_runs(self, async_client: AsyncParallel) -> None:
- with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
- await async_client.beta.task_group.with_raw_response.get_runs(
- task_group_id="",
- )
+ with pytest.warns(DeprecationWarning):
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.beta.task_group.with_raw_response.get_runs(
+ task_group_id="",
+ )
diff --git a/tests/api_resources/test_monitor.py b/tests/api_resources/test_monitor.py
new file mode 100644
index 0000000..4e22cf3
--- /dev/null
+++ b/tests/api_resources/test_monitor.py
@@ -0,0 +1,705 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import os
+from typing import Any, cast
+
+import pytest
+
+from parallel import Parallel, AsyncParallel
+from tests.utils import assert_matches_type
+from parallel.types import (
+ Monitor,
+ PaginatedMonitorEvents,
+ PaginatedMonitorResponse,
+)
+from parallel._utils import parse_date
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+
+class TestMonitor:
+ parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+
+ @parametrize
+ def test_method_create(self, client: Parallel) -> None:
+ monitor = client.monitor.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_method_create_with_all_params(self, client: Parallel) -> None:
+ monitor = client.monitor.create(
+ frequency="1h",
+ settings={
+ "query": "Extract recent news about AI",
+ "advanced_settings": {
+ "location": "us",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ },
+ "include_backfill": True,
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ },
+ type="event_stream",
+ metadata={
+ "slack_thread_id": "1234567890.123456",
+ "user_id": "U123ABC",
+ },
+ processor="lite",
+ webhook={
+ "url": "https://example.com/webhook",
+ "event_types": ["monitor.event.detected"],
+ },
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_create(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_create(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_method_retrieve(self, client: Parallel) -> None:
+ monitor = client.monitor.retrieve(
+ "monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.retrieve(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.retrieve(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ client.monitor.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ def test_method_update(self, client: Parallel) -> None:
+ monitor = client.monitor.update(
+ monitor_id="monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_method_update_with_all_params(self, client: Parallel) -> None:
+ monitor = client.monitor.update(
+ monitor_id="monitor_id",
+ frequency="1h",
+ metadata={
+ "slack_thread_id": "1234567890.123456",
+ "user_id": "U123ABC",
+ },
+ settings={
+ "advanced_settings": {
+ "location": "us",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ }
+ },
+ type="event_stream",
+ webhook={
+ "url": "https://example.com/webhook",
+ "event_types": ["monitor.event.detected"],
+ },
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_update(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.update(
+ monitor_id="monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_update(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.update(
+ monitor_id="monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_update(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ client.monitor.with_raw_response.update(
+ monitor_id="",
+ )
+
+ @parametrize
+ def test_method_list(self, client: Parallel) -> None:
+ monitor = client.monitor.list()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ def test_method_list_with_all_params(self, client: Parallel) -> None:
+ monitor = client.monitor.list(
+ cursor="cursor",
+ limit=1,
+ status=["active"],
+ type=["event_stream"],
+ )
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_list(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_list(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_method_cancel(self, client: Parallel) -> None:
+ monitor = client.monitor.cancel(
+ "monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_cancel(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.cancel(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_cancel(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.cancel(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_cancel(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ client.monitor.with_raw_response.cancel(
+ "",
+ )
+
+ @parametrize
+ def test_method_events(self, client: Parallel) -> None:
+ monitor = client.monitor.events(
+ monitor_id="monitor_id",
+ )
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ def test_method_events_with_all_params(self, client: Parallel) -> None:
+ monitor = client.monitor.events(
+ monitor_id="monitor_id",
+ cursor="cursor",
+ event_group_id="event_group_id",
+ include_completions=True,
+ limit=1,
+ )
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ def test_raw_response_events(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.events(
+ monitor_id="monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ def test_streaming_response_events(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.events(
+ monitor_id="monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_events(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ client.monitor.with_raw_response.events(
+ monitor_id="",
+ )
+
+ @parametrize
+ def test_method_trigger(self, client: Parallel) -> None:
+ monitor = client.monitor.trigger(
+ "monitor_id",
+ )
+ assert monitor is None
+
+ @parametrize
+ def test_raw_response_trigger(self, client: Parallel) -> None:
+ response = client.monitor.with_raw_response.trigger(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = response.parse()
+ assert monitor is None
+
+ @parametrize
+ def test_streaming_response_trigger(self, client: Parallel) -> None:
+ with client.monitor.with_streaming_response.trigger(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = response.parse()
+ assert monitor is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_trigger(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ client.monitor.with_raw_response.trigger(
+ "",
+ )
+
+
+class TestAsyncMonitor:
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
+
+ @parametrize
+ async def test_method_create(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.create(
+ frequency="1h",
+ settings={
+ "query": "Extract recent news about AI",
+ "advanced_settings": {
+ "location": "us",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ },
+ "include_backfill": True,
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ },
+ type="event_stream",
+ metadata={
+ "slack_thread_id": "1234567890.123456",
+ "user_id": "U123ABC",
+ },
+ processor="lite",
+ webhook={
+ "url": "https://example.com/webhook",
+ "event_types": ["monitor.event.detected"],
+ },
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_create(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_create(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.create(
+ frequency="1h",
+ settings={"query": "Extract recent news about AI"},
+ type="event_stream",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_method_retrieve(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.retrieve(
+ "monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.retrieve(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.retrieve(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ await async_client.monitor.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ async def test_method_update(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.update(
+ monitor_id="monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_method_update_with_all_params(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.update(
+ monitor_id="monitor_id",
+ frequency="1h",
+ metadata={
+ "slack_thread_id": "1234567890.123456",
+ "user_id": "U123ABC",
+ },
+ settings={
+ "advanced_settings": {
+ "location": "us",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ }
+ },
+ type="event_stream",
+ webhook={
+ "url": "https://example.com/webhook",
+ "event_types": ["monitor.event.detected"],
+ },
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_update(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.update(
+ monitor_id="monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_update(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.update(
+ monitor_id="monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_update(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ await async_client.monitor.with_raw_response.update(
+ monitor_id="",
+ )
+
+ @parametrize
+ async def test_method_list(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.list()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ async def test_method_list_with_all_params(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.list(
+ cursor="cursor",
+ limit=1,
+ status=["active"],
+ type=["event_stream"],
+ )
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_list(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.list()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_list(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.list() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(PaginatedMonitorResponse, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_method_cancel(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.cancel(
+ "monitor_id",
+ )
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_cancel(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.cancel(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_cancel(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.cancel(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(Monitor, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_cancel(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ await async_client.monitor.with_raw_response.cancel(
+ "",
+ )
+
+ @parametrize
+ async def test_method_events(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.events(
+ monitor_id="monitor_id",
+ )
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.events(
+ monitor_id="monitor_id",
+ cursor="cursor",
+ event_group_id="event_group_id",
+ include_completions=True,
+ limit=1,
+ )
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ async def test_raw_response_events(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.events(
+ monitor_id="monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_events(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.events(
+ monitor_id="monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert_matches_type(PaginatedMonitorEvents, monitor, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_events(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ await async_client.monitor.with_raw_response.events(
+ monitor_id="",
+ )
+
+ @parametrize
+ async def test_method_trigger(self, async_client: AsyncParallel) -> None:
+ monitor = await async_client.monitor.trigger(
+ "monitor_id",
+ )
+ assert monitor is None
+
+ @parametrize
+ async def test_raw_response_trigger(self, async_client: AsyncParallel) -> None:
+ response = await async_client.monitor.with_raw_response.trigger(
+ "monitor_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ monitor = await response.parse()
+ assert monitor is None
+
+ @parametrize
+ async def test_streaming_response_trigger(self, async_client: AsyncParallel) -> None:
+ async with async_client.monitor.with_streaming_response.trigger(
+ "monitor_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ monitor = await response.parse()
+ assert monitor is None
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_trigger(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `monitor_id` but received ''"):
+ await async_client.monitor.with_raw_response.trigger(
+ "",
+ )
diff --git a/tests/api_resources/test_task_group.py b/tests/api_resources/test_task_group.py
new file mode 100644
index 0000000..29ae421
--- /dev/null
+++ b/tests/api_resources/test_task_group.py
@@ -0,0 +1,699 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+import os
+from typing import Any, cast
+
+import pytest
+
+from parallel import Parallel, AsyncParallel
+from tests.utils import assert_matches_type
+from parallel.types import (
+ TaskRun,
+ TaskGroup,
+ TaskGroupRunResponse,
+)
+from parallel._utils import parse_date
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+
+class TestTaskGroup:
+ parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+
+ @parametrize
+ def test_method_create(self, client: Parallel) -> None:
+ task_group = client.task_group.create()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ def test_method_create_with_all_params(self, client: Parallel) -> None:
+ task_group = client.task_group.create(
+ metadata={"foo": "string"},
+ )
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ def test_raw_response_create(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.create()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ def test_streaming_response_create(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.create() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_method_retrieve(self, client: Parallel) -> None:
+ task_group = client.task_group.retrieve(
+ "taskgroup_id",
+ )
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.retrieve(
+ "taskgroup_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.retrieve(
+ "taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.task_group.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ def test_method_add_runs(self, client: Parallel) -> None:
+ task_group = client.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ def test_method_add_runs_with_all_params(self, client: Parallel) -> None:
+ task_group = client.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ "advanced_settings": {"location": "us"},
+ "enable_events": True,
+ "mcp_servers": [
+ {
+ "name": "name",
+ "url": "url",
+ "allowed_tools": ["string"],
+ "headers": {"foo": "string"},
+ "type": "url",
+ }
+ ],
+ "metadata": {"foo": "string"},
+ "previous_interaction_id": "previous_interaction_id",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ "task_spec": {
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ "input_schema": "string",
+ },
+ "webhook": {
+ "url": "url",
+ "event_types": ["task_run.status"],
+ },
+ }
+ ],
+ refresh_status=True,
+ default_task_spec={
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ "input_schema": "string",
+ },
+ betas=["mcp-server-2025-07-17"],
+ )
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ def test_raw_response_add_runs(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ def test_streaming_response_add_runs(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_add_runs(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.task_group.with_raw_response.add_runs(
+ task_group_id="",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
+ @parametrize
+ def test_method_events(self, client: Parallel) -> None:
+ task_group_stream = client.task_group.events(
+ task_group_id="taskgroup_id",
+ )
+ task_group_stream.response.close()
+
+ @parametrize
+ def test_method_events_with_all_params(self, client: Parallel) -> None:
+ task_group_stream = client.task_group.events(
+ task_group_id="taskgroup_id",
+ last_event_id="last_event_id",
+ api_timeout=0,
+ )
+ task_group_stream.response.close()
+
+ @parametrize
+ def test_raw_response_events(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.events(
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = response.parse()
+ stream.close()
+
+ @parametrize
+ def test_streaming_response_events(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.events(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = response.parse()
+ stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_events(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.task_group.with_raw_response.events(
+ task_group_id="",
+ )
+
+ @parametrize
+ def test_method_get_runs(self, client: Parallel) -> None:
+ task_group_stream = client.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ )
+ task_group_stream.response.close()
+
+ @parametrize
+ def test_method_get_runs_with_all_params(self, client: Parallel) -> None:
+ task_group_stream = client.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ include_input=True,
+ include_output=True,
+ last_event_id="last_event_id",
+ status="queued",
+ )
+ task_group_stream.response.close()
+
+ @parametrize
+ def test_raw_response_get_runs(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.get_runs(
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = response.parse()
+ stream.close()
+
+ @parametrize
+ def test_streaming_response_get_runs(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.get_runs(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = response.parse()
+ stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_get_runs(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.task_group.with_raw_response.get_runs(
+ task_group_id="",
+ )
+
+ @parametrize
+ def test_method_retrieve_run(self, client: Parallel) -> None:
+ task_group = client.task_group.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ )
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve_run(self, client: Parallel) -> None:
+ response = client.task_group.with_raw_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = response.parse()
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve_run(self, client: Parallel) -> None:
+ with client.task_group.with_streaming_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = response.parse()
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve_run(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ client.task_group.with_raw_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
+ client.task_group.with_raw_response.retrieve_run(
+ run_id="",
+ task_group_id="taskgroup_id",
+ )
+
+
+class TestAsyncTaskGroup:
+ parametrize = pytest.mark.parametrize(
+ "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
+ )
+
+ @parametrize
+ async def test_method_create(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.create()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ async def test_method_create_with_all_params(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.create(
+ metadata={"foo": "string"},
+ )
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ async def test_raw_response_create(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.create()
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_create(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.create() as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_method_retrieve(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.retrieve(
+ "taskgroup_id",
+ )
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.retrieve(
+ "taskgroup_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.retrieve(
+ "taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = await response.parse()
+ assert_matches_type(TaskGroup, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.task_group.with_raw_response.retrieve(
+ "",
+ )
+
+ @parametrize
+ async def test_method_add_runs(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ async def test_method_add_runs_with_all_params(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ "advanced_settings": {"location": "us"},
+ "enable_events": True,
+ "mcp_servers": [
+ {
+ "name": "name",
+ "url": "url",
+ "allowed_tools": ["string"],
+ "headers": {"foo": "string"},
+ "type": "url",
+ }
+ ],
+ "metadata": {"foo": "string"},
+ "previous_interaction_id": "previous_interaction_id",
+ "source_policy": {
+ "after_date": parse_date("2024-01-01"),
+ "exclude_domains": ["reddit.com", "x.com", ".ai"],
+ "include_domains": ["wikipedia.org", "usa.gov", ".edu"],
+ },
+ "task_spec": {
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ "input_schema": "string",
+ },
+ "webhook": {
+ "url": "url",
+ "event_types": ["task_run.status"],
+ },
+ }
+ ],
+ refresh_status=True,
+ default_task_spec={
+ "output_schema": {
+ "json_schema": {
+ "additionalProperties": "bar",
+ "properties": "bar",
+ "required": "bar",
+ "type": "bar",
+ },
+ "type": "json",
+ },
+ "input_schema": "string",
+ },
+ betas=["mcp-server-2025-07-17"],
+ )
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ async def test_raw_response_add_runs(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = await response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_add_runs(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.add_runs(
+ task_group_id="taskgroup_id",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = await response.parse()
+ assert_matches_type(TaskGroupRunResponse, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_add_runs(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.task_group.with_raw_response.add_runs(
+ task_group_id="",
+ inputs=[
+ {
+ "input": "What was the GDP of France in 2023?",
+ "processor": "base",
+ }
+ ],
+ )
+
+ @parametrize
+ async def test_method_events(self, async_client: AsyncParallel) -> None:
+ task_group_stream = await async_client.task_group.events(
+ task_group_id="taskgroup_id",
+ )
+ await task_group_stream.response.aclose()
+
+ @parametrize
+ async def test_method_events_with_all_params(self, async_client: AsyncParallel) -> None:
+ task_group_stream = await async_client.task_group.events(
+ task_group_id="taskgroup_id",
+ last_event_id="last_event_id",
+ api_timeout=0,
+ )
+ await task_group_stream.response.aclose()
+
+ @parametrize
+ async def test_raw_response_events(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.events(
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = await response.parse()
+ await stream.close()
+
+ @parametrize
+ async def test_streaming_response_events(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.events(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = await response.parse()
+ await stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_events(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.task_group.with_raw_response.events(
+ task_group_id="",
+ )
+
+ @parametrize
+ async def test_method_get_runs(self, async_client: AsyncParallel) -> None:
+ task_group_stream = await async_client.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ )
+ await task_group_stream.response.aclose()
+
+ @parametrize
+ async def test_method_get_runs_with_all_params(self, async_client: AsyncParallel) -> None:
+ task_group_stream = await async_client.task_group.get_runs(
+ task_group_id="taskgroup_id",
+ include_input=True,
+ include_output=True,
+ last_event_id="last_event_id",
+ status="queued",
+ )
+ await task_group_stream.response.aclose()
+
+ @parametrize
+ async def test_raw_response_get_runs(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.get_runs(
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ stream = await response.parse()
+ await stream.close()
+
+ @parametrize
+ async def test_streaming_response_get_runs(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.get_runs(
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ stream = await response.parse()
+ await stream.close()
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_get_runs(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.task_group.with_raw_response.get_runs(
+ task_group_id="",
+ )
+
+ @parametrize
+ async def test_method_retrieve_run(self, async_client: AsyncParallel) -> None:
+ task_group = await async_client.task_group.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ )
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve_run(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_group.with_raw_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_group = await response.parse()
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve_run(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_group.with_streaming_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="taskgroup_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_group = await response.parse()
+ assert_matches_type(TaskRun, task_group, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve_run(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_group_id` but received ''"):
+ await async_client.task_group.with_raw_response.retrieve_run(
+ run_id="run_id",
+ task_group_id="",
+ )
+
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
+ await async_client.task_group.with_raw_response.retrieve_run(
+ run_id="",
+ task_group_id="taskgroup_id",
+ )
diff --git a/tests/api_resources/test_task_run.py b/tests/api_resources/test_task_run.py
index 4bb9bd4..a41628a 100644
--- a/tests/api_resources/test_task_run.py
+++ b/tests/api_resources/test_task_run.py
@@ -11,6 +11,7 @@
from tests.utils import assert_matches_type
from parallel.types import (
TaskRun,
+ RunInput,
TaskRunResult,
)
from parallel._utils import parse_date
@@ -220,6 +221,44 @@ def test_path_params_result(self, client: Parallel) -> None:
run_id="",
)
+ @parametrize
+ def test_method_retrieve_input(self, client: Parallel) -> None:
+ task_run = client.task_run.retrieve_input(
+ "run_id",
+ )
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve_input(self, client: Parallel) -> None:
+ response = client.task_run.with_raw_response.retrieve_input(
+ "run_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_run = response.parse()
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve_input(self, client: Parallel) -> None:
+ with client.task_run.with_streaming_response.retrieve_input(
+ "run_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_run = response.parse()
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve_input(self, client: Parallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
+ client.task_run.with_raw_response.retrieve_input(
+ "",
+ )
+
class TestAsyncTaskRun:
parametrize = pytest.mark.parametrize(
@@ -424,3 +463,41 @@ async def test_path_params_result(self, async_client: AsyncParallel) -> None:
await async_client.task_run.with_raw_response.result(
run_id="",
)
+
+ @parametrize
+ async def test_method_retrieve_input(self, async_client: AsyncParallel) -> None:
+ task_run = await async_client.task_run.retrieve_input(
+ "run_id",
+ )
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve_input(self, async_client: AsyncParallel) -> None:
+ response = await async_client.task_run.with_raw_response.retrieve_input(
+ "run_id",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ task_run = await response.parse()
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve_input(self, async_client: AsyncParallel) -> None:
+ async with async_client.task_run.with_streaming_response.retrieve_input(
+ "run_id",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ task_run = await response.parse()
+ assert_matches_type(RunInput, task_run, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve_input(self, async_client: AsyncParallel) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"):
+ await async_client.task_run.with_raw_response.retrieve_input(
+ "",
+ )
diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py
index d9bb4c0..61b6cde 100644
--- a/tests/test_extract_files.py
+++ b/tests/test_extract_files.py
@@ -4,7 +4,7 @@
import pytest
-from parallel._types import FileTypes
+from parallel._types import FileTypes, ArrayFormat
from parallel._utils import extract_files
@@ -37,10 +37,7 @@ def test_multiple_files() -> None:
def test_top_level_file_array() -> None:
query = {"files": [b"file one", b"file two"], "title": "hello"}
- assert extract_files(query, paths=[["files", ""]]) == [
- ("files[]", b"file one"),
- ("files[]", b"file two"),
- ]
+ assert extract_files(query, paths=[["files", ""]]) == [("files[]", b"file one"), ("files[]", b"file two")]
assert query == {"title": "hello"}
@@ -71,3 +68,24 @@ def test_ignores_incorrect_paths(
expected: list[tuple[str, FileTypes]],
) -> None:
assert extract_files(query, paths=paths) == expected
+
+
+@pytest.mark.parametrize(
+ "array_format,expected_top_level,expected_nested",
+ [
+ ("brackets", [("files[]", b"a"), ("files[]", b"b")], [("items[][file]", b"a"), ("items[][file]", b"b")]),
+ ("repeat", [("files", b"a"), ("files", b"b")], [("items[file]", b"a"), ("items[file]", b"b")]),
+ ("comma", [("files", b"a"), ("files", b"b")], [("items[file]", b"a"), ("items[file]", b"b")]),
+ ("indices", [("files[0]", b"a"), ("files[1]", b"b")], [("items[0][file]", b"a"), ("items[1][file]", b"b")]),
+ ],
+)
+def test_array_format_controls_file_field_names(
+ array_format: ArrayFormat,
+ expected_top_level: list[tuple[str, FileTypes]],
+ expected_nested: list[tuple[str, FileTypes]],
+) -> None:
+ top_level = {"files": [b"a", b"b"]}
+ assert extract_files(top_level, paths=[["files", ""]], array_format=array_format) == expected_top_level
+
+ nested = {"items": [{"file": b"a"}, {"file": b"b"}]}
+ assert extract_files(nested, paths=[["items", "", "file"]], array_format=array_format) == expected_nested
diff --git a/tests/test_files.py b/tests/test_files.py
index f488947..1388074 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -131,7 +131,7 @@ def test_extract_files_does_not_mutate_original_nested_array_path(self) -> None:
copied = deepcopy_with_paths(original, [["items", "", "file"]])
extracted = extract_files(copied, paths=[["items", "", "file"]])
- assert extracted == [("items[][file]", file1), ("items[][file]", file2)]
+ assert [entry for _, entry in extracted] == [file1, file2]
assert original == {
"items": [
{"file": file1, "extra": 1},