Skip to content
15 changes: 15 additions & 0 deletions inference/core/entities/responses/workflows.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,20 @@ class ExternalOperationDescription(BaseModel):
nested_operation_output_kind: Optional[List[str]] = None
description: Optional[str] = None

property_name_options: Optional[List[str]] = Field(
default=None,
description=(
"List of possible property names. \
Optional parameter for operations extracting property values from data. "
),
examples=[
"size",
"height",
"width",
"aspect_ratio",
],
)

@classmethod
def from_internal_entity(
cls, operation_description: OperationDescription
Expand All @@ -82,6 +96,7 @@ def from_internal_entity(
nested_operation_input_kind=nested_operation_input_kind,
nested_operation_output_kind=nested_operation_output_kind,
description=operation_description.description,
property_name_options=operation_description.property_name_options,
)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
VELOCITY_KEY_IN_SV_DETECTIONS,
)
from inference.core.workflows.execution_engine.constants import (
AREA_CONVERTED_KEY_IN_SV_DETECTIONS,
AREA_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_ANGLE_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_HEIGHT_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_RECT_KEY_IN_SV_DETECTIONS,
Expand Down Expand Up @@ -79,6 +81,8 @@ class DetectionsProperty(Enum):
BOUNDING_RECT_WIDTH = BOUNDING_RECT_WIDTH_KEY_IN_SV_DETECTIONS
BOUNDING_RECT_HEIGHT = BOUNDING_RECT_HEIGHT_KEY_IN_SV_DETECTIONS
BOUNDING_RECT_ANGLE = BOUNDING_RECT_ANGLE_KEY_IN_SV_DETECTIONS
AREA = AREA_KEY_IN_SV_DETECTIONS
AREA_CONVERTED = AREA_CONVERTED_KEY_IN_SV_DETECTIONS


class DetectionsSortProperties(Enum):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from typing import List, Optional

from pydantic import BaseModel
from pydantic import BaseModel, Field

from inference.core.workflows.execution_engine.entities.types import Kind

Expand All @@ -14,6 +14,20 @@ class OperationDescription(BaseModel):
nested_operation_output_kind: Optional[List[Kind]] = None
description: Optional[str] = None

property_name_options: Optional[List[str]] = Field(
default=None,
description=(
"List of possible property names. \
Optional parameter for operations extracting property values from data. "
),
examples=[
"size",
"height",
"width",
"aspect_ratio",
],
)


class OperatorDescription(BaseModel):
operator_type: str
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import List
from typing import List, Optional

from inference.core.workflows.core_steps.common.query_language.entities.introspection import (
OperationDescription,
Expand All @@ -25,21 +25,57 @@
)


def _ref_to_def_name(
schema: dict,
ref_key: str = "$ref",
) -> Optional[str]:
"""Extract definition name from a JSON Schema object's $ref (e.g. {'$ref': '#/$defs/Foo'} -> 'Foo')."""
ref = schema.get(ref_key)
if ref is None:
return None

return ref.split("/")[-1]


def _get_property_name_options(
    type_definition: dict,
    defs: dict,
) -> Optional[List[str]]:
    """Resolve the enum of allowed ``property_name`` values for an operation schema.

    Follows a ``$ref`` into *defs* when present (e.g. the DetectionsProperty
    enum definition); otherwise reads an inline ``enum``. Returns None when the
    operation declares no ``property_name`` choices.
    """
    property_name_schema = (type_definition.get("properties") or {}).get(
        "property_name"
    )
    if not property_name_schema:
        return None

    # Referenced definition takes precedence over an inline enum.
    reference = property_name_schema.get("$ref")
    if reference is not None:
        referenced_definition = defs.get(reference.rsplit("/", 1)[-1])
        if referenced_definition and "enum" in referenced_definition:
            return list(referenced_definition["enum"])

    if "enum" in property_name_schema:
        return list(property_name_schema["enum"])

    return None


def prepare_operations_descriptions() -> List[OperationDescription]:
operations_chain_schema = OperationsChain.schema()
operations_chain_schema = OperationsChain.model_json_schema()
defs = operations_chain_schema.get("$defs", {})
operation_type_definitions = operations_chain_schema["properties"]["operations"]["items"]["oneOf"] # fmt: skip

operation_types = [
type_definition["$ref"].split("/")[-1]
for type_definition in operations_chain_schema["properties"]["operations"][
"items"
]["oneOf"]
]
type_definitions = [
operations_chain_schema["$defs"][operation_type]
for operation_type in operation_types
name
for type_definition in operation_type_definitions
if (name := _ref_to_def_name(type_definition)) is not None
]
type_definitions = [defs[operation_type] for operation_type in operation_types]

result = []
for type_definition in type_definitions:
# TODO: fix simplification of type getter
property_name_options = _get_property_name_options(type_definition, defs)

result.append(
OperationDescription(
operation_type=type_definition["properties"]["type"]["const"],
Expand All @@ -52,6 +88,7 @@ def prepare_operations_descriptions() -> List[OperationDescription]:
nested_operation_output_kind=type_definition.get(
"nested_operation_output_kind"
),
property_name_options=property_name_options,
)
)
return result
Expand All @@ -78,7 +115,7 @@ def prepare_operators_descriptions() -> List[OperatorDescription]:
]
results = []
for operator_type in operator_types:
operator_schema = operator_type.schema()
operator_schema = operator_type.model_json_schema()
for alias in operator_schema["properties"]["type"].get("enum", []):
results.append(
OperatorDescription(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
safe_stringify,
)
from inference.core.workflows.execution_engine.constants import (
AREA_CONVERTED_KEY_IN_SV_DETECTIONS,
AREA_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_ANGLE_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_HEIGHT_KEY_IN_SV_DETECTIONS,
BOUNDING_RECT_RECT_KEY_IN_SV_DETECTIONS,
Expand Down Expand Up @@ -84,6 +86,10 @@
DetectionsProperty.BOUNDING_RECT_ANGLE: lambda x: x[5].get(
BOUNDING_RECT_ANGLE_KEY_IN_SV_DETECTIONS
),
DetectionsProperty.AREA: lambda x: x[5].get(AREA_KEY_IN_SV_DETECTIONS),
DetectionsProperty.AREA_CONVERTED: lambda x: x[5].get(
AREA_CONVERTED_KEY_IN_SV_DETECTIONS
),
}


Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import numpy as np

from inference.core.workflows.core_steps.common.query_language.entities.enums import (
DetectionsProperty,
)
from inference.core.workflows.core_steps.common.query_language.operations.detection.base import (
extract_detection_property,
)
from inference.core.workflows.execution_engine.constants import (
AREA_CONVERTED_KEY_IN_SV_DETECTIONS,
AREA_KEY_IN_SV_DETECTIONS,
)


def test_extract_detection_property_with_area_px() -> None:
# given
detection = (
np.array([0.0, 0.0, 50.0, 50.0], dtype=np.float32),
None,
np.float32(0.9),
np.int64(1),
None,
{
"class_name": np.array("leaf"),
AREA_KEY_IN_SV_DETECTIONS: np.float32(400.0),
AREA_CONVERTED_KEY_IN_SV_DETECTIONS: np.float32(4.0),
},
)

# when
result = extract_detection_property(
value=detection,
property_name=DetectionsProperty.AREA,
execution_context="<test>",
)

# then
assert result == 400.0


def test_extract_detection_property_with_area_converted() -> None:
# given
detection = (
np.array([0.0, 0.0, 50.0, 50.0], dtype=np.float32),
None,
np.float32(0.9),
np.int64(1),
None,
{
"class_name": np.array("leaf"),
AREA_KEY_IN_SV_DETECTIONS: np.float32(400.0),
AREA_CONVERTED_KEY_IN_SV_DETECTIONS: np.float32(4.0),
},
)

# when
result = extract_detection_property(
value=detection,
property_name=DetectionsProperty.AREA_CONVERTED,
execution_context="<test>",
)

# then
assert result == 4.0
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
"""Tests for UQL operations introspection (prepare_operations_descriptions)."""

from inference.core.workflows.core_steps.common.query_language.introspection.core import (
prepare_operations_descriptions,
)


def test_detections_property_extract_includes_property_name_options_with_area_fields():
    """Describe endpoint UQL operations must include area_px and area_converted for DetectionsPropertyExtract."""
    # when
    all_operations = prepare_operations_descriptions()

    # then: DetectionsPropertyExtract is described and exposes both area options
    matching = [
        operation
        for operation in all_operations
        if operation.operation_type == "DetectionsPropertyExtract"
    ]
    assert matching
    options = matching[0].property_name_options
    assert options is not None
    assert "area_px" in options
    assert "area_converted" in options
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@
from inference.core.workflows.core_steps.formatters.property_definition.v1 import (
PropertyDefinitionBlockV1,
)
from inference.core.workflows.execution_engine.constants import (
AREA_CONVERTED_KEY_IN_SV_DETECTIONS,
AREA_KEY_IN_SV_DETECTIONS,
)


def test_property_extraction_block() -> None:
Expand Down Expand Up @@ -171,3 +175,61 @@ def test_property_extraction_block_with_bottom_right() -> None:

# then
assert result == {"output": [[30, 40], [50, 60]]}


def test_property_extraction_block_with_area_px() -> None:
    # given: two boxes with precomputed pixel areas attached under the area key
    pixel_areas = np.array([400.0, 1000.0], dtype=np.float32)
    detections = sv.Detections(
        xyxy=np.array([[10, 20, 30, 40], [30, 40, 50, 60]], dtype=np.int32),
        class_id=np.array([0, 1], dtype=np.int32),
        confidence=np.array([0.6, 0.4], dtype=np.float32),
        data={AREA_KEY_IN_SV_DETECTIONS: pixel_areas},
    )
    extraction_operation = {
        "type": "DetectionsPropertyExtract",
        "property_name": AREA_KEY_IN_SV_DETECTIONS,
    }
    operations = OperationsChain.model_validate(
        {"operations": [extraction_operation]}
    ).operations
    block = PropertyDefinitionBlockV1()

    # when
    result = block.run(data=detections, operations=operations)

    # then: one pixel-area value per detection, in order
    assert result == {"output": [400.0, 1000.0]}


def test_property_extraction_block_with_area_converted() -> None:
    # given: two boxes carrying converted (real-world unit) areas only
    converted_areas = np.array([4.0, 10.0], dtype=np.float32)
    detections = sv.Detections(
        xyxy=np.array([[10, 20, 30, 40], [30, 40, 50, 60]], dtype=np.int32),
        class_id=np.array([0, 1], dtype=np.int32),
        confidence=np.array([0.6, 0.4], dtype=np.float32),
        data={AREA_CONVERTED_KEY_IN_SV_DETECTIONS: converted_areas},
    )
    extraction_operation = {
        "type": "DetectionsPropertyExtract",
        "property_name": AREA_CONVERTED_KEY_IN_SV_DETECTIONS,
    }
    operations = OperationsChain.model_validate(
        {"operations": [extraction_operation]}
    ).operations
    block = PropertyDefinitionBlockV1()

    # when
    result = block.run(data=detections, operations=operations)

    # then: one converted-area value per detection, in order
    assert result == {"output": [4.0, 10.0]}
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
"""Regression tests for workflow schema containing DetectionsProperty enum.

Ensures the schema returned by get_workflow_schema_description() includes
area_px and area_converted in the DetectionsProperty enum (used by
DetectionsPropertyExtract and other UQL operations).
"""

import pytest

from inference.core.workflows.execution_engine.v1.compiler import syntactic_parser


@pytest.fixture(autouse=True)
def clear_schema_cache():
    """Clear schema cache so schema is rebuilt from current block/enum definitions."""
    # Clear before the test so a schema cached by an earlier test cannot leak in,
    # and again afterwards so this module does not pollute subsequent tests.
    syntactic_parser.clear_cache()
    yield
    syntactic_parser.clear_cache()


def test_workflow_schema_includes_area_px_and_area_converted_in_detections_property():
    """DetectionsProperty enum in schema must include area_px and area_converted."""
    from inference.core.workflows.execution_engine.v1.compiler.syntactic_parser import (
        get_workflow_schema_description,
    )

    # when
    schema = get_workflow_schema_description().schema
    # pydantic v2 emits "$defs"; fall back to v1-style "definitions".
    definitions = schema.get("$defs", schema.get("definitions", {}))
    detections_property_schema = definitions.get("DetectionsProperty")

    # then
    assert detections_property_schema is not None, (
        "Schema should define DetectionsProperty (e.g. under $defs)"
    )
    enum_values = detections_property_schema.get("enum", [])
    assert "area_px" in enum_values, (
        "DetectionsProperty enum in workflow schema must include 'area_px' "
        "(from DetectionsProperty.AREA)"
    )
    assert "area_converted" in enum_values, (
        "DetectionsProperty enum in workflow schema must include 'area_converted' "
        "(from DetectionsProperty.AREA_CONVERTED)"
    )
Loading