diff --git a/packages/uipath-platform/pyproject.toml b/packages/uipath-platform/pyproject.toml index 99c6d16ab..342b22d83 100644 --- a/packages/uipath-platform/pyproject.toml +++ b/packages/uipath-platform/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "uipath-platform" -version = "0.1.46" +version = "0.1.47" description = "HTTP client library for programmatic access to UiPath Platform" readme = { file = "README.md", content-type = "text/markdown" } requires-python = ">=3.11" diff --git a/packages/uipath-platform/src/uipath/platform/entities/__init__.py b/packages/uipath-platform/src/uipath/platform/entities/__init__.py index aca80997b..63069d1d6 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/__init__.py +++ b/packages/uipath-platform/src/uipath/platform/entities/__init__.py @@ -8,17 +8,34 @@ ChoiceSetValue, DataFabricEntityItem, Entity, + EntityAggregate, + EntityAggregateFunction, + EntityBinning, + EntityCreateFieldOptions, + EntityCreateOptions, EntityField, + EntityFieldDataType, EntityFieldMetadata, + EntityImportRecordsResponse, + EntityJoin, + EntityMetadataUpdateOptions, + EntityQueryFilter, + EntityQueryFilterGroup, + EntityQueryRecordsResponse, + EntityQuerySortOption, EntityRecord, EntityRecordsBatchResponse, + EntityRecordsListResponse, EntityRouting, EntitySetResolution, ExternalField, ExternalObject, ExternalSourceFields, + FailureRecord, FieldDataType, FieldMetadata, + LogicalOperator, + QueryFilterOperator, QueryRoutingOverrideContext, ReferenceType, SourceJoinCriteria, @@ -29,17 +46,34 @@ "DataFabricEntityItem", "EntitiesService", "Entity", + "EntityAggregate", + "EntityAggregateFunction", + "EntityBinning", + "EntityCreateFieldOptions", + "EntityCreateOptions", "EntityField", - "EntityRecord", + "EntityFieldDataType", "EntityFieldMetadata", + "EntityImportRecordsResponse", + "EntityJoin", + "EntityMetadataUpdateOptions", + "EntityQueryFilter", + "EntityQueryFilterGroup", + "EntityQueryRecordsResponse", + 
"EntityQuerySortOption", + "EntityRecord", + "EntityRecordsBatchResponse", + "EntityRecordsListResponse", "EntityRouting", "EntitySetResolution", - "FieldDataType", - "FieldMetadata", - "EntityRecordsBatchResponse", "ExternalField", "ExternalObject", "ExternalSourceFields", + "FailureRecord", + "FieldDataType", + "FieldMetadata", + "LogicalOperator", + "QueryFilterOperator", "QueryRoutingOverrideContext", "ReferenceType", "SourceJoinCriteria", diff --git a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py index 951a4b07b..3f0157da8 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py +++ b/packages/uipath-platform/src/uipath/platform/entities/_entities_service.py @@ -1,9 +1,13 @@ import json as json_module import logging -from typing import Any, Dict, List, Optional, Type +import re +from contextlib import nullcontext +from pathlib import Path +from typing import Any, Dict, List, Optional, Type, Union import sqlparse -from httpx import Response +from httpx import HTTPStatusError, Response +from pydantic import BaseModel from sqlparse.sql import Function, Identifier, IdentifierList, Parenthesis, Where from sqlparse.tokens import DML, Keyword, Whitespace, Wildcard from uipath.core.tracing import traced @@ -14,6 +18,7 @@ from ..common._execution_context import UiPathExecutionContext from ..common._models import Endpoint, RequestSpec from ..common.constants import HEADER_FOLDER_KEY +from ..errors._enriched_exception import EnrichedException from ..orchestrator._folder_service import FolderService from ._entity_resolution import ( RoutingStrategy, @@ -25,15 +30,39 @@ fetch_resolved_entities_async, ) from .entities import ( + ENTITY_FIELD_CONSTRAINT_DEFAULTS, + ENTITY_FIELD_CONSTRAINT_SPEC, + ENTITY_SCHEMA_FIELD_TYPE_MAP, + RESERVED_FIELD_NAMES, ChoiceSetValue, DataFabricEntityItem, Entity, + EntityAggregate, + EntityBinning, + 
EntityCreateFieldOptions, + EntityCreateOptions, + EntityFieldDataType, + EntityImportRecordsResponse, + EntityJoin, + EntityMetadataUpdateOptions, + EntityQueryFilterGroup, + EntityQueryRecordsResponse, + EntityQuerySortOption, EntityRecord, EntityRecordsBatchResponse, + EntityRecordsListResponse, EntitySetResolution, QueryRoutingOverrideContext, ) +DATA_FABRIC_TENANT_FOLDER_ID = "00000000-0000-0000-0000-000000000000" + +FileContent = Union[bytes, bytearray, memoryview] +"""Acceptable raw bytes types for attachment/CSV uploads.""" + +_NAME_RE = re.compile(r"^[a-zA-Z]\w*$") +"""Entity and field name pattern: must start with a letter, then letters/digits/underscores.""" + logger = logging.getLogger(__name__) _FORBIDDEN_DML = {"INSERT", "UPDATE", "DELETE", "MERGE", "REPLACE"} @@ -205,6 +234,10 @@ async def retrieve_by_name_async( def list_entities(self) -> List[Entity]: """List all entities in Data Service. + Note: + This call returns regular entities only — choice sets are excluded. + Use :meth:`list_choicesets` to list choice set entities. + Returns: List[Entity]: A list of all entities with their metadata and field definitions. Each entity includes name, display name, fields, record count, and storage information. @@ -248,6 +281,10 @@ def list_entities(self) -> List[Entity]: async def list_entities_async(self) -> List[Entity]: """Asynchronously list all entities in the Data Service. + Note: + This call returns regular entities only — choice sets are excluded. + Use :meth:`list_choicesets_async` to list choice set entities. + Returns: List[Entity]: A list of all entities with their metadata and field definitions. Each entity includes name, display name, fields, record count, and storage information. 
@@ -387,7 +424,12 @@ def list_records( schema: Optional[Type[Any]] = None, # Optional schema start: Optional[int] = None, limit: Optional[int] = None, - ) -> List[EntityRecord]: + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: """List records from an entity with optional pagination and schema validation. The schema parameter enables type-safe access to entity records by validating the @@ -424,11 +466,16 @@ class CustomerRecord: start (Optional[int]): Starting index for pagination (0-based). limit (Optional[int]): Maximum number of records to return. + expansion_level (Optional[int]): Depth of foreign-key expansion (0 = none). + filter (Optional[str]): OData ``$filter`` expression (e.g. ``"status eq 'active'"``). + orderby (Optional[str]): OData ``$orderby`` expression (e.g. ``"name asc"``). + select (Optional[List[str]]): Field names for column projection (``$select``). + expand (Optional[List[str]]): Relationship names to expand (``$expand``). Returns: - List[EntityRecord]: A list of entity records. Each record contains an 'id' field - and all other fields from the entity. Fields can be accessed as attributes - or dictionary keys on the EntityRecord object. + EntityRecordsListResponse: A list-compatible response with ``total_count``, + ``has_next_page``, and ``next_cursor`` attributes for pagination metadata. + Iteration, indexing, and ``len()`` continue to work. 
Raises: ValueError: If schema validation fails for any record, including cases where @@ -447,6 +494,18 @@ class CustomerRecord: # Get first 50 records records = entities_service.list_records("Customers", start=0, limit=50) + With OData filtering, sorting, and expansion:: + + records = entities_service.list_records( + "Customers", + filter="status eq 'active'", + orderby="created_at desc", + select=["name", "email"], + expand=["company"], + expansion_level=1, + ) + print(f"Total matching: {records.total_count}") + With schema validation:: class CustomerRecord: @@ -465,19 +524,19 @@ class CustomerRecord: for record in records: print(f"{record.name}: {record.email}") """ - # Example method to generate the API request specification (mocked here) - spec = self._list_records_spec(entity_key, start, limit) - - # Make the HTTP request (assumes self.request exists) + spec = self._list_records_spec( + entity_key, + start=start, + limit=limit, + expansion_level=expansion_level, + filter=filter, + orderby=orderby, + select=select, + expand=expand, + ) response = self.request(spec.method, spec.endpoint, params=spec.params) - # Parse the response JSON and extract the "value" field - records_data = response.json().get("value", []) - - # Validate and wrap records - return [ - EntityRecord.from_data(data=record, model=schema) for record in records_data - ] + return self._build_records_list_response(response, schema, start, limit) @traced(name="entity_list_records", run_type="uipath") async def list_records_async( @@ -486,7 +545,12 @@ async def list_records_async( schema: Optional[Type[Any]] = None, # Optional schema start: Optional[int] = None, limit: Optional[int] = None, - ) -> List[EntityRecord]: + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + ) -> EntityRecordsListResponse: """Asynchronously list records from an entity with optional pagination 
and schema validation. The schema parameter enables type-safe access to entity records by validating the @@ -523,11 +587,16 @@ class CustomerRecord: start (Optional[int]): Starting index for pagination (0-based). limit (Optional[int]): Maximum number of records to return. + expansion_level (Optional[int]): Depth of foreign-key expansion (0 = none). + filter (Optional[str]): OData ``$filter`` expression (e.g. ``"status eq 'active'"``). + orderby (Optional[str]): OData ``$orderby`` expression (e.g. ``"name asc"``). + select (Optional[List[str]]): Field names for column projection (``$select``). + expand (Optional[List[str]]): Relationship names to expand (``$expand``). Returns: - List[EntityRecord]: A list of entity records. Each record contains an 'id' field - and all other fields from the entity. Fields can be accessed as attributes - or dictionary keys on the EntityRecord object. + EntityRecordsListResponse: A list-compatible response with ``total_count``, + ``has_next_page``, and ``next_cursor`` attributes for pagination metadata. + Iteration, indexing, and ``len()`` continue to work. 
Raises: ValueError: If schema validation fails for any record, including cases where @@ -546,6 +615,18 @@ class CustomerRecord: # Get first 50 records records = await entities_service.list_records_async("Customers", start=0, limit=50) + With OData filtering, sorting, and expansion:: + + records = await entities_service.list_records_async( + "Customers", + filter="status eq 'active'", + orderby="created_at desc", + select=["name", "email"], + expand=["company"], + expansion_level=1, + ) + print(f"Total matching: {records.total_count}") + With schema validation:: class CustomerRecord: @@ -564,20 +645,21 @@ class CustomerRecord: for record in records: print(f"{record.name}: {record.email}") """ - spec = self._list_records_spec(entity_key, start, limit) - - # Make the HTTP request (assumes self.request exists) + spec = self._list_records_spec( + entity_key, + start=start, + limit=limit, + expansion_level=expansion_level, + filter=filter, + orderby=orderby, + select=select, + expand=expand, + ) response = await self.request_async( spec.method, spec.endpoint, params=spec.params ) - # Parse the response JSON and extract the "value" field - records_data = response.json().get("value", []) - - # Validate and wrap records - return [ - EntityRecord.from_data(data=record, model=schema) for record in records_data - ] + return self._build_records_list_response(response, schema, start, limit) @traced(name="entity_query_records", run_type="uipath") def query_entity_records( @@ -729,6 +811,8 @@ def insert_records( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Insert multiple records into an entity in a single batch operation. 
@@ -791,8 +875,19 @@ def __init__(self, name, email, age): for record in response.success_records: print(f"Inserted: {record.name} (ID: {record.id})") """ - spec = self._insert_batch_spec(entity_key, records) - response = self.request(spec.method, spec.endpoint, json=spec.json) + spec = self._insert_batch_spec( + entity_key, + records, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + response = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(response, EntityRecordsBatchResponse): + return response return self.validate_entity_batch(response, schema) @@ -802,6 +897,8 @@ async def insert_records_async( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously insert multiple records into an entity in a single batch operation. 
@@ -864,10 +961,23 @@ def __init__(self, name, email, age): for record in response.success_records: print(f"Inserted: {record.name} (ID: {record.id})") """ - spec = self._insert_batch_spec(entity_key, records) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + spec = self._insert_batch_spec( + entity_key, + records, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) - return self.validate_entity_batch(response, schema) + async def _do() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + + result = await self._request_or_extract_batch_async(_do) + if isinstance(result, EntityRecordsBatchResponse): + return result + + return self.validate_entity_batch(result, schema) @traced(name="entity_record_update_batch", run_type="uipath") def update_records( @@ -875,6 +985,8 @@ def update_records( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Update multiple records in an entity in a single batch operation. 
@@ -937,13 +1049,24 @@ class CustomerSchema: for record in response.success_records: print(f"Updated: {record.name}") """ - valid_records = [ - EntityRecord.from_data(data=record.model_dump(by_alias=True), model=schema) - for record in records - ] - - spec = self._update_batch_spec(entity_key, valid_records) - response = self.request(spec.method, spec.endpoint, json=spec.json) + normalized = [self._record_to_dict(record) for record in records] + if schema is not None: + for record in normalized: + EntityRecord.from_data(data=record, model=schema) + + spec = self._update_batch_spec( + entity_key, + normalized, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) + response = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(response, EntityRecordsBatchResponse): + return response return self.validate_entity_batch(response, schema) @@ -953,6 +1076,8 @@ async def update_records_async( entity_key: str, records: List[Any], schema: Optional[Type[Any]] = None, + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously update multiple records in an entity in a single batch operation. 
@@ -1015,21 +1140,35 @@ class CustomerSchema: for record in response.success_records: print(f"Updated: {record.name}") """ - valid_records = [ - EntityRecord.from_data(data=record.model_dump(by_alias=True), model=schema) - for record in records - ] + normalized = [self._record_to_dict(record) for record in records] + if schema is not None: + for record in normalized: + EntityRecord.from_data(data=record, model=schema) + + spec = self._update_batch_spec( + entity_key, + normalized, + expansion_level=expansion_level, + fail_on_first=fail_on_first, + ) - spec = self._update_batch_spec(entity_key, valid_records) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + async def _do() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) - return self.validate_entity_batch(response, schema) + result = await self._request_or_extract_batch_async(_do) + if isinstance(result, EntityRecordsBatchResponse): + return result + + return self.validate_entity_batch(result, schema) @traced(name="entity_record_delete_batch", run_type="uipath") def delete_records( self, entity_key: str, record_ids: List[str], + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Delete multiple records from an entity in a single batch operation. 
@@ -1077,20 +1216,25 @@ def delete_records( ) print(f"Deleted {len(response.success_records)} inactive records") """ - spec = self._delete_batch_spec(entity_key, record_ids) - response = self.request(spec.method, spec.endpoint, json=spec.json) - - delete_records_response = EntityRecordsBatchResponse.model_validate( - response.json() + spec = self._delete_batch_spec( + entity_key, record_ids, fail_on_first=fail_on_first ) + result = self._request_or_extract_batch( + sync_call=lambda: self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + ) + if isinstance(result, EntityRecordsBatchResponse): + return result - return delete_records_response + return EntityRecordsBatchResponse.model_validate(result.json()) @traced(name="entity_record_delete_batch", run_type="uipath") async def delete_records_async( self, entity_key: str, record_ids: List[str], + fail_on_first: Optional[bool] = None, ) -> EntityRecordsBatchResponse: """Asynchronously delete multiple records from an entity in a single batch operation. @@ -1138,43 +1282,512 @@ async def delete_records_async( ) print(f"Deleted {len(response.success_records)} inactive records") """ - spec = self._delete_batch_spec(entity_key, record_ids) - response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + spec = self._delete_batch_spec( + entity_key, record_ids, fail_on_first=fail_on_first + ) + + async def _do() -> Response: + return await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + + result = await self._request_or_extract_batch_async(_do) + if isinstance(result, EntityRecordsBatchResponse): + return result + + return EntityRecordsBatchResponse.model_validate(result.json()) - delete_records_response = EntityRecordsBatchResponse.model_validate( - response.json() + # ------------------------------------------------------------------ + # Single-record operations. 
+ # + # The single-record endpoints fire Data Fabric trigger events on each + # mutation; the batch endpoints do not. Use these when triggers must run. + # ------------------------------------------------------------------ + + @traced(name="entity_insert_record", run_type="uipath") + def insert_record( + self, + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Insert a single record and return it with its generated ``Id``. + + Args: + entity_key: Entity identifier. + data: Record payload — dict, Pydantic model, :class:`EntityRecord`, + or any object exposing ``__dict__``. + expansion_level: Optional foreign-key expansion depth in the response. + """ + spec = self._insert_record_spec(entity_key, data, expansion_level) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json ) + return EntityRecord.model_validate(response.json()) - return delete_records_response + @traced(name="entity_insert_record", run_type="uipath") + async def insert_record_async( + self, + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`insert_record`.""" + spec = self._insert_record_spec(entity_key, data, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) - def validate_entity_batch( + @traced(name="entity_get_record", run_type="uipath") + def get_record( self, - batch_response: Response, - schema: Optional[Type[Any]] = None, - ) -> EntityRecordsBatchResponse: - # Validate the response format - insert_records_response = EntityRecordsBatchResponse.model_validate( - batch_response.json() + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Fetch a single record by its id. + + Args: + entity_key: Entity identifier. + record_id: Record identifier. 
+ expansion_level: Optional foreign-key expansion depth. + """ + spec = self._get_record_spec(entity_key, record_id, expansion_level) + response = self.request(spec.method, spec.endpoint, params=spec.params) + return EntityRecord.model_validate(response.json()) + + @traced(name="entity_get_record", run_type="uipath") + async def get_record_async( + self, + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`get_record`.""" + spec = self._get_record_spec(entity_key, record_id, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params ) + return EntityRecord.model_validate(response.json()) - # Validate individual records - validated_successful_records = [ - EntityRecord.from_data( - data=successful_record.model_dump(by_alias=True), model=schema + @traced(name="entity_update_record", run_type="uipath") + def update_record( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Update a single record by id and return the updated record. + + Args: + entity_key: Entity identifier. + record_id: Record identifier to update. + data: Fields to update — dict, Pydantic model, or any object with + ``__dict__``. Fields explicitly set to ``None`` are sent + through; unset fields are omitted. + expansion_level: Optional foreign-key expansion depth in the response. 
+ """ + spec = self._update_record_spec(entity_key, record_id, data, expansion_level) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + @traced(name="entity_update_record", run_type="uipath") + async def update_record_async( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> EntityRecord: + """Async variant of :meth:`update_record`.""" + spec = self._update_record_spec(entity_key, record_id, data, expansion_level) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return EntityRecord.model_validate(response.json()) + + @traced(name="entity_delete_record", run_type="uipath") + def delete_record(self, entity_key: str, record_id: str) -> None: + """Delete a single record by id.""" + spec = self._delete_record_spec(entity_key, record_id) + self.request(spec.method, spec.endpoint) + + @traced(name="entity_delete_record", run_type="uipath") + async def delete_record_async(self, entity_key: str, record_id: str) -> None: + """Async variant of :meth:`delete_record`.""" + spec = self._delete_record_spec(entity_key, record_id) + await self.request_async(spec.method, spec.endpoint) + + # ------------------------------------------------------------------ + # Structured query + # ------------------------------------------------------------------ + + @traced(name="entity_query_records_structured", run_type="uipath") + def query_records( + self, + entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + expansions: Optional[List[Any]] = None, + expansion_level: Optional[int] = None, + aggregates: Optional[List[EntityAggregate]] = None, + group_by: Optional[List[str]] = None, + joins: Optional[List[EntityJoin]] = None, + binnings: 
Optional[List[EntityBinning]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> EntityQueryRecordsResponse: + """Query records with filters, sorting, expansion, joins, and aggregates. + + Routes to the V2 endpoint when ``binnings`` is provided (numeric/date + binning is gated by the ``enable-binning-on-query`` feature flag on + the backend). + + Args: + entity_key: Entity identifier. + filter_group: Nested filter conditions combined with AND/OR. + sort_options: Sort fields and direction. + selected_fields: Column projection; omit to return all fields. + expansions: Foreign-key relationships to expand. + expansion_level: Depth of expansion (sent as a URL query param). + aggregates: Aggregate expressions (``COUNT``/``SUM``/``AVG``/ + ``MIN``/``MAX``). Maximum 5 per query. + group_by: Fields to group aggregate results by. Maximum 5; + required when both ``aggregates`` and ``selected_fields`` + are supplied. + joins: Cross-entity joins. Maximum 3, all of the same type. + binnings: Bucket numeric or date group-by fields. Each entry's + field must also appear in ``group_by``. + start: Records to skip (pagination offset). + limit: Maximum number of records to return. + + Returns: + :class:`EntityQueryRecordsResponse` with ``items``, ``total_count``, + and ``has_next_page``. ``next_cursor`` is populated only when the + backend returns one; otherwise paginate by passing the next + ``start`` yourself. 
+ """ + spec = self._query_records_spec( + entity_key, + filter_group=filter_group, + sort_options=sort_options, + selected_fields=selected_fields, + expansions=expansions, + expansion_level=expansion_level, + aggregates=aggregates, + group_by=group_by, + joins=joins, + binnings=binnings, + start=start, + limit=limit, + ) + response = self.request( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return self._parse_query_response(response, start=start, limit=limit) + + @traced(name="entity_query_records_structured", run_type="uipath") + async def query_records_async( + self, + entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + expansions: Optional[List[Any]] = None, + expansion_level: Optional[int] = None, + aggregates: Optional[List[EntityAggregate]] = None, + group_by: Optional[List[str]] = None, + joins: Optional[List[EntityJoin]] = None, + binnings: Optional[List[EntityBinning]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> EntityQueryRecordsResponse: + """Async variant of :meth:`query_records`.""" + spec = self._query_records_spec( + entity_key, + filter_group=filter_group, + sort_options=sort_options, + selected_fields=selected_fields, + expansions=expansions, + expansion_level=expansion_level, + aggregates=aggregates, + group_by=group_by, + joins=joins, + binnings=binnings, + start=start, + limit=limit, + ) + response = await self.request_async( + spec.method, spec.endpoint, params=spec.params, json=spec.json + ) + return self._parse_query_response(response, start=start, limit=limit) + + # ------------------------------------------------------------------ + # File attachments + # ------------------------------------------------------------------ + + @traced(name="entity_upload_attachment", run_type="uipath") + def upload_attachment( + self, + entity_id: str, + record_id: 
str, + field_name: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Upload a file to a File-type field on a record. + + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). + + Args: + entity_id: Entity identifier. + record_id: Record identifier whose attachment field is being set. + field_name: Name of the File-type field. + file: Raw bytes to upload. + file_path: Path to a local file to upload. + expansion_level: Optional foreign-key expansion depth in the response. + + Returns: + The decoded JSON response (typically the updated record), or an + empty dict when the response has no body. + """ + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + with self._open_file(file, file_path) as handle: + response = self.request( + "POST", + spec.endpoint, + params=spec.params, + files={"file": handle}, ) - for successful_record in insert_records_response.success_records - ] + return response.json() if response.content else {} - validated_failed_records = [ - EntityRecord.from_data( - data=failed_record.model_dump(by_alias=True), model=schema + @traced(name="entity_upload_attachment", run_type="uipath") + async def upload_attachment_async( + self, + entity_id: str, + record_id: str, + field_name: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Async variant of :meth:`upload_attachment`.""" + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + with self._open_file(file, file_path) as handle: + response = await self.request_async( + "POST", + spec.endpoint, + params=spec.params, + files={"file": handle}, ) - for failed_record in insert_records_response.failure_records - ] + return response.json() if response.content else {} + + @traced(name="entity_download_attachment", 
run_type="uipath") + def download_attachment( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Download a file attached to a record and return its raw bytes.""" + spec = self._attachment_endpoint(entity_id, record_id, field_name) + response = self.request("GET", spec.endpoint) + return response.content + + @traced(name="entity_download_attachment", run_type="uipath") + async def download_attachment_async( + self, entity_id: str, record_id: str, field_name: str + ) -> bytes: + """Async variant of :meth:`download_attachment`.""" + spec = self._attachment_endpoint(entity_id, record_id, field_name) + response = await self.request_async("GET", spec.endpoint) + return response.content + + @traced(name="entity_delete_attachment", run_type="uipath") + def delete_attachment( + self, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Remove the file attached to a File-type field on a record. + + Returns the decoded JSON response, or an empty dict when the response + has no body. 
+ """ + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + response = self.request("DELETE", spec.endpoint, params=spec.params) + return response.json() if response.content else {} + + @traced(name="entity_delete_attachment", run_type="uipath") + async def delete_attachment_async( + self, + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> Dict[str, Any]: + """Async variant of :meth:`delete_attachment`.""" + spec = self._attachment_endpoint( + entity_id, record_id, field_name, expansion_level + ) + response = await self.request_async("DELETE", spec.endpoint, params=spec.params) + return response.json() if response.content else {} + + # ------------------------------------------------------------------ + # Entity schema management + # ------------------------------------------------------------------ + + @traced(name="entity_create", run_type="uipath") + def create_entity( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Create a new entity with the given schema and return its id. + + Args: + name: Entity name; must start with a letter and contain only + letters, digits, and underscores (3-100 characters). + fields: Field definitions for the new entity. + options: Optional entity-level settings such as display name, + description, folder placement, and RBAC / analytics flags. 
+ """ + spec = self._create_entity_spec(name, fields, options) + response = self.request(spec.method, spec.endpoint, json=spec.json) + return self._extract_entity_id(response) + + @traced(name="entity_create", run_type="uipath") + async def create_entity_async( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> str: + """Async variant of :meth:`create_entity`.""" + spec = self._create_entity_spec(name, fields, options) + response = await self.request_async(spec.method, spec.endpoint, json=spec.json) + return self._extract_entity_id(response) + + @traced(name="entity_delete", run_type="uipath") + def delete_entity(self, entity_id: str) -> None: + """Delete an entity and all of its records.""" + spec = self._delete_entity_spec(entity_id) + self.request(spec.method, spec.endpoint) + + @traced(name="entity_delete", run_type="uipath") + async def delete_entity_async(self, entity_id: str) -> None: + """Async variant of :meth:`delete_entity`.""" + spec = self._delete_entity_spec(entity_id) + await self.request_async(spec.method, spec.endpoint) + + @traced(name="entity_update_metadata", run_type="uipath") + def update_entity_metadata( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Update an entity's display name, description, and/or RBAC flag. + + Args: + entity_id: Entity identifier. + metadata: An :class:`EntityMetadataUpdateOptions` instance or a dict + with any of ``display_name``, ``description``, ``is_rbac_enabled``. 
+ """ + spec = self._update_entity_metadata_spec(entity_id, metadata) + self.request(spec.method, spec.endpoint, json=spec.json) + + @traced(name="entity_update_metadata", run_type="uipath") + async def update_entity_metadata_async( + self, + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> None: + """Async variant of :meth:`update_entity_metadata`.""" + spec = self._update_entity_metadata_spec(entity_id, metadata) + await self.request_async(spec.method, spec.endpoint, json=spec.json) + + # ------------------------------------------------------------------ + # Bulk import + # ------------------------------------------------------------------ + + @traced(name="entity_import_records", run_type="uipath") + def import_records( + self, + entity_id: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Bulk-import records into an entity from a CSV file. + + Provide exactly one of ``file`` (raw bytes) or ``file_path`` (path on + disk). The response reports the total rows in the file, the number + successfully inserted, and an optional link to a CSV listing rows + that failed. 
+ """ + with self._open_file(file, file_path) as handle: + response = self.request( + "POST", + Endpoint( + f"datafabric_/api/EntityService/entity/{entity_id}/bulk-upload" + ), + files={"file": handle}, + ) + return EntityImportRecordsResponse.model_validate(response.json() or {}) + + @traced(name="entity_import_records", run_type="uipath") + async def import_records_async( + self, + entity_id: str, + file: Optional[FileContent] = None, + file_path: Optional[str] = None, + ) -> EntityImportRecordsResponse: + """Async variant of :meth:`import_records`.""" + with self._open_file(file, file_path) as handle: + response = await self.request_async( + "POST", + Endpoint( + f"datafabric_/api/EntityService/entity/{entity_id}/bulk-upload" + ), + files={"file": handle}, + ) + return EntityImportRecordsResponse.model_validate(response.json() or {}) + + def validate_entity_batch( + self, + batch_response: Response, + schema: Optional[Type[Any]] = None, + ) -> EntityRecordsBatchResponse: + """Parse a batch response, optionally validating success records against ``schema``. + + Failure records are returned as :class:`FailureRecord` instances and + are not validated against the user schema. 
+ """ + parsed = EntityRecordsBatchResponse.model_validate(batch_response.json()) + + validated_successful_records = [] + for successful_record in parsed.success_records: + data = successful_record.model_dump(by_alias=True) + if data.get("Id") is not None: + validated_successful_records.append( + EntityRecord.from_data(data=data, model=schema) + ) return EntityRecordsBatchResponse( success_records=validated_successful_records, - failure_records=validated_failed_records, + failure_records=parsed.failure_records, ) def _retrieve_spec( @@ -1212,12 +1825,27 @@ def _list_records_spec( entity_key: str, start: Optional[int] = None, limit: Optional[int] = None, + expansion_level: Optional[int] = None, + filter: Optional[str] = None, + orderby: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, ) -> RequestSpec: params: dict[str, Any] = {} if start is not None: params["start"] = start if limit is not None: params["limit"] = limit + if expansion_level is not None: + params["expansionLevel"] = expansion_level + if filter is not None: + params["$filter"] = filter + if orderby is not None: + params["$orderby"] = orderby + if select: + params["$select"] = ",".join(select) + if expand: + params["$expand"] = ",".join(expand) return RequestSpec( method="GET", endpoint=Endpoint( @@ -1242,35 +1870,683 @@ def _query_entity_records_spec( json=body, ) - def _insert_batch_spec(self, entity_key: str, records: List[Any]) -> RequestSpec: + def _insert_batch_spec( + self, + entity_key: str, + records: List[Any], + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> RequestSpec: + params = self._batch_params( + expansion_level=expansion_level, fail_on_first=fail_on_first + ) return RequestSpec( method="POST", endpoint=Endpoint( f"datafabric_/api/EntityService/entity/{entity_key}/insert-batch" ), - json=[record.__dict__ for record in records], + params=params, + json=[self._record_to_dict(record) for record in 
records], ) def _update_batch_spec( - self, entity_key: str, records: List[EntityRecord] + self, + entity_key: str, + records: List[Dict[str, Any]], + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, ) -> RequestSpec: + params = self._batch_params( + expansion_level=expansion_level, fail_on_first=fail_on_first + ) return RequestSpec( method="POST", endpoint=Endpoint( f"datafabric_/api/EntityService/entity/{entity_key}/update-batch" ), - json=[record.model_dump(by_alias=True) for record in records], + params=params, + json=records, ) - def _delete_batch_spec(self, entity_key: str, record_ids: List[str]) -> RequestSpec: + def _delete_batch_spec( + self, + entity_key: str, + record_ids: List[str], + fail_on_first: Optional[bool] = None, + ) -> RequestSpec: + params = self._batch_params(fail_on_first=fail_on_first) return RequestSpec( method="POST", endpoint=Endpoint( f"datafabric_/api/EntityService/entity/{entity_key}/delete-batch" ), + params=params, json=record_ids, ) + @staticmethod + def _batch_params( + expansion_level: Optional[int] = None, + fail_on_first: Optional[bool] = None, + ) -> Dict[str, Any]: + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + if fail_on_first is not None: + params["failOnFirst"] = "true" if fail_on_first else "false" + return params + + @staticmethod + def _record_to_dict(record: Any) -> Dict[str, Any]: + """Normalize an input record to a plain dict. + + Accepts dicts, Pydantic ``BaseModel`` (including :class:`EntityRecord`), + or any object exposing ``__dict__``. Explicit ``None`` values are + preserved so callers can clear fields by setting them to ``None`` on a + model instance — only unset fields (whose Pydantic default applies) are + dropped via ``exclude_unset=True``. 
+ """ + if isinstance(record, dict): + return dict(record) + if isinstance(record, BaseModel): + return record.model_dump(by_alias=True, exclude_unset=True) + if hasattr(record, "__dict__"): + return {k: v for k, v in record.__dict__.items() if not k.startswith("_")} + raise TypeError( + f"Cannot convert record of type {type(record).__name__} to dict — " + "pass a dict, an EntityRecord, a Pydantic BaseModel, or an object with __dict__." + ) + + @staticmethod + def _build_records_list_response( + response: Response, + schema: Optional[Type[Any]], + start: Optional[int], + limit: Optional[int], + ) -> EntityRecordsListResponse: + """Build an :class:`EntityRecordsListResponse` from a list-records body.""" + body = response.json() or {} + records_data = body.get("value", []) + total_count = int( + body.get("totalRecordCount", body.get("totalCount", len(records_data))) or 0 + ) + records = [ + EntityRecord.from_data(data=record, model=schema) for record in records_data + ] + + next_cursor = body.get("nextCursor") + if limit is not None and limit > 0: + consumed = (start or 0) + len(records) + has_next_page = consumed < total_count + else: + has_next_page = bool(body.get("hasNextPage", False)) + + return EntityRecordsListResponse( + items=records, + total_count=total_count, + has_next_page=has_next_page, + next_cursor=next_cursor, + ) + + def _request_or_extract_batch( + self, + sync_call: Any, + ) -> Union[Response, EntityRecordsBatchResponse]: + """Run a batch request and recover per-record failures from a 400 body. + + On HTTP 400 with a body that lists both ``successRecords`` and + ``failureRecords``, returns the parsed batch response instead of + raising. All other errors propagate. 
+ """ + try: + return sync_call() + except EnrichedException as exc: + extracted = self._extract_batch_response_from_error(exc) + if extracted is not None: + return extracted + raise + + async def _request_or_extract_batch_async( + self, + async_call: Any, + ) -> Union[Response, EntityRecordsBatchResponse]: + """Async variant of :meth:`_request_or_extract_batch`.""" + try: + return await async_call() + except EnrichedException as exc: + extracted = self._extract_batch_response_from_error(exc) + if extracted is not None: + return extracted + raise + + @staticmethod + def _extract_batch_response_from_error( + exc: EnrichedException, + ) -> Optional[EntityRecordsBatchResponse]: + """Return a parsed batch response when the error body matches the per-record-failure shape. + + Recovery is intentionally narrow: only HTTP 400 with a JSON object + containing list-typed ``successRecords`` and ``failureRecords`` keys. + Returns ``None`` for any other status, body shape, or parse failure + so that the original error propagates. 
+ """ + cause = exc.__cause__ + if not isinstance(cause, HTTPStatusError): + return None + if cause.response.status_code != 400: + return None + try: + data = cause.response.json() + except Exception: + return None + if not isinstance(data, dict): + return None + if not ( + isinstance(data.get("successRecords"), list) + and isinstance(data.get("failureRecords"), list) + ): + return None + try: + return EntityRecordsBatchResponse.model_validate(data) + except Exception: + return None + + # ------------------------------------------------------------------ + # Request-spec builders + # ------------------------------------------------------------------ + + def _insert_record_spec( + self, + entity_key: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/insert" + ), + params=params, + json=self._record_to_dict(data), + ) + + def _get_record_spec( + self, + entity_key: str, + record_id: str, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="GET", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}" + ), + params=params, + ) + + def _update_record_spec( + self, + entity_key: str, + record_id: str, + data: Any, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/update/{record_id}" + ), + params=params, + json=self._record_to_dict(data), + ) + + @staticmethod + def _delete_record_spec(entity_key: str, 
record_id: str) -> RequestSpec: + return RequestSpec( + method="DELETE", + endpoint=Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}" + ), + ) + + def _query_records_spec( + self, + entity_key: str, + filter_group: Optional[EntityQueryFilterGroup] = None, + sort_options: Optional[List[EntityQuerySortOption]] = None, + selected_fields: Optional[List[str]] = None, + expansions: Optional[List[Any]] = None, + expansion_level: Optional[int] = None, + aggregates: Optional[List[Any]] = None, + group_by: Optional[List[str]] = None, + joins: Optional[List[EntityJoin]] = None, + binnings: Optional[List[EntityBinning]] = None, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> RequestSpec: + """Build the request spec for the structured-query endpoint. + + Filters, sorting, projection, expansions, aggregates, group-by, joins, + binnings, ``start``, and ``limit`` are placed in the JSON body; + ``expansionLevel`` is a URL query parameter. The V2 endpoint is used + only when ``binnings`` are supplied. 
+ """ + body: Dict[str, Any] = {} + if filter_group is not None: + body["filterGroup"] = filter_group.model_dump( + by_alias=True, exclude_none=True + ) + if sort_options: + body["sortOptions"] = [ + opt.model_dump(by_alias=True, exclude_none=True) for opt in sort_options + ] + if selected_fields: + body["selectedFields"] = list(selected_fields) + if expansions: + body["expansions"] = [ + e.model_dump(by_alias=True, exclude_none=True) + if isinstance(e, BaseModel) + else e + for e in expansions + ] + if aggregates: + body["aggregates"] = [ + a.model_dump(by_alias=True, exclude_none=True) + if isinstance(a, BaseModel) + else a + for a in aggregates + ] + if group_by: + body["groupBy"] = list(group_by) + if joins: + body["joins"] = [ + j.model_dump(by_alias=True, exclude_none=True) for j in joins + ] + if binnings: + body["binnings"] = [ + b.model_dump(by_alias=True, exclude_none=True) for b in binnings + ] + if start is not None: + body["start"] = start + if limit is not None: + body["limit"] = limit + + # ``expansionLevel`` is a URL query param on the controller, not a body field. + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + + if binnings: + endpoint = Endpoint( + f"datafabric_/api/v2/EntityService/entity/{entity_key}/query" + ) + else: + endpoint = Endpoint( + f"datafabric_/api/EntityService/entity/{entity_key}/query" + ) + + return RequestSpec( + method="POST", + endpoint=endpoint, + params=params, + json=body, + ) + + @staticmethod + def _parse_query_response( + response: Response, + start: Optional[int] = None, + limit: Optional[int] = None, + ) -> EntityQueryRecordsResponse: + """Parse a query response into :class:`EntityQueryRecordsResponse`. + + ``has_next_page`` is derived from ``start + len(items) < total_count`` + whenever ``limit`` is supplied; ``next_cursor`` is populated only when + the backend returns one, otherwise the caller paginates by passing the + next ``start``. 
+ """ + body = response.json() or {} + # Aggregate / binning rows do not carry an ``Id`` field, so fall back + # to constructing the record without strict validation when the row + # cannot be parsed as a regular entity record. + items_raw = body.get("value", []) or [] + items: List[EntityRecord] = [] + for raw in items_raw: + try: + items.append(EntityRecord.from_data(data=raw)) + except ValueError: + items.append(EntityRecord.model_construct(_fields_set=set(raw), **raw)) + + total_count = int(body.get("totalRecordCount", body.get("totalCount", 0)) or 0) + + next_cursor: Optional[str] = body.get("nextCursor") + has_next_page = bool(body.get("hasNextPage", False)) + if next_cursor is None and limit is not None and limit > 0: + consumed = (start or 0) + len(items) + has_next_page = consumed < total_count + + return EntityQueryRecordsResponse( + items=items, + total_count=total_count, + has_next_page=has_next_page, + next_cursor=next_cursor, + ) + + @staticmethod + def _attachment_endpoint( + entity_id: str, + record_id: str, + field_name: str, + expansion_level: Optional[int] = None, + ) -> RequestSpec: + """Return the attachment endpoint plus any ``expansionLevel`` query param. + + The HTTP verb is supplied by the caller; only the URL and query + parameters depend on these arguments. + """ + params: Dict[str, Any] = {} + if expansion_level is not None: + params["expansionLevel"] = expansion_level + return RequestSpec( + method="POST", + endpoint=Endpoint( + f"datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}" + ), + params=params, + ) + + @staticmethod + def _open_file(file: Optional[FileContent], file_path: Optional[str]) -> Any: + """Yield a file-like object from raw bytes or a path on disk. + + Exactly one of ``file`` and ``file_path`` must be supplied. + """ + if (file is None) == (file_path is None): + raise ValueError( + "Provide exactly one of `file` (bytes) or `file_path` (str path on disk)." 
+ ) + if file_path is not None: + return open(Path(file_path), "rb") + return nullcontext(file) + + def _create_entity_spec( + self, + name: str, + fields: List[EntityCreateFieldOptions], + options: Optional[EntityCreateOptions] = None, + ) -> RequestSpec: + self._validate_name(name, "entity") + for field in fields: + self._validate_name(field.field_name, "field") + opts = options or EntityCreateOptions() + # The user-facing option ``is_analytics_enabled`` maps to the legacy + # backend field name ``isInsightsEnabled`` — the wire name predates + # the "Analytics" UI rename. + payload: Dict[str, Any] = { + "displayName": opts.display_name or name, + "entityDefinition": { + "name": name, + "fields": [self._build_schema_field_payload(f) for f in fields], + "folderId": opts.folder_key or DATA_FABRIC_TENANT_FOLDER_ID, + "isRbacEnabled": bool(opts.is_rbac_enabled or False), + "isInsightsEnabled": bool(opts.is_analytics_enabled or False), + "externalFields": opts.external_fields or [], + }, + } + if opts.description is not None: + payload["description"] = opts.description + return RequestSpec( + method="POST", + endpoint=Endpoint("datafabric_/api/Entity"), + json=payload, + ) + + @classmethod + def _build_schema_field_payload( + cls, field: EntityCreateFieldOptions + ) -> Dict[str, Any]: + """Build the API field payload for a single field on create-entity. + + Maps :class:`EntityFieldDataType` to the backend's ``sqlType.name`` and + ``fieldDisplayType`` (e.g. ``STRING`` becomes ``NVARCHAR`` / ``Basic``). + Caller-supplied constraints are validated against + :data:`ENTITY_FIELD_CONSTRAINT_SPEC`; unsupplied per-type constraints + fall back to :data:`ENTITY_FIELD_CONSTRAINT_DEFAULTS` so the field is + persisted fully and remains editable later. 
+ """ + ftype = field.type or EntityFieldDataType.STRING + cls._validate_name(field.field_name, "field") + cls._validate_field_constraints(ftype, field) + + sql_type_name, field_display_type = ENTITY_SCHEMA_FIELD_TYPE_MAP[ftype] + sql_type: Dict[str, Any] = {"name": sql_type_name} + sql_type.update(cls._build_sql_type_constraints(ftype, field)) + + payload: Dict[str, Any] = { + "name": field.field_name, + "displayName": field.display_name or field.field_name, + "sqlType": sql_type, + "fieldDisplayType": field_display_type, + "description": field.description or "", + "isRequired": bool(field.is_required or False), + "isUnique": bool(field.is_unique or False), + "isRbacEnabled": bool(field.is_rbac_enabled or False), + "isEncrypted": bool(field.is_encrypted or False), + } + if field.default_value is not None: + payload["defaultValue"] = field.default_value + if field.choice_set_id is not None: + payload["choiceSetId"] = field.choice_set_id + if field.reference_entity_name is not None: + payload["referenceEntityName"] = field.reference_entity_name + if field.reference_field_name is not None: + payload["referenceFieldName"] = field.reference_field_name + return payload + + @staticmethod + def _build_sql_type_constraints( + ftype: EntityFieldDataType, field: EntityCreateFieldOptions + ) -> Dict[str, Any]: + """Return the ``sqlType`` constraint fields required for ``ftype``. + + Caller-supplied values override defaults where the type accepts them; + types that take no constraints (UUID, DATETIME, CHOICE_SET_SINGLE, + AUTO_NUMBER) return an empty dict. 
+ """ + d = ENTITY_FIELD_CONSTRAINT_DEFAULTS + if ftype is EntityFieldDataType.STRING: + return {"lengthLimit": field.length_limit or d["STRING_LENGTH_LIMIT"]} + if ftype is EntityFieldDataType.MULTILINE_TEXT: + return { + "lengthLimit": field.length_limit or d["MULTILINE_TEXT_LENGTH_LIMIT"] + } + if ftype is EntityFieldDataType.DECIMAL: + return { + "lengthLimit": d["DECIMAL_LENGTH_LIMIT"], + "decimalPrecision": ( + field.decimal_precision + if field.decimal_precision is not None + else d["DECIMAL_PRECISION"] + ), + "maxValue": ( + field.max_value + if field.max_value is not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype is EntityFieldDataType.BOOLEAN: + return {"lengthLimit": d["BOOLEAN_LENGTH_LIMIT"]} + if ftype in ( + EntityFieldDataType.DATE, + EntityFieldDataType.DATETIME_WITH_TZ, + ): + return {"lengthLimit": d["DATE_LENGTH_LIMIT"]} + if ftype in (EntityFieldDataType.INTEGER, EntityFieldDataType.BIG_INTEGER): + return { + "maxValue": ( + field.max_value + if field.max_value is not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype in (EntityFieldDataType.FLOAT, EntityFieldDataType.DOUBLE): + return { + "decimalPrecision": ( + field.decimal_precision + if field.decimal_precision is not None + else d["DECIMAL_PRECISION"] + ), + "maxValue": ( + field.max_value + if field.max_value is not None + else d["NUMERIC_MAX_VALUE"] + ), + "minValue": ( + field.min_value + if field.min_value is not None + else d["NUMERIC_MIN_VALUE"] + ), + } + if ftype in (EntityFieldDataType.FILE, EntityFieldDataType.RELATIONSHIP): + return {"lengthLimit": d["UNIQUEIDENTIFIER_LENGTH_LIMIT"]} + if ftype is EntityFieldDataType.CHOICE_SET_MULTIPLE: + return {"lengthLimit": d["CHOICE_SET_MULTIPLE_LENGTH_LIMIT"]} + # UUID, DATETIME, CHOICE_SET_SINGLE, AUTO_NUMBER — no constraints 
+ return {} + + @staticmethod + def _validate_name(name: str, context: str) -> None: + r"""Validate an entity or field name. + + Names must match ``^[a-zA-Z]\w*$`` and be 3-100 characters long. Field + names additionally cannot collide with the system-reserved field names + in :data:`RESERVED_FIELD_NAMES`; the reserved-name check runs first so + that short reserved names produce a more informative error. + """ + if context == "field" and name in RESERVED_FIELD_NAMES: + reserved = ", ".join(sorted(RESERVED_FIELD_NAMES)) + raise ValueError( + f"Field name {name!r} is reserved. Reserved names: {reserved}." + ) + if not (3 <= len(name) <= 100) or not _NAME_RE.match(name): + raise ValueError( + f"Invalid {context} name {name!r}. Must start with a letter, " + "contain only letters, numbers, and underscores, " + "and be 3-100 characters." + ) + + @staticmethod + def _validate_field_constraints( + ftype: EntityFieldDataType, field: EntityCreateFieldOptions + ) -> None: + """Validate caller-supplied per-field constraints. + + Rejects constraints that ``ftype`` does not accept (e.g. + ``decimal_precision`` on ``STRING``), values outside the inclusive + range declared in :data:`ENTITY_FIELD_CONSTRAINT_SPEC`, and + ``min_value`` greater than or equal to ``max_value`` when both are + supplied. + """ + spec = ENTITY_FIELD_CONSTRAINT_SPEC.get(ftype, {}) + provided: Dict[str, Any] = {} + for attr in ("length_limit", "max_value", "min_value", "decimal_precision"): + value = getattr(field, attr) + if value is not None: + provided[attr] = value + + unsupported = [name for name in provided if name not in spec] + if unsupported: + allowed = ", ".join(sorted(spec.keys())) or "none" + raise ValueError( + f"Field {field.field_name!r} of type {ftype.value} does not accept " + f"{', '.join(sorted(unsupported))}. Allowed constraints: {allowed}." 
+ ) + + for name, value in provided.items(): + low, high = spec[name] + if not (low <= value <= high): + raise ValueError( + f"Field {field.field_name!r} of type {ftype.value}: " + f"{name}={value} is out of range [{low}, {high}]." + ) + + if ( + field.min_value is not None + and field.max_value is not None + and field.min_value >= field.max_value + ): + raise ValueError( + f"Field {field.field_name!r}: min_value ({field.min_value}) must be " + f"strictly less than max_value ({field.max_value})." + ) + + @staticmethod + def _delete_entity_spec(entity_id: str) -> RequestSpec: + return RequestSpec( + method="DELETE", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_id}"), + ) + + @staticmethod + def _update_entity_metadata_spec( + entity_id: str, + metadata: Union[EntityMetadataUpdateOptions, Dict[str, Any]], + ) -> RequestSpec: + """Build the PATCH request spec for updating entity metadata. + + Dict inputs are validated through :class:`EntityMetadataUpdateOptions` + so that snake_case keys (``display_name``) and camelCase keys + (``displayName``) are both accepted and serialized to the API field + names the backend expects. + """ + if not isinstance(metadata, EntityMetadataUpdateOptions): + metadata = EntityMetadataUpdateOptions.model_validate(metadata) + body = metadata.model_dump(by_alias=True, exclude_none=True) + return RequestSpec( + method="PATCH", + endpoint=Endpoint(f"datafabric_/api/Entity/{entity_id}/metadata"), + json=body, + ) + + @staticmethod + def _extract_entity_id(response: Response) -> str: + """Return the new entity id from a create-entity response. + + Accepts both a bare JSON string id and a JSON object containing + ``id`` or ``entityId``. 
+ """ + try: + body = response.json() + except Exception: + return response.text.strip().strip('"') + if isinstance(body, str): + return body + if isinstance(body, dict): + for key in ("id", "Id", "entityId", "EntityId"): + value = body.get(key) + if isinstance(value, str): + return value + return response.text.strip().strip('"') + def _list_choicesets_spec(self) -> RequestSpec: return RequestSpec( method="GET", diff --git a/packages/uipath-platform/src/uipath/platform/entities/entities.py b/packages/uipath-platform/src/uipath/platform/entities/entities.py index 48c8dce07..c20d95b74 100644 --- a/packages/uipath-platform/src/uipath/platform/entities/entities.py +++ b/packages/uipath-platform/src/uipath/platform/entities/entities.py @@ -2,18 +2,20 @@ from __future__ import annotations -from enum import Enum +from enum import Enum, IntEnum from types import EllipsisType from typing import ( TYPE_CHECKING, Any, Dict, + Iterator, List, Optional, Type, Union, get_args, get_origin, + overload, ) from pydantic import AliasChoices, BaseModel, ConfigDict, Field, create_model @@ -82,8 +84,8 @@ class ExternalConnection(BaseModel): id: str connection_id: str = Field(alias="connectionId") element_instance_id: str = Field(alias="elementInstanceId") - folder_id: str = Field(alias="folderKey") # named folderKey in TS SDK - connector_id: str = Field(alias="connectorKey") # named connectorKey in TS SDK + folder_id: str = Field(alias="folderKey") + connector_id: str = Field(alias="connectorKey") connector_name: str = Field(alias="connectorName") connection_name: str = Field(alias="connectionName") @@ -257,7 +259,7 @@ class EntityRecord(BaseModel): "extra": "allow", } - id: str = Field(alias="Id") # Mandatory field validated by Pydantic + id: str = Field(alias="Id") @classmethod def from_data( @@ -356,6 +358,25 @@ class Entity(BaseModel): id: str +class FailureRecord(BaseModel): + """A record that failed to insert/update/delete in a batch operation. 
class FailureRecord(BaseModel):
    """A record that failed to insert/update/delete in a batch operation.

    Backend error responses for failed records do not always include a valid
    ``Id`` field — this model accepts arbitrary shapes so the caller can
    inspect ``error`` text and the original ``record`` payload.
    """

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
        extra="allow",
    )

    # All fields are optional: error bodies vary by backend version.
    id: Optional[str] = Field(default=None, alias="Id")
    error: Optional[str] = Field(default=None)
    record: Optional[Dict[str, Any]] = Field(default=None)


class EntityRecordsBatchResponse(BaseModel):
    """Per-record outcome of a batch insert/update/delete call."""

    model_config = ConfigDict(
        validate_by_name=True,
        validate_by_alias=True,
    )

    success_records: List[EntityRecord] = Field(
        default_factory=list, alias="successRecords"
    )
    failure_records: List[FailureRecord] = Field(
        default_factory=list, alias="failureRecords"
    )


class EntityRecordsListResponse(List[EntityRecord]):
    """List of EntityRecord with pagination metadata.

    Subclasses ``list`` so existing call sites that iterate, index, or call
    ``len()`` continue to work; new fields ``total_count``, ``has_next_page``,
    and ``next_cursor`` expose pagination information returned by the backend.
    """

    def __init__(
        self,
        items: Optional[List[EntityRecord]] = None,
        total_count: int = 0,
        has_next_page: bool = False,
        next_cursor: Optional[str] = None,
    ) -> None:
        """Construct from a list of records plus pagination metadata."""
        super().__init__(items or [])
        self.total_count = total_count
        self.has_next_page = has_next_page
        self.next_cursor = next_cursor


class LogicalOperator(IntEnum):
    """Logical operator for combining query filter groups."""

    And = 0
    Or = 1


class QueryFilterOperator(str, Enum):
    """Comparison operators supported by the structured query API."""

    Equals = "="
    NotEquals = "!="
    GreaterThan = ">"
    LessThan = "<"
    GreaterThanOrEqual = ">="
    LessThanOrEqual = "<="
    Contains = "contains"
    NotContains = "not contains"
    StartsWith = "startswith"
    EndsWith = "endswith"
    In = "in"
    NotIn = "not in"


class EntityQueryFilter(BaseModel):
    """A single filter condition for querying entity records."""

    model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)

    field_name: str = Field(alias="fieldName")
    operator: QueryFilterOperator
    # Scalar operators use ``value``; ``in`` / ``not in`` use ``value_list``.
    value: Optional[str] = None
    value_list: Optional[List[str]] = Field(default=None, alias="valueList")


class EntityQueryFilterGroup(BaseModel):
    """A group of query filters combined with a logical operator."""

    model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)

    logical_operator: Optional[LogicalOperator] = Field(
        default=None, alias="logicalOperator"
    )
    continue_logical_operator: Optional[LogicalOperator] = Field(
        default=None, alias="continueLogicalOperator"
    )
    query_filters: Optional[List[EntityQueryFilter]] = Field(
        default=None, alias="queryFilters"
    )
    # Groups nest recursively to express arbitrary boolean trees.
    filter_groups: Optional[List["EntityQueryFilterGroup"]] = Field(
        default=None, alias="filterGroups"
    )


class EntityQuerySortOption(BaseModel):
    """Sort option for query results."""

    model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)

    field_name: str = Field(alias="fieldName")
    is_descending: Optional[bool] = Field(default=None, alias="isDescending")


class EntityAggregateFunction(str, Enum):
    """Aggregate functions supported by the Data Fabric query API."""

    Count = "COUNT"
    Sum = "SUM"
    Avg = "AVG"
    Min = "MIN"
    Max = "MAX"


class EntityAggregate(BaseModel):
    """A single aggregate expression to apply during a query."""

    model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)

    function: EntityAggregateFunction
    field: str
    alias: Optional[str] = None


class EntityJoin(BaseModel):
    """Multi-entity JOIN definition for cross-entity queries."""

    model_config = ConfigDict(
        validate_by_name=True, validate_by_alias=True, extra="allow"
    )

    entity_name: Optional[str] = Field(default=None, alias="entityName")
    join_type: Optional[str] = Field(default=None, alias="joinType")
    join_field_name: Optional[str] = Field(default=None, alias="joinFieldName")
    related_entity_name: Optional[str] = Field(default=None, alias="relatedEntityName")
    related_field_name: Optional[str] = Field(default=None, alias="relatedFieldName")


class EntityBinning(BaseModel):
    """A binning (GROUP BY/aggregation) clause for V2 query endpoint."""

    model_config = ConfigDict(
        validate_by_name=True, validate_by_alias=True, extra="allow"
    )

    field_name: Optional[str] = Field(default=None, alias="fieldName")
    aggregate_function: Optional[EntityAggregateFunction] = Field(
        default=None, alias="aggregateFunction"
    )
    alias: Optional[str] = None


class EntityQueryRecordsResponse(BaseModel):
    """Response from querying entity records.

    Iteration, ``len()``, and indexing all delegate to ``items`` so the
    response can be used like a plain list of records.
    """

    model_config = ConfigDict(validate_by_name=True, validate_by_alias=True)

    items: List[EntityRecord] = Field(default_factory=list)
    total_count: int = Field(default=0, alias="totalCount")
    has_next_page: bool = Field(default=False, alias="hasNextPage")
    next_cursor: Optional[str] = Field(default=None, alias="nextCursor")

    def __iter__(self) -> Iterator[EntityRecord]:  # type: ignore[override]
        """Iterate over records (delegates to ``self.items``)."""
        return iter(self.items)

    def __len__(self) -> int:
        """Return the number of records (delegates to ``self.items``)."""
        return len(self.items)

    @overload
    def __getitem__(self, index: int) -> EntityRecord: ...

    @overload
    def __getitem__(self, index: slice) -> List[EntityRecord]: ...

    def __getitem__(
        self, index: Union[int, slice]
    ) -> Union[EntityRecord, List[EntityRecord]]:
        """Index or slice records (delegates to ``self.items``)."""
        return self.items[index]
Field(default=None, alias="nextCursor") + + def __iter__(self) -> Iterator[EntityRecord]: # type: ignore[override] + """Iterate over records (delegates to ``self.items``).""" + return iter(self.items) + + def __len__(self) -> int: + """Return the number of records (delegates to ``self.items``).""" + return len(self.items) + + @overload + def __getitem__(self, index: int) -> EntityRecord: ... + + @overload + def __getitem__(self, index: slice) -> List[EntityRecord]: ... + + def __getitem__( + self, index: Union[int, slice] + ) -> Union[EntityRecord, List[EntityRecord]]: + """Index or slice records (delegates to ``self.items``).""" + return self.items[index] + + +class EntityFieldDataType(str, Enum): + """User-facing entity field data type names accepted by ``create_entity``.""" + + UUID = "UUID" + STRING = "STRING" + INTEGER = "INTEGER" + DATETIME = "DATETIME" + DATETIME_WITH_TZ = "DATETIME_WITH_TZ" + DECIMAL = "DECIMAL" + FLOAT = "FLOAT" + DOUBLE = "DOUBLE" + DATE = "DATE" + BOOLEAN = "BOOLEAN" + BIG_INTEGER = "BIG_INTEGER" + MULTILINE_TEXT = "MULTILINE_TEXT" + FILE = "FILE" + CHOICE_SET_SINGLE = "CHOICE_SET_SINGLE" + CHOICE_SET_MULTIPLE = "CHOICE_SET_MULTIPLE" + AUTO_NUMBER = "AUTO_NUMBER" + RELATIONSHIP = "RELATIONSHIP" + + +# Maps the user-facing EntityFieldDataType to the ``(sqlType.name, fieldDisplayType)`` +# tuple expected by the backend when creating an entity. ``sqlType.name`` is +# the raw SQL Server type the backend persists; ``fieldDisplayType`` controls +# how the field renders in the UI. 
+ENTITY_SCHEMA_FIELD_TYPE_MAP: Dict[EntityFieldDataType, "tuple[str, str]"] = { + EntityFieldDataType.UUID: ("UNIQUEIDENTIFIER", "Basic"), + EntityFieldDataType.STRING: ("NVARCHAR", "Basic"), + EntityFieldDataType.INTEGER: ("INT", "Basic"), + EntityFieldDataType.DATETIME: ("DATETIME2", "Basic"), + EntityFieldDataType.DATETIME_WITH_TZ: ("DATETIMEOFFSET", "Basic"), + EntityFieldDataType.DECIMAL: ("DECIMAL", "Basic"), + EntityFieldDataType.FLOAT: ("FLOAT", "Basic"), + EntityFieldDataType.DOUBLE: ("REAL", "Basic"), + EntityFieldDataType.DATE: ("DATE", "Basic"), + EntityFieldDataType.BOOLEAN: ("BIT", "Basic"), + EntityFieldDataType.BIG_INTEGER: ("BIGINT", "Basic"), + EntityFieldDataType.MULTILINE_TEXT: ("MULTILINE", "Basic"), + EntityFieldDataType.FILE: ("UNIQUEIDENTIFIER", "File"), + EntityFieldDataType.CHOICE_SET_SINGLE: ("INT", "ChoiceSetSingle"), + EntityFieldDataType.CHOICE_SET_MULTIPLE: ("NVARCHAR", "ChoiceSetMultiple"), + EntityFieldDataType.AUTO_NUMBER: ("DECIMAL", "AutoNumber"), + EntityFieldDataType.RELATIONSHIP: ("UNIQUEIDENTIFIER", "Relationship"), +} + +# Default and fixed sqlType constraint values applied when the caller does +# not supply them. The backend requires these on field creation — without +# them the field is stored in an incomplete state and the UI later fails +# with "Field type cannot be changed" when editing advanced options. +ENTITY_FIELD_CONSTRAINT_DEFAULTS: Dict[str, int] = { + "STRING_LENGTH_LIMIT": 200, + "MULTILINE_TEXT_LENGTH_LIMIT": 200, + "DECIMAL_LENGTH_LIMIT": 1000, + "DECIMAL_PRECISION": 2, + "BOOLEAN_LENGTH_LIMIT": 100, + "DATE_LENGTH_LIMIT": 1000, + "UNIQUEIDENTIFIER_LENGTH_LIMIT": 300, + "CHOICE_SET_MULTIPLE_LENGTH_LIMIT": 4000, + "NUMERIC_MAX_VALUE": 1_000_000_000_000, + "NUMERIC_MIN_VALUE": -1_000_000_000_000, +} + +# Per-field-type spec describing which user-supplied constraints are valid +# and their inclusive ranges. 
Field types absent from this map (BOOLEAN, +# DATE, DATETIME, DATETIME_WITH_TZ, FILE, RELATIONSHIP, UUID, CHOICE_SET_*, +# AUTO_NUMBER) accept no user-supplied constraints — passing one raises +# ``ValueError`` so the caller gets a clear local error before any HTTP call. +_MAX_SAFE_INTEGER = 9_007_199_254_740_991 + +ENTITY_FIELD_CONSTRAINT_SPEC: Dict[ + EntityFieldDataType, Dict[str, "tuple[float, float]"] +] = { + EntityFieldDataType.STRING: { + "length_limit": (1, 4000), + }, + EntityFieldDataType.MULTILINE_TEXT: { + "length_limit": (1, 10000), + }, + EntityFieldDataType.INTEGER: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + }, + EntityFieldDataType.BIG_INTEGER: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + }, + EntityFieldDataType.DECIMAL: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, + EntityFieldDataType.FLOAT: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, + EntityFieldDataType.DOUBLE: { + "max_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "min_value": (-_MAX_SAFE_INTEGER, _MAX_SAFE_INTEGER), + "decimal_precision": (0, 10), + }, +} + +RESERVED_FIELD_NAMES = frozenset( + ["Id", "CreatedBy", "CreateTime", "UpdatedBy", "UpdateTime"] +) +"""Field names reserved by the backend — using one as a user field name is rejected.""" + + +class EntityCreateFieldOptions(BaseModel): + """User-facing field definition for creating or updating entity schemas.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + field_name: str = Field(alias="fieldName") + type: Optional[EntityFieldDataType] = Field( + default=EntityFieldDataType.STRING, alias="type" + ) + display_name: 
Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = None + is_required: Optional[bool] = Field(default=None, alias="isRequired") + is_unique: Optional[bool] = Field(default=None, alias="isUnique") + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + is_encrypted: Optional[bool] = Field(default=None, alias="isEncrypted") + default_value: Optional[str] = Field(default=None, alias="defaultValue") + length_limit: Optional[int] = Field(default=None, alias="lengthLimit") + max_value: Optional[float] = Field(default=None, alias="maxValue") + min_value: Optional[float] = Field(default=None, alias="minValue") + decimal_precision: Optional[int] = Field(default=None, alias="decimalPrecision") + choice_set_id: Optional[str] = Field(default=None, alias="choiceSetId") + reference_entity_name: Optional[str] = Field( + default=None, alias="referenceEntityName" + ) + reference_field_name: Optional[str] = Field( + default=None, alias="referenceFieldName" + ) + + +class EntityCreateOptions(BaseModel): + """Options for creating a new Data Fabric entity.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + display_name: Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = None + folder_key: Optional[str] = Field(default=None, alias="folderKey") + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + is_analytics_enabled: Optional[bool] = Field( + default=None, alias="isAnalyticsEnabled" + ) + external_fields: Optional[List[Dict[str, Any]]] = Field( + default=None, alias="externalFields" + ) + + +class EntityMetadataUpdateOptions(BaseModel): + """Options for updating an entity's metadata via PATCH /metadata.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + display_name: Optional[str] = Field(default=None, alias="displayName") + description: Optional[str] = 
None + is_rbac_enabled: Optional[bool] = Field(default=None, alias="isRbacEnabled") + + +class EntityImportRecordsResponse(BaseModel): + """Response from a bulk import operation.""" + + model_config = ConfigDict( + validate_by_name=True, validate_by_alias=True, extra="allow" + ) + + total_records: int = Field(default=0, alias="totalRecords") + inserted_records: int = Field(default=0, alias="insertedRecords") + error_file_link: Optional[str] = Field(default=None, alias="errorFileLink") class EntityRouting(BaseModel): @@ -412,3 +785,4 @@ class EntitySetResolution(BaseModel): Entity.model_rebuild() +EntityQueryFilterGroup.model_rebuild() diff --git a/packages/uipath-platform/tests/services/test_entities_service.py b/packages/uipath-platform/tests/services/test_entities_service.py index 29ce6fb79..2978acbcf 100644 --- a/packages/uipath-platform/tests/services/test_entities_service.py +++ b/packages/uipath-platform/tests/services/test_entities_service.py @@ -1096,3 +1096,1102 @@ def test_get_choiceset_values_empty( values = service.get_choiceset_values(choiceset_id) assert values == [] + + +class TestEntitiesServiceNewMethods: + """Single-record, structured-query, attachment, schema and bulk-import tests.""" + + def test_insert_record_fires_post_with_expansion_level( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityRecord + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert?expansionLevel=2", + status_code=200, + json={"Id": "rec-1", "name": "alice"}, + ) + + record = service.insert_record( + entity_key=str(entity_key), + data={"name": "alice"}, + expansion_level=2, + ) + + assert isinstance(record, EntityRecord) + assert record.id == "rec-1" + + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "POST" + assert 
json.loads(sent.content) == {"name": "alice"} + + async def test_insert_record_async( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert", + status_code=200, + json={"Id": "rec-1"}, + ) + + record = await service.insert_record_async( + entity_key=str(entity_key), data={"name": "bob"} + ) + assert record.id == "rec-1" + + def test_get_record( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + record_id = "12345" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}?expansionLevel=1", + status_code=200, + json={"Id": record_id, "name": "found"}, + ) + + record = service.get_record( + entity_key=str(entity_key), record_id=record_id, expansion_level=1 + ) + + assert record.id == record_id + + def test_update_record_accepts_dict( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-9" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update/{record_id}", + status_code=200, + json={"Id": record_id, "name": "updated"}, + ) + + record = service.update_record( + entity_key=str(entity_key), + record_id=record_id, + data={"name": "updated"}, + ) + + assert record.id == record_id + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == {"name": "updated"} + + def test_delete_record_uses_http_delete( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = 
uuid.uuid4() + record_id = "rec-9" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}", + method="DELETE", + status_code=200, + ) + + service.delete_record(entity_key=str(entity_key), record_id=record_id) + + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "DELETE" + + def test_query_records_v1_with_filter_and_pagination( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import ( + EntityQueryFilter, + EntityQueryFilterGroup, + EntityQuerySortOption, + LogicalOperator, + QueryFilterOperator, + ) + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={ + "value": [{"Id": "1", "name": "alice"}, {"Id": "2", "name": "bob"}], + "totalRecordCount": 5, + }, + ) + + result = service.query_records( + entity_key=str(entity_key), + filter_group=EntityQueryFilterGroup( + logical_operator=LogicalOperator.And, + query_filters=[ + EntityQueryFilter( + field_name="status", + operator=QueryFilterOperator.Equals, + value="active", + ) + ], + ), + sort_options=[EntityQuerySortOption(field_name="name", is_descending=True)], + selected_fields=["Id", "name"], + start=0, + limit=2, + expansion_level=1, + ) + + assert result.total_count == 5 + assert len(result.items) == 2 + assert result.has_next_page is True + # Backend doesn't return next_cursor on this endpoint — caller paginates + # by passing the next ``start`` themselves. 
+ assert result.next_cursor is None + + sent = httpx_mock.get_request() + assert sent is not None + assert "/query" in str(sent.url) and "/v2/" not in str(sent.url) + # expansionLevel is a URL query param, not body + assert sent.url.params.get("expansionLevel") == "1" + body = json.loads(sent.content) + assert body["filterGroup"]["logicalOperator"] == 0 # And + assert body["filterGroup"]["queryFilters"][0]["fieldName"] == "status" + assert body["sortOptions"][0]["fieldName"] == "name" + assert body["selectedFields"] == ["Id", "name"] + # start/limit go in BODY, not as $top/$skip query params + assert body["start"] == 0 + assert body["limit"] == 2 + + def test_query_records_aggregate_response_handles_id_less_rows( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + """Aggregate / GROUP BY rows lack ``Id`` — must not raise.""" + from uipath.platform.entities import ( + EntityAggregate, + EntityAggregateFunction, + ) + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={ + "value": [ + {"status": "active", "total": 12}, + {"status": "inactive", "total": 7}, + ], + "totalRecordCount": 2, + }, + ) + + result = service.query_records( + entity_key=str(entity_key), + selected_fields=["status"], + group_by=["status"], + aggregates=[ + EntityAggregate( + function=EntityAggregateFunction.Count, + field="Id", + alias="total", + ) + ], + ) + + assert result.total_count == 2 + assert len(result.items) == 2 + # Aggregate rows are exposed as EntityRecord with extra fields, no Id. 
+ sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body["aggregates"][0]["function"] == "COUNT" + assert body["aggregates"][0]["alias"] == "total" + assert body["groupBy"] == ["status"] + + def test_query_records_v2_when_binnings_provided( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityAggregateFunction, EntityBinning + + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/v2/EntityService/entity/{entity_key}/query.*" + ), + status_code=200, + json={"value": [], "totalCount": 0}, + ) + + service.query_records( + entity_key=str(entity_key), + binnings=[ + EntityBinning( + field_name="status", + aggregate_function=EntityAggregateFunction.Count, + alias="total", + ) + ], + ) + + sent = httpx_mock.get_request() + assert sent is not None + assert "/v2/EntityService/" in str(sent.url) + + def test_upload_attachment_sends_multipart( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}?expansionLevel=1", + method="POST", + status_code=200, + json={"Id": record_id, "doc": "uploaded"}, + ) + + result = service.upload_attachment( + entity_id=entity_id, + record_id=record_id, + field_name=field_name, + file=b"hello world", + expansion_level=1, + ) + + assert result.get("doc") == "uploaded" + + sent = httpx_mock.get_request() + assert sent is not None + assert b"hello world" in sent.content + + def test_download_attachment_returns_bytes( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> 
None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}", + method="GET", + status_code=200, + content=b"file-content", + ) + + content = service.download_attachment( + entity_id=entity_id, record_id=record_id, field_name=field_name + ) + assert content == b"file-content" + + def test_delete_attachment( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-1" + record_id = "rec-1" + field_name = "doc" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/{entity_id}/{record_id}/{field_name}", + method="DELETE", + status_code=200, + json={}, + ) + + result = service.delete_attachment( + entity_id=entity_id, record_id=record_id, field_name=field_name + ) + assert result == {} + + def test_create_entity_returns_id( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityCreateOptions, + EntityFieldDataType, + ) + + new_entity_id = str(uuid.uuid4()) + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json=new_entity_id, + ) + + created_id = service.create_entity( + name="productCatalog", + fields=[ + EntityCreateFieldOptions( + field_name="productName", + type=EntityFieldDataType.STRING, + is_required=True, + length_limit=200, + ), + ], + options=EntityCreateOptions( + display_name="Product Catalog", + description="Catalog of products", + is_rbac_enabled=True, + ), + ) + + assert created_id == new_entity_id + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body["displayName"] == "Product Catalog" + assert 
body["entityDefinition"]["name"] == "productCatalog" + assert body["entityDefinition"]["fields"][0]["name"] == "productName" + assert body["entityDefinition"]["isRbacEnabled"] is True + + def test_delete_entity( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-doomed" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/{entity_id}", + method="DELETE", + status_code=200, + ) + + service.delete_entity(entity_id=entity_id) + sent = httpx_mock.get_request() + assert sent is not None + assert sent.method == "DELETE" + + def test_update_entity_metadata( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + from uipath.platform.entities import EntityMetadataUpdateOptions + + entity_id = "ent-meta" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/{entity_id}/metadata", + method="PATCH", + status_code=200, + json={}, + ) + + service.update_entity_metadata( + entity_id=entity_id, + metadata=EntityMetadataUpdateOptions( + display_name="New Name", is_rbac_enabled=False + ), + ) + + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + assert body == {"displayName": "New Name", "isRbacEnabled": False} + + def test_import_records( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_id = "ent-imp" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_id}/bulk-upload", + method="POST", + status_code=200, + json={ + "totalRecords": 10, + "insertedRecords": 9, + "errorFileLink": "https://example.com/errors.csv", + }, + ) + + result = service.import_records(entity_id=entity_id, file=b"a,b,c\n1,2,3\n") + assert result.total_records == 10 + assert 
result.inserted_records == 9 + assert result.error_file_link == "https://example.com/errors.csv" + + def test_list_records_returns_paginated_metadata( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read.*" + ), + status_code=200, + json={ + "totalCount": 7, + "value": [{"Id": "1"}, {"Id": "2"}, {"Id": "3"}], + }, + ) + + records = service.list_records( + entity_key=str(entity_key), + start=0, + limit=3, + expansion_level=2, + filter="status eq 'active'", + orderby="name asc", + select=["Id", "name"], + expand=["Company"], + ) + + # New pagination metadata: backend totalCount surfaced verbatim. + assert records.total_count == 7 + assert records.has_next_page is True + # Backend does not currently emit next_cursor; caller paginates with start. + assert records.next_cursor is None + + # Backward-compat: behaves as a list. 
+ assert isinstance(records, list) + assert len(records) == 3 + assert records[0].id == "1" + + sent = httpx_mock.get_request() + assert sent is not None + params = sent.url.params + assert params.get("expansionLevel") == "2" + assert params.get("$filter") == "status eq 'active'" + assert params.get("$orderby") == "name asc" + assert params.get("$select") == "Id,name" + assert params.get("$expand") == "Company" + + def test_insert_records_passes_expansion_level_and_fail_on_first( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/insert-batch.*" + ), + status_code=200, + json={"successRecords": [{"Id": "1"}], "failureRecords": []}, + ) + + service.insert_records( + entity_key=str(entity_key), + records=[{"name": "alice"}], + expansion_level=1, + fail_on_first=True, + ) + + sent = httpx_mock.get_request() + assert sent is not None + params = sent.url.params + assert params.get("expansionLevel") == "1" + assert params.get("failOnFirst") == "true" + # Records are normalized to dicts before being sent. + assert json.loads(sent.content) == [{"name": "alice"}] + + def test_update_records_recovers_failure_records_from_4xx( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + """A 400 response that lists per-record failures should parse into the response. + + The caller receives an ``EntityRecordsBatchResponse`` with the failed + records populated rather than an exception, so unknown record ids on + update can be handled the same way as any other batch failure. 
+ """ + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=400, + json={ + "successRecords": [], + "failureRecords": [ + {"error": "Record not found", "record": {"Id": "missing"}} + ], + }, + ) + + result = service.update_records( + entity_key=str(entity_key), + records=[{"Id": "missing", "name": "x"}], + ) + + assert len(result.failure_records) == 1 + assert result.failure_records[0].error == "Record not found" + + def test_delete_records_recovers_failure_records_from_4xx( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: str, + tenant: str, + version: str, + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete-batch", + method="POST", + status_code=400, + json={ + "successRecords": [], + "failureRecords": [{"error": "not found"}], + }, + ) + + result = service.delete_records( + entity_key=str(entity_key), record_ids=["missing"] + ) + + assert result.failure_records[0].error == "not found" + + def test_record_to_dict_accepts_dict_pydantic_and_object(self) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + # dict + assert EntitiesService._record_to_dict({"a": 1}) == {"a": 1} + # Pydantic model — uses model_dump + result = EntitiesService._record_to_dict( + EntityCreateFieldOptions(field_name="x") + ) + assert result["fieldName"] == "x" + # Object with __dict__ + from dataclasses import dataclass + + @dataclass + class Rec: + name: str + + assert EntitiesService._record_to_dict(Rec(name="bob")) == {"name": "bob"} + + +class TestEntitiesServiceCreateEntitySqlTypeMapping: + """Verify ``create_entity`` produces the SQL types and constraint defaults the backend expects.""" + + def _captured_field( + self, + httpx_mock: HTTPXMock, + service: EntitiesService, + base_url: str, + org: 
str, + tenant: str, + field_options, + ): + from uipath.platform.entities import EntityCreateOptions + + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json="00000000-0000-0000-0000-000000000001", + ) + service.create_entity( + name="myEntity", + fields=[field_options], + options=EntityCreateOptions(display_name="My Entity"), + ) + sent = httpx_mock.get_request() + assert sent is not None + body = json.loads(sent.content) + return body["entityDefinition"]["fields"][0] + + def test_string_field_maps_to_nvarchar_with_default_length( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="productName", type=EntityFieldDataType.STRING + ), + ) + assert f["sqlType"]["name"] == "NVARCHAR" + assert f["sqlType"]["lengthLimit"] == 200 # default + assert f["fieldDisplayType"] == "Basic" + + def test_decimal_field_includes_precision_and_value_bounds( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="price", + type=EntityFieldDataType.DECIMAL, + decimal_precision=4, + ), + ) + assert f["sqlType"]["name"] == "DECIMAL" + assert f["sqlType"]["decimalPrecision"] == 4 + assert f["sqlType"]["lengthLimit"] == 1000 + assert f["sqlType"]["maxValue"] == 1_000_000_000_000 + assert f["sqlType"]["minValue"] == -1_000_000_000_000 + + def test_boolean_field_maps_to_bit( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = 
self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="isActive", type=EntityFieldDataType.BOOLEAN + ), + ) + assert f["sqlType"]["name"] == "BIT" + assert f["sqlType"]["lengthLimit"] == 100 + + def test_file_field_maps_to_uniqueidentifier_with_file_display_type( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + f = self._captured_field( + httpx_mock, + service, + base_url, + org, + tenant, + EntityCreateFieldOptions( + field_name="document", type=EntityFieldDataType.FILE + ), + ) + assert f["sqlType"]["name"] == "UNIQUEIDENTIFIER" + assert f["fieldDisplayType"] == "File" + assert f["sqlType"]["lengthLimit"] == 300 + + +class TestEntitiesServiceValidation: + """Client-side validation rejects bad entity / field definitions before any HTTP call.""" + + def test_create_entity_rejects_invalid_entity_name(self, service) -> None: + + with pytest.raises(ValueError, match="Invalid entity name"): + service.create_entity(name="1bad", fields=[]) + + def test_create_entity_rejects_invalid_field_name(self, service) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + with pytest.raises(ValueError, match="Invalid field name"): + service.create_entity( + name="goodEntity", + fields=[EntityCreateFieldOptions(field_name="9bad")], + ) + + def test_create_entity_rejects_reserved_field_name(self, service) -> None: + from uipath.platform.entities import EntityCreateFieldOptions + + with pytest.raises(ValueError, match="reserved"): + service.create_entity( + name="goodEntity", + fields=[EntityCreateFieldOptions(field_name="Id")], + ) + + def test_create_entity_rejects_unsupported_constraint_for_type( + self, service + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="does not 
accept"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.STRING, + decimal_precision=2, # not allowed on STRING + ) + ], + ) + + def test_create_entity_rejects_out_of_range_constraint(self, service) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="out of range"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.STRING, + length_limit=99999, # > 4000 + ) + ], + ) + + def test_create_entity_rejects_min_ge_max(self, service) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + with pytest.raises(ValueError, match="strictly less than"): + service.create_entity( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", + type=EntityFieldDataType.INTEGER, + min_value=100, + max_value=10, + ) + ], + ) + + +class TestEntitiesServiceAsyncAndEdgeCases: + async def test_get_record_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-1" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/read/{record_id}", + status_code=200, + json={"Id": record_id, "name": "found"}, + ) + record = await service.get_record_async( + entity_key=str(entity_key), record_id=record_id + ) + assert record.id == record_id + + async def test_query_records_async_v1( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=re.compile( + rf"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/query" + ), + status_code=200, + json={"value": [{"Id": "1"}], "totalRecordCount": 1}, + ) + result = await 
service.query_records_async(entity_key=str(entity_key)) + assert result.total_count == 1 + + async def test_delete_record_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + record_id = "rec-1" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/delete/{record_id}", + method="DELETE", + status_code=200, + ) + await service.delete_record_async( + entity_key=str(entity_key), record_id=record_id + ) + + async def test_create_entity_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + from uipath.platform.entities import ( + EntityCreateFieldOptions, + EntityFieldDataType, + ) + + new_id = "00000000-0000-0000-0000-000000000123" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity", + method="POST", + status_code=200, + json=new_id, + ) + result = await service.create_entity_async( + name="goodEntity", + fields=[ + EntityCreateFieldOptions( + field_name="myField", type=EntityFieldDataType.STRING + ) + ], + ) + assert result == new_id + + async def test_delete_entity_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1", + method="DELETE", + status_code=200, + ) + await service.delete_entity_async(entity_id="ent-1") + + async def test_update_entity_metadata_async_with_dict( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1/metadata", + method="PATCH", + status_code=200, + json={}, + ) + # Accepts a plain dict too + await service.update_entity_metadata_async( + entity_id="ent-1", metadata={"displayName": "X", "description": "Y"} + ) + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == {"displayName": "X", "description": "Y"} + + def 
test_update_entity_metadata_normalizes_snake_case_dict_keys( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """Snake_case dict keys must be sent to the backend as camelCase.""" + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Entity/ent-1/metadata", + method="PATCH", + status_code=200, + json={}, + ) + service.update_entity_metadata( + entity_id="ent-1", + metadata={ + "display_name": "New Name", + "description": "Updated", + "is_rbac_enabled": True, + }, + ) + sent = httpx_mock.get_request() + assert sent is not None + assert json.loads(sent.content) == { + "displayName": "New Name", + "description": "Updated", + "isRbacEnabled": True, + } + + async def test_upload_attachment_async_via_file_path( + self, httpx_mock, service, base_url, org, tenant, version, tmp_path + ) -> None: + path = tmp_path / "data.bin" + path.write_bytes(b"file-on-disk") + + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/ent/rec/doc", + method="POST", + status_code=200, + json={"Id": "rec", "doc": "ok"}, + ) + result = await service.upload_attachment_async( + entity_id="ent", + record_id="rec", + field_name="doc", + file_path=str(path), + ) + assert result["doc"] == "ok" + + sent = httpx_mock.get_request() + assert sent is not None + assert b"file-on-disk" in sent.content + + async def test_download_and_delete_attachment_async( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + url = f"{base_url}{org}{tenant}/datafabric_/api/Attachment/entity/e/r/f" + httpx_mock.add_response( + url=url, method="GET", status_code=200, content=b"bytes" + ) + httpx_mock.add_response(url=url, method="DELETE", status_code=200, json={}) + + content = await service.download_attachment_async( + entity_id="e", record_id="r", field_name="f" + ) + assert content == b"bytes" + assert ( + await service.delete_attachment_async( + entity_id="e", record_id="r", field_name="f" + ) + == {} + ) + + def 
test_open_file_rejects_both_file_and_path(self) -> None: + with pytest.raises(ValueError, match="exactly one of"): + EntitiesService._open_file(file=b"x", file_path="some/path") + + def test_open_file_rejects_neither_file_nor_path(self) -> None: + with pytest.raises(ValueError, match="exactly one of"): + EntitiesService._open_file(file=None, file_path=None) + + def test_4xx_recovery_only_400_with_strict_shape( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """5xx and 4xx other than 400 must propagate; 400 with valid shape recovers.""" + entity_key = uuid.uuid4() + # 500 with the shape — must propagate, not be silently treated as success. + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=500, + json={"successRecords": [], "failureRecords": []}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) + + def test_4xx_recovery_404_propagates( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + entity_key = uuid.uuid4() + # 404 with valid shape — still propagates because not a 400. 
+ httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=404, + json={"successRecords": [], "failureRecords": []}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) + + def test_4xx_recovery_400_unrelated_body_propagates( + self, httpx_mock, service, base_url, org, tenant, version + ) -> None: + """A 400 with an error body that lacks ``successRecords``/``failureRecords`` + must surface as an exception (so generic validation errors aren't masked).""" + entity_key = uuid.uuid4() + httpx_mock.add_response( + url=f"{base_url}{org}{tenant}/datafabric_/api/EntityService/entity/{entity_key}/update-batch", + method="POST", + status_code=400, + json={"error": "Validation failed", "code": "InvalidArg"}, + ) + from uipath.platform.errors._enriched_exception import EnrichedException + + with pytest.raises(EnrichedException): + service.update_records( + entity_key=str(entity_key), records=[{"Id": "x", "name": "y"}] + ) diff --git a/packages/uipath-platform/uv.lock b/packages/uipath-platform/uv.lock index dbea2b79a..f8fa7aced 100644 --- a/packages/uipath-platform/uv.lock +++ b/packages/uipath-platform/uv.lock @@ -1088,7 +1088,7 @@ dev = [ [[package]] name = "uipath-platform" -version = "0.1.46" +version = "0.1.47" source = { editable = "." } dependencies = [ { name = "httpx" }, diff --git a/packages/uipath/uv.lock b/packages/uipath/uv.lock index 4339a51d2..e12347835 100644 --- a/packages/uipath/uv.lock +++ b/packages/uipath/uv.lock @@ -2682,7 +2682,7 @@ dev = [ [[package]] name = "uipath-platform" -version = "0.1.46" +version = "0.1.47" source = { editable = "../uipath-platform" } dependencies = [ { name = "httpx" },