diff --git a/buf.gen.datasets.yaml b/buf.gen.datasets.yaml
index 8e4fd50..e557bae 100644
--- a/buf.gen.datasets.yaml
+++ b/buf.gen.datasets.yaml
@@ -1,3 +1,4 @@
+# please keep this file in sync with buf.gen.workflows.yaml, especially the plugin versions
 version: v2
 managed:
   enabled: true
@@ -14,3 +15,4 @@ inputs:
   - module: buf.build/tilebox/api
     paths:
       - "datasets"
+      - "tilebox"
diff --git a/buf.gen.workflows.yaml b/buf.gen.workflows.yaml
index 97822d9..e0aebd4 100644
--- a/buf.gen.workflows.yaml
+++ b/buf.gen.workflows.yaml
@@ -1,3 +1,4 @@
+# please keep this file in sync with buf.gen.datasets.yaml, especially the plugin versions
 version: v2
 managed:
   enabled: true
diff --git a/pyproject.toml b/pyproject.toml
index 695ee31..ed5bb8d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -70,6 +70,7 @@ exclude = [
     # it's auto generated, don't lint it
     "*/datasets/v1/*",
     "*/workflows/v1/*",
+    "*/tilebox/v1/*",
 ]
 
 [tool.ruff.lint]
@@ -118,6 +119,7 @@ ignore = [
     # it's auto generated
     "**/datasets/v1",
     "**/workflows/v1",
+    "**/tilebox/v1",
 ]
 
 # pyright needs to have all the dependencies installed to be able to type check
diff --git a/tilebox-datasets/pyproject.toml b/tilebox-datasets/pyproject.toml
index efe35c9..f12a90c 100644
--- a/tilebox-datasets/pyproject.toml
+++ b/tilebox-datasets/pyproject.toml
@@ -54,7 +54,7 @@ packages = ["tilebox"]
 packages = ["tilebox"]
 
 [tool.coverage.run]
-omit = ["tilebox/*/datasets/v1/*"]
+omit = ["tilebox/*/datasets/v1/*", "tilebox/*/tilebox/v1/*"]
 
 [tool.pytest.ini_options]
 minversion = "6.0"
diff --git a/tilebox-datasets/tests/data/collection.py b/tilebox-datasets/tests/data/collection.py
index aa6af96..5cf38fc 100644
--- a/tilebox-datasets/tests/data/collection.py
+++ b/tilebox-datasets/tests/data/collection.py
@@ -3,9 +3,9 @@
 
 from hypothesis.strategies import DrawFn, composite, integers, just, none, text, uuids
 
-from tests.data.time_interval import time_intervals
+from tests.query.time_interval import time_intervals
 from tilebox.datasets.data.collection import Collection, CollectionInfo
-from tilebox.datasets.data.time_interval import _EMPTY_TIME_INTERVAL
+from tilebox.datasets.query.time_interval import _EMPTY_TIME_INTERVAL
 
 
 @composite
diff --git a/tilebox-datasets/tests/data/data_access.py b/tilebox-datasets/tests/data/data_access.py
index 0eeffe9..872391e 100644
--- a/tilebox-datasets/tests/data/data_access.py
+++ b/tilebox-datasets/tests/data/data_access.py
@@ -3,9 +3,9 @@
 from hypothesis.strategies import DrawFn, booleans, composite, none, one_of, sampled_from
 from shapely import Geometry
 
-from tests.data.datapoint import datapoint_intervals
-from tests.data.time_interval import time_intervals
 from tests.data.well_known_types import shapely_polygons
+from tests.query.datapoint import id_intervals
+from tests.query.time_interval import time_intervals
 from tilebox.datasets.data.data_access import (
     QueryFilters,
     SpatialCoordinateSystem,
@@ -55,6 +55,6 @@ def spatial_filters(draw: DrawFn) -> SpatialFilter:
 @composite
 def query_filters(draw: DrawFn) -> QueryFilters:
     """A hypothesis strategy for generating random query filters"""
-    temporal_extent = draw(one_of(time_intervals(), datapoint_intervals()))
+    temporal_extent = draw(one_of(time_intervals(), id_intervals()))
     spatial_extent = draw(spatial_filters() | none())
     return QueryFilters(temporal_extent, spatial_extent)
diff --git a/tilebox-datasets/tests/data/datapoint.py b/tilebox-datasets/tests/data/datapoint.py
index 21e4573..d811e81 100644
--- a/tilebox-datasets/tests/data/datapoint.py
+++ b/tilebox-datasets/tests/data/datapoint.py
@@ -18,8 +18,6 @@
 )
 
 from tests.data.datasets import example_dataset_type_url
-from tests.data.pagination import paginations
-from tests.data.time_interval import i64_datetimes
 from tests.data.well_known_types import (
     datetime_messages,
     duration_messages,
@@ -31,44 +29,11 @@
     vec3_messages,
 )
 from tests.example_dataset.example_dataset_pb2 import ExampleDatapoint
-from tilebox.datasets.data.datapoint import (
-    AnyMessage,
-    DatapointInterval,
-    DatapointIntervalLike,
-    IngestResponse,
-    QueryResultPage,
-    RepeatedAny,
-)
-from tilebox.datasets.data.time_interval import (
-    datetime_to_timestamp,
-)
+from tests.query.pagination import paginations
+from tests.query.time_interval import i64_datetimes
+from tilebox.datasets.data.datapoint import AnyMessage, IngestResponse, QueryResultPage, RepeatedAny
 from tilebox.datasets.datasets.v1 import core_pb2
-
-
-@composite
-def datapoint_intervals(draw: DrawFn) -> DatapointInterval:
-    """A hypothesis strategy for generating random datapoint intervals"""
-    start = draw(uuids(version=4))
-    end = draw(uuids(version=4))
-    start, end = min(start, end), max(start, end)  # make sure start is before end
-
-    start_exclusive = draw(booleans())
-    end_inclusive = draw(booleans())
-
-    return DatapointInterval(start, end, start_exclusive, end_inclusive)
-
-
-@composite
-def datapoint_intervals_like(draw: DrawFn) -> DatapointIntervalLike:
-    """A hypothesis strategy for generating random datapoint intervals"""
-    interval = draw(datapoint_intervals())
-    return draw(
-        one_of(
-            just(interval),
-            just((str(interval.start_id), str(interval.end_id))),
-            just((interval.start_id, interval.end_id)),
-        )
-    )
+from tilebox.datasets.query.time_interval import datetime_to_timestamp
 
 
 @composite
diff --git a/tilebox-datasets/tests/data/test_collection.py b/tilebox-datasets/tests/data/test_collection.py
index 820545a..8651c4f 100644
--- a/tilebox-datasets/tests/data/test_collection.py
+++ b/tilebox-datasets/tests/data/test_collection.py
@@ -2,7 +2,7 @@
 
 from tests.data.collection import collection_infos, collections
 from tilebox.datasets.data.collection import Collection, CollectionInfo
-from tilebox.datasets.data.time_interval import _EMPTY_TIME_INTERVAL
+from tilebox.datasets.query.time_interval import _EMPTY_TIME_INTERVAL
 
 
 @given(collections())
diff --git a/tilebox-datasets/tests/data/test_datapoint.py b/tilebox-datasets/tests/data/test_datapoint.py
index 99d3d85..91f277d 100644
--- a/tilebox-datasets/tests/data/test_datapoint.py
+++ b/tilebox-datasets/tests/data/test_datapoint.py
@@ -1,39 +1,7 @@
 from hypothesis import given
 
-from tests.data.datapoint import (
-    anys,
-    datapoint_intervals,
-    datapoint_intervals_like,
-    ingest_datapoints_responses,
-    query_result_pages,
-    repeated_anys,
-)
-from tilebox.datasets.data.datapoint import (
-    AnyMessage,
-    DatapointInterval,
-    DatapointIntervalLike,
-    IngestResponse,
-    QueryResultPage,
-    RepeatedAny,
-)
-
-
-@given(datapoint_intervals())
-def test_datapoint_intervals_to_message_and_back(interval: DatapointInterval) -> None:
-    assert DatapointInterval.from_message(interval.to_message()) == interval
-
-
-@given(datapoint_intervals_like())
-def test_parse_datapoint_interval_from_tuple(interval: DatapointIntervalLike) -> None:
-    parsed = DatapointInterval.parse(interval)
-
-    if isinstance(interval, DatapointInterval):
-        assert parsed == interval, f"Failed parsing interval from {interval}"
-        assert parsed.start_exclusive == interval.start_exclusive
-        assert parsed.end_inclusive == interval.end_inclusive
-    else:
-        assert not parsed.start_exclusive
-        assert parsed.end_inclusive
+from tests.data.datapoint import anys, ingest_datapoints_responses, query_result_pages, repeated_anys
+from tilebox.datasets.data.datapoint import AnyMessage, IngestResponse, QueryResultPage, RepeatedAny
 
 
 @given(anys())
diff --git a/tilebox-datasets/tests/data/well_known_types.py b/tilebox-datasets/tests/data/well_known_types.py
index f708001..efd063a 100644
--- a/tilebox-datasets/tests/data/well_known_types.py
+++ b/tilebox-datasets/tests/data/well_known_types.py
@@ -10,9 +10,9 @@
 )
 from shapely import MultiPolygon, Polygon, box, to_wkb
 
-from tests.data.time_interval import i64_datetimes
-from tilebox.datasets.data.time_interval import datetime_to_timestamp
+from tests.query.time_interval import i64_datetimes
 from tilebox.datasets.datasets.v1 import well_known_types_pb2
+from tilebox.datasets.query.time_interval import datetime_to_timestamp
 
 
 @composite
diff --git a/tilebox-datasets/tests/protobuf_conversion/test_protobuf_xarray.py b/tilebox-datasets/tests/protobuf_conversion/test_protobuf_xarray.py
index 47fce0d..8b6308a 100644
--- a/tilebox-datasets/tests/protobuf_conversion/test_protobuf_xarray.py
+++ b/tilebox-datasets/tests/protobuf_conversion/test_protobuf_xarray.py
@@ -10,8 +10,8 @@
 
 from tests.data.datapoint import example_datapoints
 from tests.example_dataset.example_dataset_pb2 import ExampleDatapoint
-from tilebox.datasets.data.time_interval import timestamp_to_datetime, us_to_datetime
 from tilebox.datasets.protobuf_conversion.protobuf_xarray import MessageToXarrayConverter
+from tilebox.datasets.query.time_interval import timestamp_to_datetime, us_to_datetime
 
 
 @given(example_datapoints(generated_fields=True, missing_fields=False))
diff --git a/tilebox-datasets/tests/query/datapoint.py b/tilebox-datasets/tests/query/datapoint.py
new file mode 100644
index 0000000..dbe0c33
--- /dev/null
+++ b/tilebox-datasets/tests/query/datapoint.py
@@ -0,0 +1,29 @@
+from hypothesis.strategies import DrawFn, booleans, composite, just, one_of, uuids
+
+from tilebox.datasets.query.id_interval import IDInterval, IDIntervalLike
+
+
+@composite
+def id_intervals(draw: DrawFn) -> IDInterval:
+    """A hypothesis strategy for generating random id intervals"""
+    start = draw(uuids(version=4))
+    end = draw(uuids(version=4))
+    start, end = min(start, end), max(start, end)  # make sure start is before end
+
+    start_exclusive = draw(booleans())
+    end_inclusive = draw(booleans())
+
+    return IDInterval(start, end, start_exclusive, end_inclusive)
+
+
+@composite
+def id_intervals_like(draw: DrawFn) -> IDIntervalLike:
+    """A hypothesis strategy for generating random id interval-like values"""
+    interval = draw(id_intervals())
+    return draw(
+        one_of(
+            just(interval),
+            just((str(interval.start_id), str(interval.end_id))),
+            just((interval.start_id, interval.end_id)),
+        )
+    )
diff --git a/tilebox-datasets/tests/data/pagination.py b/tilebox-datasets/tests/query/pagination.py
similarity index 69%
rename from tilebox-datasets/tests/data/pagination.py
rename to tilebox-datasets/tests/query/pagination.py
index 50d42d9..c6b4e12 100644
--- a/tilebox-datasets/tests/data/pagination.py
+++ b/tilebox-datasets/tests/query/pagination.py
@@ -1,12 +1,6 @@
-from hypothesis.strategies import (
-    DrawFn,
-    booleans,
-    composite,
-    integers,
-    uuids,
-)
+from hypothesis.strategies import DrawFn, booleans, composite, integers, uuids
 
-from tilebox.datasets.data.pagination import Pagination
+from tilebox.datasets.query.pagination import Pagination
 
 
 @composite
diff --git a/tilebox-datasets/tests/query/test_datapoint.py b/tilebox-datasets/tests/query/test_datapoint.py
new file mode 100644
index 0000000..3bcf7dc
--- /dev/null
+++ b/tilebox-datasets/tests/query/test_datapoint.py
@@ -0,0 +1,22 @@
+from hypothesis import given
+from tests.query.datapoint import id_intervals, id_intervals_like
+
+from tilebox.datasets.query.id_interval import IDInterval, IDIntervalLike
+
+
+@given(id_intervals())
+def test_id_intervals_to_message_and_back(interval: IDInterval) -> None:
+    assert IDInterval.from_message(interval.to_message()) == interval
+
+
+@given(id_intervals_like())
+def test_parse_id_interval_from_tuple(interval: IDIntervalLike) -> None:
+    parsed = IDInterval.parse(interval)
+
+    if isinstance(interval, IDInterval):
+        assert parsed == interval, f"Failed parsing interval from {interval}"
+        assert parsed.start_exclusive == interval.start_exclusive
+        assert parsed.end_inclusive == interval.end_inclusive
+    else:
+        assert not parsed.start_exclusive
+        assert parsed.end_inclusive
diff --git a/tilebox-datasets/tests/data/test_pagination.py b/tilebox-datasets/tests/query/test_pagination.py
similarity index 63%
rename from tilebox-datasets/tests/data/test_pagination.py
rename to tilebox-datasets/tests/query/test_pagination.py
index 437c932..af500cb 100644
--- a/tilebox-datasets/tests/data/test_pagination.py
+++ b/tilebox-datasets/tests/query/test_pagination.py
@@ -1,7 +1,7 @@
 from hypothesis import given
+from tests.query.pagination import paginations
 
-from tests.data.pagination import paginations
-from tilebox.datasets.data.pagination import Pagination
+from tilebox.datasets.query.pagination import Pagination
 
 
 @given(paginations())
diff --git a/tilebox-datasets/tests/data/test_time_interval.py b/tilebox-datasets/tests/query/test_time_interval.py
similarity index 97%
rename from tilebox-datasets/tests/data/test_time_interval.py
rename to tilebox-datasets/tests/query/test_time_interval.py
index 5933aea..6e1fd5f 100644
--- a/tilebox-datasets/tests/data/test_time_interval.py
+++ b/tilebox-datasets/tests/query/test_time_interval.py
@@ -3,9 +3,9 @@
 from hypothesis import given
 from hypothesis.strategies import datetimes
 from pandas.core.tools.datetimes import DatetimeScalar
+from tests.query.time_interval import datetime_scalars, time_intervals
 
-from tests.data.time_interval import datetime_scalars, time_intervals
-from tilebox.datasets.data.time_interval import (
+from tilebox.datasets.query.time_interval import (
     _SMALLEST_POSSIBLE_TIMEDELTA,
     TimeInterval,
     _convert_to_datetime,
diff --git a/tilebox-datasets/tests/data/time_interval.py b/tilebox-datasets/tests/query/time_interval.py
similarity index 94%
rename from tilebox-datasets/tests/data/time_interval.py
rename to tilebox-datasets/tests/query/time_interval.py
index 4b29d4a..ae76d60 100644
--- a/tilebox-datasets/tests/data/time_interval.py
+++ b/tilebox-datasets/tests/query/time_interval.py
@@ -5,20 +5,10 @@
 from datetime import datetime, timezone
 
 import pandas as pd
-from hypothesis.strategies import (
-    DrawFn,
-    booleans,
-    composite,
-    datetimes,
-    just,
-    sampled_from,
-)
+from hypothesis.strategies import DrawFn, booleans, composite, datetimes, just, sampled_from
 from pandas.core.tools.datetimes import DatetimeScalar
 
-from tilebox.datasets.data.time_interval import (
-    TimeInterval,
-    datetime_to_us,
-)
+from tilebox.datasets.query.time_interval import TimeInterval, datetime_to_us
 
 # The minimum and maximum datetime that can be represented by pandas.Timestamp and are therefore supported
 # by the pd.to_datetime function which we are using for parsing datetime scalars.
diff --git a/tilebox-datasets/tests/test_client.py b/tilebox-datasets/tests/test_client.py
index 78fbe91..90eb3ea 100644
--- a/tilebox-datasets/tests/test_client.py
+++ b/tilebox-datasets/tests/test_client.py
@@ -11,7 +11,7 @@
 from _tilebox.grpc.replay import open_recording_channel, open_replay_channel
 from tilebox.datasets import Client, DatasetClient
 from tilebox.datasets.data.datapoint import QueryResultPage
-from tilebox.datasets.data.time_interval import us_to_datetime
+from tilebox.datasets.query.time_interval import us_to_datetime
 
 
 def replay_client(replay_file: str, assert_request_matches: bool = True) -> Client:
diff --git a/tilebox-datasets/tests/test_timeseries.py b/tilebox-datasets/tests/test_timeseries.py
index 0a6f34c..bf4103c 100644
--- a/tilebox-datasets/tests/test_timeseries.py
+++ b/tilebox-datasets/tests/test_timeseries.py
@@ -19,13 +19,6 @@
 from tilebox.datasets.data.collection import Collection, CollectionInfo
 from tilebox.datasets.data.datapoint import AnyMessage, QueryResultPage
 from tilebox.datasets.data.datasets import Dataset
-from tilebox.datasets.data.time_interval import (
-    _EMPTY_TIME_INTERVAL,
-    TimeInterval,
-    _convert_to_datetime,
-    timestamp_to_datetime,
-)
-from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 from tilebox.datasets.datasets.v1.collections_pb2 import (
     CreateCollectionRequest,
     DeleteCollectionRequest,
@@ -36,7 +29,14 @@
 from tilebox.datasets.datasets.v1.core_pb2 import Collection as CollectionMessage
 from tilebox.datasets.datasets.v1.core_pb2 import CollectionInfo as CollectionInfoMessage
 from tilebox.datasets.datasets.v1.core_pb2 import CollectionInfos as CollectionInfosMessage
+from tilebox.datasets.query.time_interval import (
+    _EMPTY_TIME_INTERVAL,
+    TimeInterval,
+    _convert_to_datetime,
+    timestamp_to_datetime,
+)
 from tilebox.datasets.service import TileboxDatasetService
+from tilebox.datasets.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 
 
 def _mocked_dataset() -> tuple[DatasetClient, MagicMock]:
diff --git a/tilebox-datasets/tilebox/datasets/aio/dataset.py b/tilebox-datasets/tilebox/datasets/aio/dataset.py
index e61aa1b..ff5abfa 100644
--- a/tilebox-datasets/tilebox/datasets/aio/dataset.py
+++ b/tilebox-datasets/tilebox/datasets/aio/dataset.py
@@ -11,18 +11,11 @@
 from _tilebox.grpc.aio.pagination import paginated_request
 from _tilebox.grpc.aio.producer_consumer import async_producer_consumer
 from _tilebox.grpc.error import ArgumentError, NotFoundError
-from tilebox.datasets.aio.pagination import (
-    with_progressbar,
-    with_time_progress_callback,
-    with_time_progressbar,
-)
+from tilebox.datasets.aio.pagination import with_progressbar, with_time_progress_callback, with_time_progressbar
 from tilebox.datasets.data.collection import CollectionInfo
 from tilebox.datasets.data.data_access import QueryFilters, SpatialFilter, SpatialFilterLike
-from tilebox.datasets.data.datapoint import DatapointInterval, DatapointIntervalLike, QueryResultPage
+from tilebox.datasets.data.datapoint import QueryResultPage
 from tilebox.datasets.data.datasets import Dataset
-from tilebox.datasets.data.pagination import Pagination
-from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike
-from tilebox.datasets.data.uuid import as_uuid
 from tilebox.datasets.message_pool import get_message_type
 from tilebox.datasets.progress import ProgressCallback
 from tilebox.datasets.protobuf_conversion.protobuf_xarray import MessageToXarrayConverter
@@ -33,7 +26,11 @@
     marshal_messages,
     to_messages,
 )
+from tilebox.datasets.query.id_interval import IDInterval, IDIntervalLike
+from tilebox.datasets.query.pagination import Pagination
+from tilebox.datasets.query.time_interval import TimeInterval, TimeIntervalLike
 from tilebox.datasets.service import TileboxDatasetService
+from tilebox.datasets.uuid import as_uuid
 
 # allow private member access: we allow it here because we want to make as much private as possible so that we can
 # minimize the publicly facing API (which allows us to change internals later, and also limits to auto-completion)
@@ -240,7 +237,7 @@ async def find(self, datapoint_id: str | UUID, skip_data: bool = False) -> xr.Da
 
     async def _find_interval(
         self,
-        datapoint_id_interval: DatapointIntervalLike,
+        datapoint_id_interval: IDIntervalLike,
         end_inclusive: bool = True,
         *,
         skip_data: bool = False,
@@ -259,9 +256,7 @@
         Returns:
             The datapoints in the given interval as an xarray dataset
         """
-        filters = QueryFilters(
-            temporal_extent=DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)
-        )
+        filters = QueryFilters(temporal_extent=IDInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive))
 
         async def request(page: PaginationProtocol) -> QueryResultPage:
             query_page = Pagination(page.limit, page.starting_after)
diff --git a/tilebox-datasets/tilebox/datasets/aio/pagination.py b/tilebox-datasets/tilebox/datasets/aio/pagination.py
index bcaa9c0..c661609 100644
--- a/tilebox-datasets/tilebox/datasets/aio/pagination.py
+++ b/tilebox-datasets/tilebox/datasets/aio/pagination.py
@@ -5,11 +5,9 @@
 
 from tqdm.auto import tqdm
 
-from tilebox.datasets.data import (
-    TimeInterval,
-)
 from tilebox.datasets.data.datapoint import QueryResultPage
 from tilebox.datasets.progress import ProgressCallback, TimeIntervalProgressBar
+from tilebox.datasets.query.time_interval import TimeInterval
 
 
 ResultPage = TypeVar("ResultPage", bound=QueryResultPage)
diff --git a/tilebox-datasets/tilebox/datasets/client.py b/tilebox-datasets/tilebox/datasets/client.py
index 5fae4c4..a781ac1 100644
--- a/tilebox-datasets/tilebox/datasets/client.py
+++ b/tilebox-datasets/tilebox/datasets/client.py
@@ -8,10 +8,10 @@
 
 from _tilebox.grpc.channel import parse_channel_info
 from tilebox.datasets.data.datasets import Dataset, DatasetGroup, ListDatasetsResponse
-from tilebox.datasets.data.uuid import as_uuid
 from tilebox.datasets.group import Group
 from tilebox.datasets.message_pool import register_once
 from tilebox.datasets.service import TileboxDatasetService
+from tilebox.datasets.uuid import as_uuid
 
 
 class TimeseriesDatasetLike(Protocol):
diff --git a/tilebox-datasets/tilebox/datasets/data/__init__.py b/tilebox-datasets/tilebox/datasets/data/__init__.py
index cc83b07..0746517 100644
--- a/tilebox-datasets/tilebox/datasets/data/__init__.py
+++ b/tilebox-datasets/tilebox/datasets/data/__init__.py
@@ -1,16 +1,28 @@
-from tilebox.datasets.data.time_interval import (
-    TimeInterval,
-    TimeIntervalLike,
-    datetime_to_timestamp,
-    timestamp_to_datetime,
-)
-from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message
+# for backwards compatibility; we can remove this hack in the future
 
-__all__ = [
-    "TimeInterval",
-    "TimeIntervalLike",
-    "datetime_to_timestamp",
-    "timestamp_to_datetime",
-    "uuid_message_to_uuid",
-    "uuid_to_uuid_message",
-]
+from warnings import warn
+
+from tilebox.datasets.query.time_interval import TimeInterval as _TimeInterval
+from tilebox.datasets.query.time_interval import TimeIntervalLike
+
+
+class TimeInterval(_TimeInterval):
+    def __post_init__(self) -> None:
+        warn(
+            "The TimeInterval class has been deprecated, import from tilebox.datasets.query instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        super().__post_init__()
+
+    @classmethod
+    def parse(cls, arg: TimeIntervalLike) -> "_TimeInterval":
+        warn(
+            "The TimeInterval class has been deprecated, import from tilebox.datasets.query instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return super().parse(arg)
+
+
+__all__ = ["TimeInterval"]
diff --git a/tilebox-datasets/tilebox/datasets/data/collection.py b/tilebox-datasets/tilebox/datasets/data/collection.py
index 14e8e63..34fdde1 100644
--- a/tilebox-datasets/tilebox/datasets/data/collection.py
+++ b/tilebox-datasets/tilebox/datasets/data/collection.py
@@ -1,9 +1,9 @@
 from dataclasses import dataclass
 from uuid import UUID
 
-from tilebox.datasets.data.time_interval import TimeInterval
-from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 from tilebox.datasets.datasets.v1 import core_pb2
+from tilebox.datasets.query.time_interval import TimeInterval
+from tilebox.datasets.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 
 
 @dataclass
diff --git a/tilebox-datasets/tilebox/datasets/data/data_access.py b/tilebox-datasets/tilebox/datasets/data/data_access.py
index c9d33bb..c859055 100644
--- a/tilebox-datasets/tilebox/datasets/data/data_access.py
+++ b/tilebox-datasets/tilebox/datasets/data/data_access.py
@@ -7,9 +7,9 @@
 # from python 3.11 onwards this is available as typing.NotRequired:
 from typing_extensions import NotRequired
 
-from tilebox.datasets.data.datapoint import DatapointInterval
-from tilebox.datasets.data.time_interval import TimeInterval
 from tilebox.datasets.datasets.v1 import data_access_pb2, well_known_types_pb2
+from tilebox.datasets.query.id_interval import IDInterval
+from tilebox.datasets.query.time_interval import TimeInterval
 
 
 class SpatialFilterMode(Enum):
@@ -104,16 +104,16 @@
 
 @dataclass(frozen=True)
 class QueryFilters:
-    temporal_extent: TimeInterval | DatapointInterval
+    temporal_extent: TimeInterval | IDInterval
     spatial_extent: SpatialFilter | None = None
 
     @classmethod
     def from_message(cls, filters: data_access_pb2.QueryFilters) -> "QueryFilters":
-        temporal_extent: TimeInterval | DatapointInterval | None = None
+        temporal_extent: TimeInterval | IDInterval | None = None
         if filters.HasField("time_interval"):
             temporal_extent = TimeInterval.from_message(filters.time_interval)
         if filters.HasField("datapoint_interval"):
-            temporal_extent = DatapointInterval.from_message(filters.datapoint_interval)
+            temporal_extent = IDInterval.from_message(filters.datapoint_interval)
 
         if temporal_extent is None:
             raise ValueError("Invalid filter: time or datapoint interval must be set")
diff --git a/tilebox-datasets/tilebox/datasets/data/datapoint.py b/tilebox-datasets/tilebox/datasets/data/datapoint.py
index ef621e3..225091b 100644
--- a/tilebox-datasets/tilebox/datasets/data/datapoint.py
+++ b/tilebox-datasets/tilebox/datasets/data/datapoint.py
@@ -1,81 +1,14 @@
 from dataclasses import dataclass, field
 from datetime import datetime
-from typing import Any, TypeAlias
+from typing import Any
 from uuid import UUID
 
-from tilebox.datasets.data.pagination import Pagination
-from tilebox.datasets.data.time_interval import timestamp_to_datetime
-from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message
 from tilebox.datasets.datasets.v1 import core_pb2, data_access_pb2, data_ingestion_pb2
 from tilebox.datasets.message_pool import get_message_type
-
-DatapointIntervalLike: TypeAlias = "tuple[str, str] | tuple[UUID, UUID] | DatapointInterval"
-
-
-@dataclass(frozen=True)
-class DatapointInterval:
-    start_id: UUID
-    end_id: UUID
-    start_exclusive: bool
-    end_inclusive: bool
-
-    @classmethod
-    def from_message(cls, interval: core_pb2.DatapointInterval) -> "DatapointInterval":
-        return cls(
-            start_id=uuid_message_to_uuid(interval.start_id),
-            end_id=uuid_message_to_uuid(interval.end_id),
-            start_exclusive=interval.start_exclusive,
-            end_inclusive=interval.end_inclusive,
-        )
-
-    def to_message(self) -> core_pb2.DatapointInterval:
-        return core_pb2.DatapointInterval(
-            start_id=uuid_to_uuid_message(self.start_id),
-            end_id=uuid_to_uuid_message(self.end_id),
-            start_exclusive=self.start_exclusive,
-            end_inclusive=self.end_inclusive,
-        )
-
-    @classmethod
-    def parse(
-        cls, arg: DatapointIntervalLike, start_exclusive: bool = False, end_inclusive: bool = True
-    ) -> "DatapointInterval":
-        """
-        Convert a variety of input types to a DatapointInterval.
-
-        Supported input types:
-        - DatapointInterval: Return the input as is
-        - tuple of two UUIDs: Return an DatapointInterval with start and end id set to the given values
-        - tuple of two strings: Return an DatapointInterval with start and end id set to the UUIDs parsed from the given strings
-
-        Args:
-            arg: The input to convert
-            start_exclusive: Whether the start id is exclusive
-            end_inclusive: Whether the end id is inclusive
-
-        Returns:
-            DatapointInterval: The parsed ID interval
-        """
-
-        match arg:
-            case DatapointInterval(_, _, _, _):
-                return arg
-            case (UUID(), UUID()):
-                start, end = arg
-                return DatapointInterval(
-                    start_id=start,
-                    end_id=end,
-                    start_exclusive=start_exclusive,
-                    end_inclusive=end_inclusive,
-                )
-            case (str(), str()):
-                start, end = arg
-                return DatapointInterval(
-                    start_id=UUID(start),
-                    end_id=UUID(end),
-                    start_exclusive=start_exclusive,
-                    end_inclusive=end_inclusive,
-                )
+from tilebox.datasets.query.pagination import Pagination
+from tilebox.datasets.query.time_interval import timestamp_to_datetime
+from tilebox.datasets.tilebox.v1 import id_pb2
+from tilebox.datasets.uuid import uuid_message_to_uuid
 
 
 @dataclass(frozen=True)
@@ -173,5 +106,5 @@
         return data_ingestion_pb2.IngestResponse(
             num_created=self.num_created,
             num_existing=self.num_existing,
-            datapoint_ids=[core_pb2.ID(uuid=datapoint_id.bytes) for datapoint_id in self.datapoint_ids],
+            datapoint_ids=[id_pb2.ID(uuid=datapoint_id.bytes) for datapoint_id in self.datapoint_ids],
         )
diff --git a/tilebox-datasets/tilebox/datasets/data/datasets.py b/tilebox-datasets/tilebox/datasets/data/datasets.py
index 7bb0b6d..580f484 100644
--- a/tilebox-datasets/tilebox/datasets/data/datasets.py
+++ b/tilebox-datasets/tilebox/datasets/data/datasets.py
@@ -3,8 +3,8 @@
 
 from google.protobuf.descriptor_pb2 import FileDescriptorSet
 
-from tilebox.datasets.data.uuid import uuid_message_to_optional_uuid, uuid_message_to_uuid, uuid_to_uuid_message
 from tilebox.datasets.datasets.v1 import core_pb2, dataset_type_pb2, datasets_pb2
+from tilebox.datasets.uuid import uuid_message_to_optional_uuid, uuid_message_to_uuid, uuid_to_uuid_message
 
 
 @dataclass(frozen=True)
diff --git a/tilebox-datasets/tilebox/datasets/data/time_interval.py b/tilebox-datasets/tilebox/datasets/data/time_interval.py
index 2a59b86..48197bc 100644 --- a/tilebox-datasets/tilebox/datasets/data/time_interval.py +++ b/tilebox-datasets/tilebox/datasets/data/time_interval.py @@ -1,237 +1,18 @@ -from dataclasses import dataclass -from datetime import datetime, timedelta, timezone -from typing import TypeAlias +# kept for backwards compatibility, we can remove this whole file in the future -import numpy as np -import xarray as xr -from google.protobuf.duration_pb2 import Duration -from google.protobuf.timestamp_pb2 import Timestamp -from pandas.core.tools.datetimes import DatetimeScalar, to_datetime +from warnings import warn -from tilebox.datasets.datasets.v1 import core_pb2 - -_SMALLEST_POSSIBLE_TIMEDELTA = timedelta(microseconds=1) -_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) - -# A type alias for the different types that can be used to specify a time interval -TimeIntervalLike: TypeAlias = ( - DatetimeScalar | tuple[DatetimeScalar, DatetimeScalar] | xr.DataArray | xr.Dataset | "TimeInterval" +from tilebox.datasets.query.time_interval import ( + TimeInterval, + TimeIntervalLike, + datetime_to_timestamp, + timestamp_to_datetime, ) +warn( + "The time_interval module has been deprecated, import from tilebox.datasets.query instead.", + DeprecationWarning, + stacklevel=2, +) -@dataclass(frozen=True) -class TimeInterval: - """ - A time interval from a given start to a given end time. - Both the start and end time can be exclusive or inclusive. - """ - - start: datetime - end: datetime - - # We use exclusive for start and inclusive for end, because that way when both are false - # we have a half-open interval [start, end) which is the default behaviour we want to achieve. - start_exclusive: bool = False - end_inclusive: bool = False - - def __post_init__(self) -> None: - """Validate the time interval""" - if not isinstance(self.start, datetime): - raise TypeError(f"Start time {self.start} must be a datetime object") - if not isinstance(self.end, datetime): - raise TypeError(f"End time {self.end} must be a datetime object") - - # in case datetime objects are timezone naive, we assume they are in UTC - if self.start.tzinfo is None: - object.__setattr__(self, "start", self.start.replace(tzinfo=timezone.utc)) # since self is frozen - if self.end.tzinfo is None: - object.__setattr__(self, "end", self.end.replace(tzinfo=timezone.utc)) # since self is frozen - - def to_half_open(self) -> "TimeInterval": - """Convert the time interval to a half-open interval [start, end)""" - return TimeInterval( - start=self.start + int(self.start_exclusive) * _SMALLEST_POSSIBLE_TIMEDELTA, - end=self.end + int(self.end_inclusive) * _SMALLEST_POSSIBLE_TIMEDELTA, - start_exclusive=False, - end_inclusive=False, - ) - - def astimezone(self, tzinfo: timezone) -> "TimeInterval": - """Convert start and end time to the given timezone""" - return TimeInterval( - start=self.start.astimezone(tzinfo), - end=self.end.astimezone(tzinfo), - start_exclusive=self.start_exclusive, - end_inclusive=self.end_inclusive, - ) - - def __eq__(self, other: object) -> bool: - """Check whether two intervals are equal""" - if not isinstance(other, TimeInterval): - return False - - a, b = self.to_half_open(), other.to_half_open() - return (a.start, a.end) == (b.start, b.end) - - def __hash__(self) -> int: - """Hash the time interval""" - - # if two intervals are equal, they should have the same hash, so we convert to half-open intervals first - half_open = self.to_half_open() - return hash((half_open.start, half_open.end)) - - def __repr__(self) -> str: - return 
self.format() - - def format(self, endpoints: bool = True, sep: str = ", ", timespec: str = "milliseconds") -> str: - """Human readable representation of the time interval - - Args: - endpoints: Whether to format as interval using scientific interval notation [start, end]. - "[", "]" for closed intervals and "(", ")" for open intervals. - sep: The separator to use between the start and end of the interval. - timespec: Specifies the number of additional terms of the time to include. Valid options are 'auto', - 'hours', 'minutes', 'seconds', 'milliseconds' and 'microseconds'. - """ - if self == _EMPTY_TIME_INTERVAL: - return "" - - start = self.start.isoformat(timespec=timespec).replace("+00:00", " UTC") - end = self.end.isoformat(timespec=timespec).replace("+00:00", " UTC") - - formatted = f"{start}{sep}{end}" - if endpoints: - start_ch = "[" if not self.start_exclusive else "(" - end_ch = "]" if self.end_inclusive else ")" - formatted = f"{start_ch}{formatted}{end_ch}" - return formatted - - def __str__(self) -> str: - """Human readable representation of the time interval""" - return self.format() - - @classmethod - def parse(cls, arg: TimeIntervalLike) -> "TimeInterval": - """ - Convert a variety of input types to a TimeInterval. - - Supported input types: - - TimeInterval: Return the input as is - - DatetimeScalar: Return a TimeInterval with start and end time set to the same value and the end time inclusive - - tuple of two DatetimeScalar: Return a TimeInterval with start and end time - - xr.DataArray: Return a TimeInterval with start and end time set to the first and last value in the array and - the end time inclusive - - xr.Dataset: Return a TimeInterval with start and end time set to the first and last value in the time - coordinate of the dataset and the end time inclusive - - Args: - arg: The input to convert - - Returns: - TimeInterval: The parsed time interval - """ - - match arg: - case TimeInterval(_, _, _, _): - return arg - case (start, end): - return TimeInterval(start=_convert_to_datetime(start), end=_convert_to_datetime(end)) - case point_in_time if isinstance(point_in_time, DatetimeScalar | int): - dt = _convert_to_datetime(point_in_time) - return TimeInterval(start=dt, end=dt, start_exclusive=False, end_inclusive=True) - case arr if ( - isinstance(arr, xr.DataArray) - and arr.ndim == 1 - and arr.size > 0 - and arr.dtype == np.dtype("datetime64[ns]") - ): - start = arr.data[0] - end = arr.data[-1] - return TimeInterval( - start=_convert_to_datetime(start), - end=_convert_to_datetime(end), - start_exclusive=False, - end_inclusive=True, - ) - case ds if isinstance(ds, xr.Dataset) and "time" in ds.coords: - return cls.parse(ds.time) - - raise ValueError(f"Failed to convert {arg} ({type(arg)}) to TimeInterval)") - - @classmethod - def from_message( - cls, interval: core_pb2.TimeInterval - ) -> "TimeInterval": # lets use typing.Self once we require python >= 3.11 - """Convert a TimeInterval protobuf message to a TimeInterval object.""" - - start = timestamp_to_datetime(interval.start_time) - end = timestamp_to_datetime(interval.end_time) - if start == _EPOCH and end == _EPOCH and not interval.start_exclusive and not interval.end_inclusive: - return _EMPTY_TIME_INTERVAL - - return cls( - start=timestamp_to_datetime(interval.start_time), - end=timestamp_to_datetime(interval.end_time), - start_exclusive=interval.start_exclusive, - end_inclusive=interval.end_inclusive, - ) - - def to_message(self) -> core_pb2.TimeInterval: - """Convert a TimeInterval object to a TimeInterval 
protobuf message.""" - return core_pb2.TimeInterval( - start_time=datetime_to_timestamp(self.start), - end_time=datetime_to_timestamp(self.end), - start_exclusive=self.start_exclusive, - end_inclusive=self.end_inclusive, - ) - - -# A sentinel value to use for time interval that are empty -_EMPTY_TIME_INTERVAL = TimeInterval(_EPOCH, _EPOCH, start_exclusive=True, end_inclusive=False) - - -def _convert_to_datetime(arg: DatetimeScalar) -> datetime: - """Convert the given datetime scalar to a datetime object in the UTC timezone""" - dt: datetime = to_datetime(arg, utc=True).to_pydatetime() - if dt.tzinfo is None: - dt = dt.replace(tzinfo=timezone.utc) - return dt.astimezone(timezone.utc) - - -def timestamp_to_datetime(timestamp: Timestamp) -> datetime: - """Convert a protobuf timestamp to a datetime object.""" - # datetime.fromtimestamp() expects a timestamp in seconds, not microseconds - # if we pass it as a floating point number, we will run into rounding errors - offset = timedelta(seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000) - return _EPOCH + offset - - -def datetime_to_timestamp(dt: datetime) -> Timestamp: - """Convert a datetime object to a protobuf timestamp.""" - # manual epoch offset calculation to avoid rounding errors and support negative timestamps (before 1970) - offset_us = datetime_to_us(dt.astimezone(timezone.utc)) - seconds, us = divmod(offset_us, 10**6) - return Timestamp(seconds=seconds, nanos=us * 10**3) - - -def datetime_to_us(dt: datetime) -> int: - """Convert a datetime object to a timestamp in microseconds since the epoch.""" - offset = dt - _EPOCH - # implementation taken from timedelta.total_seconds() but without dividing by 10**6 at the end to avoid floating - # point rounding errors - return (offset.days * 86400 + offset.seconds) * 10**6 + offset.microseconds - - -def us_to_datetime(us: int) -> datetime: - """Convert a timestamp in microseconds since the epoch to a datetime object.""" - return _EPOCH + timedelta(microseconds=us) - - -def timedelta_to_duration(td: timedelta) -> Duration: - """Convert a timedelta to a duration protobuf message.""" - return Duration(seconds=int(td.total_seconds()), nanos=int(td.microseconds * 1000)) - - -def duration_to_timedelta(duration: Duration) -> timedelta: - """Convert a duration protobuf message to a timedelta.""" - return timedelta(seconds=duration.seconds, microseconds=duration.nanos // 1000) +__all__ = ["TimeInterval", "TimeIntervalLike", "datetime_to_timestamp", "timestamp_to_datetime"] diff --git a/tilebox-datasets/tilebox/datasets/data/timeseries.py b/tilebox-datasets/tilebox/datasets/data/timeseries.py index 6d13f6f..47fb4c9 100644 --- a/tilebox-datasets/tilebox/datasets/data/timeseries.py +++ b/tilebox-datasets/tilebox/datasets/data/timeseries.py @@ -2,10 +2,10 @@ from datetime import timedelta from uuid import UUID -from tilebox.datasets.data.datapoint import DatapointInterval -from tilebox.datasets.data.time_interval import TimeInterval, duration_to_timedelta, timedelta_to_duration -from tilebox.datasets.data.uuid import uuid_message_to_uuid, uuid_to_uuid_message from tilebox.datasets.datasets.v1 import timeseries_pb2 +from tilebox.datasets.query.id_interval import IDInterval +from tilebox.datasets.query.time_interval import TimeInterval, duration_to_timedelta, timedelta_to_duration +from tilebox.datasets.uuid import uuid_message_to_uuid, uuid_to_uuid_message @dataclass @@ -13,7 +13,7 @@ class TimeseriesDatasetChunk: dataset_id: UUID collection_id: UUID time_interval: TimeInterval | None - 
datapoint_interval: DatapointInterval | None + datapoint_interval: IDInterval | None branch_factor: int chunk_size: int datapoints_per_365_days: int @@ -28,7 +28,7 @@ def from_message(cls, chunk: timeseries_pb2.TimeseriesDatasetChunk) -> "Timeseri and chunk.datapoint_interval.start_id.uuid and chunk.datapoint_interval.end_id.uuid ): - datapoint_interval = DatapointInterval.from_message(chunk.datapoint_interval) + datapoint_interval = IDInterval.from_message(chunk.datapoint_interval) time_interval = None if chunk.time_interval and chunk.time_interval.start_time and chunk.time_interval.end_time: diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.py index c7dd6e2..68c3c79 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.py @@ -23,26 +23,27 @@ from tilebox.datasets.datasets.v1 import core_pb2 as datasets_dot_v1_dot_core__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x64\x61tasets/v1/collections.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\"]\n\x17\x43reateCollectionRequest\x12.\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\tdatasetId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\xc1\x01\n\x1aGetCollectionByNameRequest\x12\'\n\x0f\x63ollection_name\x18\x01 \x01(\tR\x0e\x63ollectionName\x12+\n\x11with_availability\x18\x02 \x01(\x08R\x10withAvailability\x12\x1d\n\nwith_count\x18\x03 \x01(\x08R\twithCount\x12.\n\ndataset_id\x18\x04 \x01(\x0b\x32\x0f.datasets.v1.IDR\tdatasetId\"\x7f\n\x17\x44\x65leteCollectionRequest\x12\x34\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x0c\x63ollectionId\x12.\n\ndataset_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\tdatasetId\"\x1a\n\x18\x44\x65leteCollectionResponse\"\x94\x01\n\x16ListCollectionsRequest\x12.\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\tdatasetId\x12+\n\x11with_availability\x18\x02 \x01(\x08R\x10withAvailability\x12\x1d\n\nwith_count\x18\x03 \x01(\x08R\twithCount2\x86\x03\n\x11\x43ollectionService\x12W\n\x10\x43reateCollection\x12$.datasets.v1.CreateCollectionRequest\x1a\x1b.datasets.v1.CollectionInfo\"\x00\x12]\n\x13GetCollectionByName\x12\'.datasets.v1.GetCollectionByNameRequest\x1a\x1b.datasets.v1.CollectionInfo\"\x00\x12\x61\n\x10\x44\x65leteCollection\x12$.datasets.v1.DeleteCollectionRequest\x1a%.datasets.v1.DeleteCollectionResponse\"\x00\x12V\n\x0fListCollections\x12#.datasets.v1.ListCollectionsRequest\x1a\x1c.datasets.v1.CollectionInfos\"\x00\x42p\n\x0f\x63om.datasets.v1B\x10\x43ollectionsProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x64\x61tasets/v1/collections.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\x13tilebox/v1/id.proto\"\\\n\x17\x43reateCollectionRequest\x12-\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\tdatasetId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\xc0\x01\n\x1aGetCollectionByNameRequest\x12\'\n\x0f\x63ollection_name\x18\x01 \x01(\tR\x0e\x63ollectionName\x12+\n\x11with_availability\x18\x02 \x01(\x08R\x10withAvailability\x12\x1d\n\nwith_count\x18\x03 \x01(\x08R\twithCount\x12-\n\ndataset_id\x18\x04 
\x01(\x0b\x32\x0e.tilebox.v1.IDR\tdatasetId\"}\n\x17\x44\x65leteCollectionRequest\x12\x33\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x63ollectionId\x12-\n\ndataset_id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\tdatasetId\"\x1a\n\x18\x44\x65leteCollectionResponse\"\x93\x01\n\x16ListCollectionsRequest\x12-\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\tdatasetId\x12+\n\x11with_availability\x18\x02 \x01(\x08R\x10withAvailability\x12\x1d\n\nwith_count\x18\x03 \x01(\x08R\twithCount2\x86\x03\n\x11\x43ollectionService\x12W\n\x10\x43reateCollection\x12$.datasets.v1.CreateCollectionRequest\x1a\x1b.datasets.v1.CollectionInfo\"\x00\x12]\n\x13GetCollectionByName\x12\'.datasets.v1.GetCollectionByNameRequest\x1a\x1b.datasets.v1.CollectionInfo\"\x00\x12\x61\n\x10\x44\x65leteCollection\x12$.datasets.v1.DeleteCollectionRequest\x1a%.datasets.v1.DeleteCollectionResponse\"\x00\x12V\n\x0fListCollections\x12#.datasets.v1.ListCollectionsRequest\x1a\x1c.datasets.v1.CollectionInfos\"\x00\x42u\n\x0f\x63om.datasets.v1B\x10\x43ollectionsProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.collections_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\020CollectionsProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_CREATECOLLECTIONREQUEST']._serialized_start=70 - _globals['_CREATECOLLECTIONREQUEST']._serialized_end=163 - _globals['_GETCOLLECTIONBYNAMEREQUEST']._serialized_start=166 - _globals['_GETCOLLECTIONBYNAMEREQUEST']._serialized_end=359 - _globals['_DELETECOLLECTIONREQUEST']._serialized_start=361 - _globals['_DELETECOLLECTIONREQUEST']._serialized_end=488 - _globals['_DELETECOLLECTIONRESPONSE']._serialized_start=490 - _globals['_DELETECOLLECTIONRESPONSE']._serialized_end=516 - _globals['_LISTCOLLECTIONSREQUEST']._serialized_start=519 - _globals['_LISTCOLLECTIONSREQUEST']._serialized_end=667 - _globals['_COLLECTIONSERVICE']._serialized_start=670 - _globals['_COLLECTIONSERVICE']._serialized_end=1060 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\020CollectionsProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_CREATECOLLECTIONREQUEST']._serialized_start=91 + _globals['_CREATECOLLECTIONREQUEST']._serialized_end=183 + _globals['_GETCOLLECTIONBYNAMEREQUEST']._serialized_start=186 + _globals['_GETCOLLECTIONBYNAMEREQUEST']._serialized_end=378 + _globals['_DELETECOLLECTIONREQUEST']._serialized_start=380 + _globals['_DELETECOLLECTIONREQUEST']._serialized_end=505 + _globals['_DELETECOLLECTIONRESPONSE']._serialized_start=507 + _globals['_DELETECOLLECTIONRESPONSE']._serialized_end=533 + _globals['_LISTCOLLECTIONSREQUEST']._serialized_start=536 + _globals['_LISTCOLLECTIONSREQUEST']._serialized_end=683 + _globals['_COLLECTIONSERVICE']._serialized_start=686 + _globals['_COLLECTIONSERVICE']._serialized_end=1076 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.pyi 
b/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.pyi index b550b8d..7fe8201 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/collections_pb2.pyi @@ -1,4 +1,5 @@ from tilebox.datasets.datasets.v1 import core_pb2 as _core_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union @@ -9,9 +10,9 @@ class CreateCollectionRequest(_message.Message): __slots__ = ("dataset_id", "name") DATASET_ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] - dataset_id: _core_pb2.ID + dataset_id: _id_pb2.ID name: str - def __init__(self, dataset_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ...) -> None: ... + def __init__(self, dataset_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ...) -> None: ... class GetCollectionByNameRequest(_message.Message): __slots__ = ("collection_name", "with_availability", "with_count", "dataset_id") @@ -22,16 +23,16 @@ class GetCollectionByNameRequest(_message.Message): collection_name: str with_availability: bool with_count: bool - dataset_id: _core_pb2.ID - def __init__(self, collection_name: _Optional[str] = ..., with_availability: bool = ..., with_count: bool = ..., dataset_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ...) -> None: ... + dataset_id: _id_pb2.ID + def __init__(self, collection_name: _Optional[str] = ..., with_availability: bool = ..., with_count: bool = ..., dataset_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... class DeleteCollectionRequest(_message.Message): __slots__ = ("collection_id", "dataset_id") COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] DATASET_ID_FIELD_NUMBER: _ClassVar[int] - collection_id: _core_pb2.ID - dataset_id: _core_pb2.ID - def __init__(self, collection_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., dataset_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ...) -> None: ... + collection_id: _id_pb2.ID + dataset_id: _id_pb2.ID + def __init__(self, collection_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., dataset_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... class DeleteCollectionResponse(_message.Message): __slots__ = () @@ -42,7 +43,7 @@ class ListCollectionsRequest(_message.Message): DATASET_ID_FIELD_NUMBER: _ClassVar[int] WITH_AVAILABILITY_FIELD_NUMBER: _ClassVar[int] WITH_COUNT_FIELD_NUMBER: _ClassVar[int] - dataset_id: _core_pb2.ID + dataset_id: _id_pb2.ID with_availability: bool with_count: bool - def __init__(self, dataset_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., with_availability: bool = ..., with_count: bool = ...) -> None: ... + def __init__(self, dataset_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., with_availability: bool = ..., with_count: bool = ...) -> None: ... 
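
Note: the two generated modules above show the protobuf side of this refactor: the ID message now lives in the shared tilebox.v1 package (id_pb2) instead of datasets.v1 (core_pb2), and the matching UUID helpers moved from tilebox.datasets.data.uuid to tilebox.datasets.uuid. A minimal round-trip sketch under the new import paths (the module paths and helper names are taken from this diff; the round-trip equality itself is an assumption about the helpers' behavior, not something the diff states):

    from uuid import uuid4

    from tilebox.datasets.uuid import uuid_message_to_uuid, uuid_to_uuid_message

    dataset_id = uuid4()
    message = uuid_to_uuid_message(dataset_id)  # builds a tilebox.v1.ID, no longer a datasets.v1.ID
    assert uuid_message_to_uuid(message) == dataset_id  # assumed inverse of uuid_to_uuid_message
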
diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.py index 1688489..a2eb880 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.py @@ -24,50 +24,56 @@ from tilebox.datasets.datasets.v1 import dataset_type_pb2 as datasets_dot_v1_dot_dataset__type__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as tilebox_dot_v1_dot_query__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x64\x61tasets/v1/core.proto\x12\x0b\x64\x61tasets.v1\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x18\n\x02ID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"\xce\x01\n\x0cTimeInterval\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\xb5\x01\n\x11\x44\x61tapointInterval\x12*\n\x08start_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x07startId\x12&\n\x06\x65nd_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x05\x65ndId\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"v\n\x10LegacyPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12*\n\x0estarting_after\x18\x02 \x01(\tH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"\x81\x01\n\nPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12;\n\x0estarting_after\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value\">\n\x0bRepeatedAny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x03(\x0cR\x05value\"\xad\x01\n\x11\x44\x61tapointMetadata\x12\x39\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\teventTime\x12\x41\n\x0eingestion_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x13\n\x02id\x18\x03 \x01(\tH\x00R\x02id\x88\x01\x01\x42\x05\n\x03_id\"n\n\nDatapoints\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\"\xc0\x01\n\rDatapointPage\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12?\n\tnext_page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationH\x00R\x08nextPage\x88\x01\x01\x42\x0c\n\n_next_page\"e\n\tDatapoint\x12\x32\n\x04meta\x18\x01 \x01(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x10.datasets.v1.AnyR\x04\x64\x61ta\"^\n\nCollection\x12\x1b\n\tlegacy_id\x18\x01 \x01(\tR\x08legacyId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1f\n\x02id\x18\x03 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\"\xc3\x01\n\x0e\x43ollectionInfo\x12\x37\n\ncollection\x18\x01 \x01(\x0b\x32\x17.datasets.v1.CollectionR\ncollection\x12\x42\n\x0c\x61vailability\x18\x02 
\x01(\x0b\x32\x19.datasets.v1.TimeIntervalH\x00R\x0c\x61vailability\x88\x01\x01\x12\x19\n\x05\x63ount\x18\x03 \x01(\x04H\x01R\x05\x63ount\x88\x01\x01\x42\x0f\n\r_availabilityB\x08\n\x06_count\"B\n\x0f\x43ollectionInfos\x12/\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x1b.datasets.v1.CollectionInfoR\x04\x64\x61ta\"\xbb\x03\n\x07\x44\x61taset\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12*\n\x08group_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x07groupId\x12.\n\x04type\x18\x03 \x01(\x0b\x32\x1a.datasets.v1.AnnotatedTypeR\x04type\x12\x1b\n\tcode_name\x18\x04 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x05 \x01(\tR\x04name\x12\x18\n\x07summary\x18\x06 \x01(\tR\x07summary\x12\x12\n\x04icon\x18\x07 \x01(\tR\x04icon\x12 \n\x0b\x64\x65scription\x18\x08 \x01(\tR\x0b\x64\x65scription\x12@\n\x0bpermissions\x18\n \x03(\x0e\x32\x1e.datasets.v1.DatasetPermissionR\x0bpermissions\x12\x37\n\nvisibility\x18\x0b \x01(\x0e\x32\x17.datasets.v1.VisibilityR\nvisibility\x12\x12\n\x04slug\x18\x0c \x01(\tR\x04slug\x12#\n\rtype_editable\x18\r \x01(\x08R\x0ctypeEditable\"\xa2\x01\n\x0c\x44\x61tasetGroup\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12,\n\tparent_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x08parentId\x12\x1b\n\tcode_name\x18\x03 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x12\n\x04icon\x18\x05 \x01(\tR\x04icon*\x9b\x01\n\x11\x44\x61tasetPermission\x12\"\n\x1e\x44\x41TASET_PERMISSION_UNSPECIFIED\x10\x00\x12\"\n\x1e\x44\x41TASET_PERMISSION_ACCESS_DATA\x10\x01\x12!\n\x1d\x44\x41TASET_PERMISSION_WRITE_DATA\x10\x02\x12\x1b\n\x17\x44\x41TASET_PERMISSION_EDIT\x10\x03*v\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x16\n\x12VISIBILITY_PRIVATE\x10\x01\x12\x1d\n\x19VISIBILITY_SHARED_WITH_ME\x10\x02\x12\x15\n\x11VISIBILITY_PUBLIC\x10\x03\x42i\n\x0f\x63om.datasets.v1B\tCoreProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16\x64\x61tasets/v1/core.proto\x12\x0b\x64\x61tasets.v1\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x13tilebox/v1/id.proto\x1a\x16tilebox/v1/query.proto\"]\n\x10LegacyPagination\x12\x1b\n\x05limit\x18\x01 \x01(\x03\x42\x05\xaa\x01\x02\x08\x01R\x05limit\x12,\n\x0estarting_after\x18\x02 \x01(\tB\x05\xaa\x01\x02\x08\x01R\rstartingAfter\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05value\">\n\x0bRepeatedAny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x03(\x0cR\x05value\"\xa8\x01\n\x11\x44\x61tapointMetadata\x12\x39\n\nevent_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\teventTime\x12\x41\n\x0eingestion_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x15\n\x02id\x18\x03 \x01(\tB\x05\xaa\x01\x02\x08\x01R\x02id\"n\n\nDatapoints\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\"\xb4\x01\n\rDatapointPage\x12\x32\n\x04meta\x18\x01 \x03(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12\x41\n\tnext_page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationB\x05\xaa\x01\x02\x08\x01R\x08nextPage\"e\n\tDatapoint\x12\x32\n\x04meta\x18\x01 
\x01(\x0b\x32\x1e.datasets.v1.DatapointMetadataR\x04meta\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x10.datasets.v1.AnyR\x04\x64\x61ta\"]\n\nCollection\x12\x1b\n\tlegacy_id\x18\x01 \x01(\tR\x08legacyId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1e\n\x02id\x18\x03 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\"\xab\x01\n\x0e\x43ollectionInfo\x12\x37\n\ncollection\x18\x01 \x01(\x0b\x32\x17.datasets.v1.CollectionR\ncollection\x12\x43\n\x0c\x61vailability\x18\x02 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalB\x05\xaa\x01\x02\x08\x01R\x0c\x61vailability\x12\x1b\n\x05\x63ount\x18\x03 \x01(\x04\x42\x05\xaa\x01\x02\x08\x01R\x05\x63ount\"B\n\x0f\x43ollectionInfos\x12/\n\x04\x64\x61ta\x18\x01 \x03(\x0b\x32\x1b.datasets.v1.CollectionInfoR\x04\x64\x61ta\"\xb9\x03\n\x07\x44\x61taset\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12)\n\x08group_id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x07groupId\x12.\n\x04type\x18\x03 \x01(\x0b\x32\x1a.datasets.v1.AnnotatedTypeR\x04type\x12\x1b\n\tcode_name\x18\x04 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x05 \x01(\tR\x04name\x12\x18\n\x07summary\x18\x06 \x01(\tR\x07summary\x12\x12\n\x04icon\x18\x07 \x01(\tR\x04icon\x12 \n\x0b\x64\x65scription\x18\x08 \x01(\tR\x0b\x64\x65scription\x12@\n\x0bpermissions\x18\n \x03(\x0e\x32\x1e.datasets.v1.DatasetPermissionR\x0bpermissions\x12\x37\n\nvisibility\x18\x0b \x01(\x0e\x32\x17.datasets.v1.VisibilityR\nvisibility\x12\x12\n\x04slug\x18\x0c \x01(\tR\x04slug\x12#\n\rtype_editable\x18\r \x01(\x08R\x0ctypeEditable\"\xa0\x01\n\x0c\x44\x61tasetGroup\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12+\n\tparent_id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x08parentId\x12\x1b\n\tcode_name\x18\x03 \x01(\tR\x08\x63odeName\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x12\n\x04icon\x18\x05 \x01(\tR\x04icon*\x9b\x01\n\x11\x44\x61tasetPermission\x12\"\n\x1e\x44\x41TASET_PERMISSION_UNSPECIFIED\x10\x00\x12\"\n\x1e\x44\x41TASET_PERMISSION_ACCESS_DATA\x10\x01\x12!\n\x1d\x44\x41TASET_PERMISSION_WRITE_DATA\x10\x02\x12\x1b\n\x17\x44\x41TASET_PERMISSION_EDIT\x10\x03*v\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x16\n\x12VISIBILITY_PRIVATE\x10\x01\x12\x1d\n\x19VISIBILITY_SHARED_WITH_ME\x10\x02\x12\x15\n\x11VISIBILITY_PUBLIC\x10\x03\x42n\n\x0f\x63om.datasets.v1B\tCoreProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.core_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\tCoreProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_DATASETPERMISSION']._serialized_start=2455 - _globals['_DATASETPERMISSION']._serialized_end=2610 - _globals['_VISIBILITY']._serialized_start=2612 - _globals['_VISIBILITY']._serialized_end=2730 - _globals['_ID']._serialized_start=104 - _globals['_ID']._serialized_end=128 - _globals['_TIMEINTERVAL']._serialized_start=131 - _globals['_TIMEINTERVAL']._serialized_end=337 - _globals['_DATAPOINTINTERVAL']._serialized_start=340 - _globals['_DATAPOINTINTERVAL']._serialized_end=521 - _globals['_LEGACYPAGINATION']._serialized_start=523 - 
_globals['_LEGACYPAGINATION']._serialized_end=641 - _globals['_PAGINATION']._serialized_start=644 - _globals['_PAGINATION']._serialized_end=773 - _globals['_ANY']._serialized_start=775 - _globals['_ANY']._serialized_end=829 - _globals['_REPEATEDANY']._serialized_start=831 - _globals['_REPEATEDANY']._serialized_end=893 - _globals['_DATAPOINTMETADATA']._serialized_start=896 - _globals['_DATAPOINTMETADATA']._serialized_end=1069 - _globals['_DATAPOINTS']._serialized_start=1071 - _globals['_DATAPOINTS']._serialized_end=1181 - _globals['_DATAPOINTPAGE']._serialized_start=1184 - _globals['_DATAPOINTPAGE']._serialized_end=1376 - _globals['_DATAPOINT']._serialized_start=1378 - _globals['_DATAPOINT']._serialized_end=1479 - _globals['_COLLECTION']._serialized_start=1481 - _globals['_COLLECTION']._serialized_end=1575 - _globals['_COLLECTIONINFO']._serialized_start=1578 - _globals['_COLLECTIONINFO']._serialized_end=1773 - _globals['_COLLECTIONINFOS']._serialized_start=1775 - _globals['_COLLECTIONINFOS']._serialized_end=1841 - _globals['_DATASET']._serialized_start=1844 - _globals['_DATASET']._serialized_end=2287 - _globals['_DATASETGROUP']._serialized_start=2290 - _globals['_DATASETGROUP']._serialized_end=2452 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\tCoreProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_LEGACYPAGINATION'].fields_by_name['limit']._loaded_options = None + _globals['_LEGACYPAGINATION'].fields_by_name['limit']._serialized_options = b'\252\001\002\010\001' + _globals['_LEGACYPAGINATION'].fields_by_name['starting_after']._loaded_options = None + _globals['_LEGACYPAGINATION'].fields_by_name['starting_after']._serialized_options = b'\252\001\002\010\001' + _globals['_DATAPOINTMETADATA'].fields_by_name['id']._loaded_options = None + _globals['_DATAPOINTMETADATA'].fields_by_name['id']._serialized_options = b'\252\001\002\010\001' + _globals['_DATAPOINTPAGE'].fields_by_name['next_page']._loaded_options = None + _globals['_DATAPOINTPAGE'].fields_by_name['next_page']._serialized_options = b'\252\001\002\010\001' + _globals['_COLLECTIONINFO'].fields_by_name['availability']._loaded_options = None + _globals['_COLLECTIONINFO'].fields_by_name['availability']._serialized_options = b'\252\001\002\010\001' + _globals['_COLLECTIONINFO'].fields_by_name['count']._loaded_options = None + _globals['_COLLECTIONINFO'].fields_by_name['count']._serialized_options = b'\252\001\002\010\001' + _globals['_DATASETPERMISSION']._serialized_start=1878 + _globals['_DATASETPERMISSION']._serialized_end=2033 + _globals['_VISIBILITY']._serialized_start=2035 + _globals['_VISIBILITY']._serialized_end=2153 + _globals['_LEGACYPAGINATION']._serialized_start=149 + _globals['_LEGACYPAGINATION']._serialized_end=242 + _globals['_ANY']._serialized_start=244 + _globals['_ANY']._serialized_end=298 + _globals['_REPEATEDANY']._serialized_start=300 + _globals['_REPEATEDANY']._serialized_end=362 + _globals['_DATAPOINTMETADATA']._serialized_start=365 + _globals['_DATAPOINTMETADATA']._serialized_end=533 + _globals['_DATAPOINTS']._serialized_start=535 + _globals['_DATAPOINTS']._serialized_end=645 + _globals['_DATAPOINTPAGE']._serialized_start=648 + _globals['_DATAPOINTPAGE']._serialized_end=828 + _globals['_DATAPOINT']._serialized_start=830 + _globals['_DATAPOINT']._serialized_end=931 + _globals['_COLLECTION']._serialized_start=933 + _globals['_COLLECTION']._serialized_end=1026 + 
_globals['_COLLECTIONINFO']._serialized_start=1029 + _globals['_COLLECTIONINFO']._serialized_end=1200 + _globals['_COLLECTIONINFOS']._serialized_start=1202 + _globals['_COLLECTIONINFOS']._serialized_end=1268 + _globals['_DATASET']._serialized_start=1271 + _globals['_DATASET']._serialized_end=1712 + _globals['_DATASETGROUP']._serialized_start=1715 + _globals['_DATASETGROUP']._serialized_end=1875 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.pyi index b79cd68..7abf663 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/core_pb2.pyi @@ -1,5 +1,7 @@ from tilebox.datasets.datasets.v1 import dataset_type_pb2 as _dataset_type_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as _query_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -30,36 +32,6 @@ VISIBILITY_PRIVATE: Visibility VISIBILITY_SHARED_WITH_ME: Visibility VISIBILITY_PUBLIC: Visibility -class ID(_message.Message): - __slots__ = ("uuid",) - UUID_FIELD_NUMBER: _ClassVar[int] - uuid: bytes - def __init__(self, uuid: _Optional[bytes] = ...) -> None: ... - -class TimeInterval(_message.Message): - __slots__ = ("start_time", "end_time", "start_exclusive", "end_inclusive") - START_TIME_FIELD_NUMBER: _ClassVar[int] - END_TIME_FIELD_NUMBER: _ClassVar[int] - START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] - END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] - start_time: _timestamp_pb2.Timestamp - end_time: _timestamp_pb2.Timestamp - start_exclusive: bool - end_inclusive: bool - def __init__(self, start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... - -class DatapointInterval(_message.Message): - __slots__ = ("start_id", "end_id", "start_exclusive", "end_inclusive") - START_ID_FIELD_NUMBER: _ClassVar[int] - END_ID_FIELD_NUMBER: _ClassVar[int] - START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] - END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] - start_id: ID - end_id: ID - start_exclusive: bool - end_inclusive: bool - def __init__(self, start_id: _Optional[_Union[ID, _Mapping]] = ..., end_id: _Optional[_Union[ID, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... - class LegacyPagination(_message.Message): __slots__ = ("limit", "starting_after") LIMIT_FIELD_NUMBER: _ClassVar[int] @@ -68,14 +40,6 @@ class LegacyPagination(_message.Message): starting_after: str def __init__(self, limit: _Optional[int] = ..., starting_after: _Optional[str] = ...) -> None: ... -class Pagination(_message.Message): - __slots__ = ("limit", "starting_after") - LIMIT_FIELD_NUMBER: _ClassVar[int] - STARTING_AFTER_FIELD_NUMBER: _ClassVar[int] - limit: int - starting_after: ID - def __init__(self, limit: _Optional[int] = ..., starting_after: _Optional[_Union[ID, _Mapping]] = ...) -> None: ... 
- class Any(_message.Message): __slots__ = ("type_url", "value") TYPE_URL_FIELD_NUMBER: _ClassVar[int] @@ -135,8 +99,8 @@ class Collection(_message.Message): ID_FIELD_NUMBER: _ClassVar[int] legacy_id: str name: str - id: ID - def __init__(self, legacy_id: _Optional[str] = ..., name: _Optional[str] = ..., id: _Optional[_Union[ID, _Mapping]] = ...) -> None: ... + id: _id_pb2.ID + def __init__(self, legacy_id: _Optional[str] = ..., name: _Optional[str] = ..., id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... class CollectionInfo(_message.Message): __slots__ = ("collection", "availability", "count") @@ -144,9 +108,9 @@ class CollectionInfo(_message.Message): AVAILABILITY_FIELD_NUMBER: _ClassVar[int] COUNT_FIELD_NUMBER: _ClassVar[int] collection: Collection - availability: TimeInterval + availability: _query_pb2.TimeInterval count: int - def __init__(self, collection: _Optional[_Union[Collection, _Mapping]] = ..., availability: _Optional[_Union[TimeInterval, _Mapping]] = ..., count: _Optional[int] = ...) -> None: ... + def __init__(self, collection: _Optional[_Union[Collection, _Mapping]] = ..., availability: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., count: _Optional[int] = ...) -> None: ... class CollectionInfos(_message.Message): __slots__ = ("data",) @@ -168,8 +132,8 @@ class Dataset(_message.Message): VISIBILITY_FIELD_NUMBER: _ClassVar[int] SLUG_FIELD_NUMBER: _ClassVar[int] TYPE_EDITABLE_FIELD_NUMBER: _ClassVar[int] - id: ID - group_id: ID + id: _id_pb2.ID + group_id: _id_pb2.ID type: _dataset_type_pb2.AnnotatedType code_name: str name: str @@ -180,7 +144,7 @@ class Dataset(_message.Message): visibility: Visibility slug: str type_editable: bool - def __init__(self, id: _Optional[_Union[ID, _Mapping]] = ..., group_id: _Optional[_Union[ID, _Mapping]] = ..., type: _Optional[_Union[_dataset_type_pb2.AnnotatedType, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., summary: _Optional[str] = ..., icon: _Optional[str] = ..., description: _Optional[str] = ..., permissions: _Optional[_Iterable[_Union[DatasetPermission, str]]] = ..., visibility: _Optional[_Union[Visibility, str]] = ..., slug: _Optional[str] = ..., type_editable: bool = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., group_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., type: _Optional[_Union[_dataset_type_pb2.AnnotatedType, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., summary: _Optional[str] = ..., icon: _Optional[str] = ..., description: _Optional[str] = ..., permissions: _Optional[_Iterable[_Union[DatasetPermission, str]]] = ..., visibility: _Optional[_Union[Visibility, str]] = ..., slug: _Optional[str] = ..., type_editable: bool = ...) -> None: ... class DatasetGroup(_message.Message): __slots__ = ("id", "parent_id", "code_name", "name", "icon") @@ -189,9 +153,9 @@ class DatasetGroup(_message.Message): CODE_NAME_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] ICON_FIELD_NUMBER: _ClassVar[int] - id: ID - parent_id: ID + id: _id_pb2.ID + parent_id: _id_pb2.ID code_name: str name: str icon: str - def __init__(self, id: _Optional[_Union[ID, _Mapping]] = ..., parent_id: _Optional[_Union[ID, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., icon: _Optional[str] = ...) -> None: ... 
+ def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., parent_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., code_name: _Optional[str] = ..., name: _Optional[str] = ..., icon: _Optional[str] = ...) -> None: ... diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.py index 60f324c..a0e63dd 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.py @@ -24,34 +24,42 @@ from tilebox.datasets.datasets.v1 import core_pb2 as datasets_dot_v1_dot_core__pb2 from tilebox.datasets.datasets.v1 import well_known_types_pb2 as datasets_dot_v1_dot_well__known__types__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as tilebox_dot_v1_dot_query__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x64\x61tasets/v1/data_access.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\"datasets/v1/well_known_types.proto\"\xcd\x02\n\x1cGetDatasetForIntervalRequest\x12#\n\rcollection_id\x18\x01 \x01(\tR\x0c\x63ollectionId\x12>\n\rtime_interval\x18\x02 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalR\x0ctimeInterval\x12M\n\x12\x64\x61tapoint_interval\x18\x06 \x01(\x0b\x32\x1e.datasets.v1.DatapointIntervalR\x11\x64\x61tapointInterval\x12\x36\n\x04page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationH\x00R\x04page\x88\x01\x01\x12\x1b\n\tskip_data\x18\x04 \x01(\x08R\x08skipData\x12\x1b\n\tskip_meta\x18\x05 \x01(\x08R\x08skipMetaB\x07\n\x05_page\"k\n\x17GetDatapointByIdRequest\x12#\n\rcollection_id\x18\x01 \x01(\tR\x0c\x63ollectionId\x12\x0e\n\x02id\x18\x02 \x01(\tR\x02id\x12\x1b\n\tskip_data\x18\x03 \x01(\x08R\x08skipData\"\x88\x01\n\x10QueryByIDRequest\x12\x36\n\x0e\x63ollection_ids\x18\x01 \x03(\x0b\x32\x0f.datasets.v1.IDR\rcollectionIds\x12\x1f\n\x02id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12\x1b\n\tskip_data\x18\x03 \x01(\x08R\x08skipData\"\xf7\x01\n\x0cQueryFilters\x12@\n\rtime_interval\x18\x01 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalH\x00R\x0ctimeInterval\x12O\n\x12\x64\x61tapoint_interval\x18\x02 \x01(\x0b\x32\x1e.datasets.v1.DatapointIntervalH\x00R\x11\x64\x61tapointInterval\x12\x41\n\x0espatial_extent\x18\x03 \x01(\x0b\x32\x1a.datasets.v1.SpatialFilterR\rspatialExtentB\x11\n\x0ftemporal_extent\"\xc9\x01\n\rSpatialFilter\x12\x31\n\x08geometry\x18\x01 \x01(\x0b\x32\x15.datasets.v1.GeometryR\x08geometry\x12\x32\n\x04mode\x18\x02 \x01(\x0e\x32\x1e.datasets.v1.SpatialFilterModeR\x04mode\x12Q\n\x11\x63oordinate_system\x18\x03 \x01(\x0e\x32$.datasets.v1.SpatialCoordinateSystemR\x10\x63oordinateSystem\"\xd3\x01\n\x0cQueryRequest\x12\x36\n\x0e\x63ollection_ids\x18\x01 \x03(\x0b\x32\x0f.datasets.v1.IDR\rcollectionIds\x12\x33\n\x07\x66ilters\x18\x02 \x01(\x0b\x32\x19.datasets.v1.QueryFiltersR\x07\x66ilters\x12\x30\n\x04page\x18\x03 \x01(\x0b\x32\x17.datasets.v1.PaginationH\x00R\x04page\x88\x01\x01\x12\x1b\n\tskip_data\x18\x04 \x01(\x08R\x08skipDataB\x07\n\x05_page\"\x88\x01\n\x0fQueryResultPage\x12,\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12\x39\n\tnext_page\x18\x02 \x01(\x0b\x32\x17.datasets.v1.PaginationH\x00R\x08nextPage\x88\x01\x01\x42\x0c\n\n_next_page*~\n\x11SpatialFilterMode\x12#\n\x1fSPATIAL_FILTER_MODE_UNSPECIFIED\x10\x00\x12\"\n\x1eSPATIAL_FILTER_MODE_INTERSECTS\x10\x01\x12 
\n\x1cSPATIAL_FILTER_MODE_CONTAINS\x10\x02*\x96\x01\n\x17SpatialCoordinateSystem\x12)\n%SPATIAL_COORDINATE_SYSTEM_UNSPECIFIED\x10\x00\x12\'\n#SPATIAL_COORDINATE_SYSTEM_CARTESIAN\x10\x01\x12\'\n#SPATIAL_COORDINATE_SYSTEM_SPHERICAL\x10\x02\x32\xcd\x02\n\x11\x44\x61taAccessService\x12`\n\x15GetDatasetForInterval\x12).datasets.v1.GetDatasetForIntervalRequest\x1a\x1a.datasets.v1.DatapointPage\"\x00\x12R\n\x10GetDatapointByID\x12$.datasets.v1.GetDatapointByIdRequest\x1a\x16.datasets.v1.Datapoint\"\x00\x12>\n\tQueryByID\x12\x1d.datasets.v1.QueryByIDRequest\x1a\x10.datasets.v1.Any\"\x00\x12\x42\n\x05Query\x12\x19.datasets.v1.QueryRequest\x1a\x1c.datasets.v1.QueryResultPage\"\x00\x42o\n\x0f\x63om.datasets.v1B\x0f\x44\x61taAccessProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x64\x61tasets/v1/data_access.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\"datasets/v1/well_known_types.proto\x1a\x13tilebox/v1/id.proto\x1a\x16tilebox/v1/query.proto\"\xbd\x02\n\x1cGetDatasetForIntervalRequest\x12#\n\rcollection_id\x18\x01 \x01(\tR\x0c\x63ollectionId\x12=\n\rtime_interval\x18\x02 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalR\x0ctimeInterval\x12\x45\n\x12\x64\x61tapoint_interval\x18\x06 \x01(\x0b\x32\x16.tilebox.v1.IDIntervalR\x11\x64\x61tapointInterval\x12\x38\n\x04page\x18\x03 \x01(\x0b\x32\x1d.datasets.v1.LegacyPaginationB\x05\xaa\x01\x02\x08\x01R\x04page\x12\x1b\n\tskip_data\x18\x04 \x01(\x08R\x08skipData\x12\x1b\n\tskip_meta\x18\x05 \x01(\x08R\x08skipMeta\"k\n\x17GetDatapointByIdRequest\x12#\n\rcollection_id\x18\x01 \x01(\tR\x0c\x63ollectionId\x12\x0e\n\x02id\x18\x02 \x01(\tR\x02id\x12\x1b\n\tskip_data\x18\x03 \x01(\x08R\x08skipData\"\x86\x01\n\x10QueryByIDRequest\x12\x35\n\x0e\x63ollection_ids\x18\x01 \x03(\x0b\x32\x0e.tilebox.v1.IDR\rcollectionIds\x12\x1e\n\x02id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x1b\n\tskip_data\x18\x03 \x01(\x08R\x08skipData\"\xee\x01\n\x0cQueryFilters\x12?\n\rtime_interval\x18\x01 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalH\x00R\x0ctimeInterval\x12G\n\x12\x64\x61tapoint_interval\x18\x02 \x01(\x0b\x32\x16.tilebox.v1.IDIntervalH\x00R\x11\x64\x61tapointInterval\x12\x41\n\x0espatial_extent\x18\x03 \x01(\x0b\x32\x1a.datasets.v1.SpatialFilterR\rspatialExtentB\x11\n\x0ftemporal_extent\"\xc9\x01\n\rSpatialFilter\x12\x31\n\x08geometry\x18\x01 \x01(\x0b\x32\x15.datasets.v1.GeometryR\x08geometry\x12\x32\n\x04mode\x18\x02 \x01(\x0e\x32\x1e.datasets.v1.SpatialFilterModeR\x04mode\x12Q\n\x11\x63oordinate_system\x18\x03 \x01(\x0e\x32$.datasets.v1.SpatialCoordinateSystemR\x10\x63oordinateSystem\"\xca\x01\n\x0cQueryRequest\x12\x35\n\x0e\x63ollection_ids\x18\x01 \x03(\x0b\x32\x0e.tilebox.v1.IDR\rcollectionIds\x12\x33\n\x07\x66ilters\x18\x02 \x01(\x0b\x32\x19.datasets.v1.QueryFiltersR\x07\x66ilters\x12\x31\n\x04page\x18\x03 \x01(\x0b\x32\x16.tilebox.v1.PaginationB\x05\xaa\x01\x02\x08\x01R\x04page\x12\x1b\n\tskip_data\x18\x04 \x01(\x08R\x08skipData\"{\n\x0fQueryResultPage\x12,\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x18.datasets.v1.RepeatedAnyR\x04\x64\x61ta\x12:\n\tnext_page\x18\x02 \x01(\x0b\x32\x16.tilebox.v1.PaginationB\x05\xaa\x01\x02\x08\x01R\x08nextPage*~\n\x11SpatialFilterMode\x12#\n\x1fSPATIAL_FILTER_MODE_UNSPECIFIED\x10\x00\x12\"\n\x1eSPATIAL_FILTER_MODE_INTERSECTS\x10\x01\x12 
\n\x1cSPATIAL_FILTER_MODE_CONTAINS\x10\x02*\x96\x01\n\x17SpatialCoordinateSystem\x12)\n%SPATIAL_COORDINATE_SYSTEM_UNSPECIFIED\x10\x00\x12\'\n#SPATIAL_COORDINATE_SYSTEM_CARTESIAN\x10\x01\x12\'\n#SPATIAL_COORDINATE_SYSTEM_SPHERICAL\x10\x02\x32\xcd\x02\n\x11\x44\x61taAccessService\x12`\n\x15GetDatasetForInterval\x12).datasets.v1.GetDatasetForIntervalRequest\x1a\x1a.datasets.v1.DatapointPage\"\x00\x12R\n\x10GetDatapointByID\x12$.datasets.v1.GetDatapointByIdRequest\x1a\x16.datasets.v1.Datapoint\"\x00\x12>\n\tQueryByID\x12\x1d.datasets.v1.QueryByIDRequest\x1a\x10.datasets.v1.Any\"\x00\x12\x42\n\x05Query\x12\x19.datasets.v1.QueryRequest\x1a\x1c.datasets.v1.QueryResultPage\"\x00\x42t\n\x0f\x63om.datasets.v1B\x0f\x44\x61taAccessProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.data_access_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\017DataAccessProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_SPATIALFILTERMODE']._serialized_start=1497 - _globals['_SPATIALFILTERMODE']._serialized_end=1623 - _globals['_SPATIALCOORDINATESYSTEM']._serialized_start=1626 - _globals['_SPATIALCOORDINATESYSTEM']._serialized_end=1776 - _globals['_GETDATASETFORINTERVALREQUEST']._serialized_start=107 - _globals['_GETDATASETFORINTERVALREQUEST']._serialized_end=440 - _globals['_GETDATAPOINTBYIDREQUEST']._serialized_start=442 - _globals['_GETDATAPOINTBYIDREQUEST']._serialized_end=549 - _globals['_QUERYBYIDREQUEST']._serialized_start=552 - _globals['_QUERYBYIDREQUEST']._serialized_end=688 - _globals['_QUERYFILTERS']._serialized_start=691 - _globals['_QUERYFILTERS']._serialized_end=938 - _globals['_SPATIALFILTER']._serialized_start=941 - _globals['_SPATIALFILTER']._serialized_end=1142 - _globals['_QUERYREQUEST']._serialized_start=1145 - _globals['_QUERYREQUEST']._serialized_end=1356 - _globals['_QUERYRESULTPAGE']._serialized_start=1359 - _globals['_QUERYRESULTPAGE']._serialized_end=1495 - _globals['_DATAACCESSSERVICE']._serialized_start=1779 - _globals['_DATAACCESSSERVICE']._serialized_end=2112 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\017DataAccessProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_GETDATASETFORINTERVALREQUEST'].fields_by_name['page']._loaded_options = None + _globals['_GETDATASETFORINTERVALREQUEST'].fields_by_name['page']._serialized_options = b'\252\001\002\010\001' + _globals['_QUERYREQUEST'].fields_by_name['page']._loaded_options = None + _globals['_QUERYREQUEST'].fields_by_name['page']._serialized_options = b'\252\001\002\010\001' + _globals['_QUERYRESULTPAGE'].fields_by_name['next_page']._loaded_options = None + _globals['_QUERYRESULTPAGE'].fields_by_name['next_page']._serialized_options = b'\252\001\002\010\001' + _globals['_SPATIALFILTERMODE']._serialized_start=1492 + _globals['_SPATIALFILTERMODE']._serialized_end=1618 + _globals['_SPATIALCOORDINATESYSTEM']._serialized_start=1621 + 
_globals['_SPATIALCOORDINATESYSTEM']._serialized_end=1771 + _globals['_GETDATASETFORINTERVALREQUEST']._serialized_start=152 + _globals['_GETDATASETFORINTERVALREQUEST']._serialized_end=469 + _globals['_GETDATAPOINTBYIDREQUEST']._serialized_start=471 + _globals['_GETDATAPOINTBYIDREQUEST']._serialized_end=578 + _globals['_QUERYBYIDREQUEST']._serialized_start=581 + _globals['_QUERYBYIDREQUEST']._serialized_end=715 + _globals['_QUERYFILTERS']._serialized_start=718 + _globals['_QUERYFILTERS']._serialized_end=956 + _globals['_SPATIALFILTER']._serialized_start=959 + _globals['_SPATIALFILTER']._serialized_end=1160 + _globals['_QUERYREQUEST']._serialized_start=1163 + _globals['_QUERYREQUEST']._serialized_end=1365 + _globals['_QUERYRESULTPAGE']._serialized_start=1367 + _globals['_QUERYRESULTPAGE']._serialized_end=1490 + _globals['_DATAACCESSSERVICE']._serialized_start=1774 + _globals['_DATAACCESSSERVICE']._serialized_end=2107 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.pyi index dd078d1..01ca70b 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/data_access_pb2.pyi @@ -1,5 +1,7 @@ from tilebox.datasets.datasets.v1 import core_pb2 as _core_pb2 from tilebox.datasets.datasets.v1 import well_known_types_pb2 as _well_known_types_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as _query_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -35,12 +37,12 @@ class GetDatasetForIntervalRequest(_message.Message): SKIP_DATA_FIELD_NUMBER: _ClassVar[int] SKIP_META_FIELD_NUMBER: _ClassVar[int] collection_id: str - time_interval: _core_pb2.TimeInterval - datapoint_interval: _core_pb2.DatapointInterval + time_interval: _query_pb2.TimeInterval + datapoint_interval: _query_pb2.IDInterval page: _core_pb2.LegacyPagination skip_data: bool skip_meta: bool - def __init__(self, collection_id: _Optional[str] = ..., time_interval: _Optional[_Union[_core_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_core_pb2.DatapointInterval, _Mapping]] = ..., page: _Optional[_Union[_core_pb2.LegacyPagination, _Mapping]] = ..., skip_data: bool = ..., skip_meta: bool = ...) -> None: ... + def __init__(self, collection_id: _Optional[str] = ..., time_interval: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_query_pb2.IDInterval, _Mapping]] = ..., page: _Optional[_Union[_core_pb2.LegacyPagination, _Mapping]] = ..., skip_data: bool = ..., skip_meta: bool = ...) -> None: ... class GetDatapointByIdRequest(_message.Message): __slots__ = ("collection_id", "id", "skip_data") @@ -57,20 +59,20 @@ class QueryByIDRequest(_message.Message): COLLECTION_IDS_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] SKIP_DATA_FIELD_NUMBER: _ClassVar[int] - collection_ids: _containers.RepeatedCompositeFieldContainer[_core_pb2.ID] - id: _core_pb2.ID + collection_ids: _containers.RepeatedCompositeFieldContainer[_id_pb2.ID] + id: _id_pb2.ID skip_data: bool - def __init__(self, collection_ids: _Optional[_Iterable[_Union[_core_pb2.ID, _Mapping]]] = ..., id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., skip_data: bool = ...) -> None: ... 
+ def __init__(self, collection_ids: _Optional[_Iterable[_Union[_id_pb2.ID, _Mapping]]] = ..., id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., skip_data: bool = ...) -> None: ... class QueryFilters(_message.Message): __slots__ = ("time_interval", "datapoint_interval", "spatial_extent") TIME_INTERVAL_FIELD_NUMBER: _ClassVar[int] DATAPOINT_INTERVAL_FIELD_NUMBER: _ClassVar[int] SPATIAL_EXTENT_FIELD_NUMBER: _ClassVar[int] - time_interval: _core_pb2.TimeInterval - datapoint_interval: _core_pb2.DatapointInterval + time_interval: _query_pb2.TimeInterval + datapoint_interval: _query_pb2.IDInterval spatial_extent: SpatialFilter - def __init__(self, time_interval: _Optional[_Union[_core_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_core_pb2.DatapointInterval, _Mapping]] = ..., spatial_extent: _Optional[_Union[SpatialFilter, _Mapping]] = ...) -> None: ... + def __init__(self, time_interval: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_query_pb2.IDInterval, _Mapping]] = ..., spatial_extent: _Optional[_Union[SpatialFilter, _Mapping]] = ...) -> None: ... class SpatialFilter(_message.Message): __slots__ = ("geometry", "mode", "coordinate_system") @@ -88,16 +90,16 @@ class QueryRequest(_message.Message): FILTERS_FIELD_NUMBER: _ClassVar[int] PAGE_FIELD_NUMBER: _ClassVar[int] SKIP_DATA_FIELD_NUMBER: _ClassVar[int] - collection_ids: _containers.RepeatedCompositeFieldContainer[_core_pb2.ID] + collection_ids: _containers.RepeatedCompositeFieldContainer[_id_pb2.ID] filters: QueryFilters - page: _core_pb2.Pagination + page: _query_pb2.Pagination skip_data: bool - def __init__(self, collection_ids: _Optional[_Iterable[_Union[_core_pb2.ID, _Mapping]]] = ..., filters: _Optional[_Union[QueryFilters, _Mapping]] = ..., page: _Optional[_Union[_core_pb2.Pagination, _Mapping]] = ..., skip_data: bool = ...) -> None: ... + def __init__(self, collection_ids: _Optional[_Iterable[_Union[_id_pb2.ID, _Mapping]]] = ..., filters: _Optional[_Union[QueryFilters, _Mapping]] = ..., page: _Optional[_Union[_query_pb2.Pagination, _Mapping]] = ..., skip_data: bool = ...) -> None: ... class QueryResultPage(_message.Message): __slots__ = ("data", "next_page") DATA_FIELD_NUMBER: _ClassVar[int] NEXT_PAGE_FIELD_NUMBER: _ClassVar[int] data: _core_pb2.RepeatedAny - next_page: _core_pb2.Pagination - def __init__(self, data: _Optional[_Union[_core_pb2.RepeatedAny, _Mapping]] = ..., next_page: _Optional[_Union[_core_pb2.Pagination, _Mapping]] = ...) -> None: ... + next_page: _query_pb2.Pagination + def __init__(self, data: _Optional[_Union[_core_pb2.RepeatedAny, _Mapping]] = ..., next_page: _Optional[_Union[_query_pb2.Pagination, _Mapping]] = ...) -> None: ... 
diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.py index 03546b4..bfa73a7 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.py @@ -23,26 +23,27 @@ from tilebox.datasets.datasets.v1 import core_pb2 as datasets_dot_v1_dot_core__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n datasets/v1/data_ingestion.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\"\xaf\x01\n\x17IngestDatapointsRequest\x12\x34\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x0c\x63ollectionId\x12\x37\n\ndatapoints\x18\x02 \x01(\x0b\x32\x17.datasets.v1.DatapointsR\ndatapoints\x12%\n\x0e\x61llow_existing\x18\x03 \x01(\x08R\rallowExisting\"\x84\x01\n\rIngestRequest\x12\x34\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x0c\x63ollectionId\x12\x16\n\x06values\x18\x02 \x03(\x0cR\x06values\x12%\n\x0e\x61llow_existing\x18\x03 \x01(\x08R\rallowExisting\"\x8a\x01\n\x0eIngestResponse\x12\x1f\n\x0bnum_created\x18\x01 \x01(\x03R\nnumCreated\x12!\n\x0cnum_existing\x18\x02 \x01(\x03R\x0bnumExisting\x12\x34\n\rdatapoint_ids\x18\x03 \x03(\x0b\x32\x0f.datasets.v1.IDR\x0c\x64\x61tapointIds\"{\n\rDeleteRequest\x12\x34\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x0c\x63ollectionId\x12\x34\n\rdatapoint_ids\x18\x02 \x03(\x0b\x32\x0f.datasets.v1.IDR\x0c\x64\x61tapointIds\"1\n\x0e\x44\x65leteResponse\x12\x1f\n\x0bnum_deleted\x18\x01 \x01(\x03R\nnumDeleted2\xa0\x01\n\x14\x44\x61taIngestionService\x12\x43\n\x06Ingest\x12\x1a.datasets.v1.IngestRequest\x1a\x1b.datasets.v1.IngestResponse\"\x00\x12\x43\n\x06\x44\x65lete\x12\x1a.datasets.v1.DeleteRequest\x1a\x1b.datasets.v1.DeleteResponse\"\x00\x42r\n\x0f\x63om.datasets.v1B\x12\x44\x61taIngestionProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n datasets/v1/data_ingestion.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\x13tilebox/v1/id.proto\"\xae\x01\n\x17IngestDatapointsRequest\x12\x33\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x63ollectionId\x12\x37\n\ndatapoints\x18\x02 \x01(\x0b\x32\x17.datasets.v1.DatapointsR\ndatapoints\x12%\n\x0e\x61llow_existing\x18\x03 \x01(\x08R\rallowExisting\"\x83\x01\n\rIngestRequest\x12\x33\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x63ollectionId\x12\x16\n\x06values\x18\x02 \x03(\x0cR\x06values\x12%\n\x0e\x61llow_existing\x18\x03 \x01(\x08R\rallowExisting\"\x89\x01\n\x0eIngestResponse\x12\x1f\n\x0bnum_created\x18\x01 \x01(\x03R\nnumCreated\x12!\n\x0cnum_existing\x18\x02 \x01(\x03R\x0bnumExisting\x12\x33\n\rdatapoint_ids\x18\x03 \x03(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x64\x61tapointIds\"y\n\rDeleteRequest\x12\x33\n\rcollection_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x63ollectionId\x12\x33\n\rdatapoint_ids\x18\x02 \x03(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x64\x61tapointIds\"1\n\x0e\x44\x65leteResponse\x12\x1f\n\x0bnum_deleted\x18\x01 
\x01(\x03R\nnumDeleted2\xa0\x01\n\x14\x44\x61taIngestionService\x12\x43\n\x06Ingest\x12\x1a.datasets.v1.IngestRequest\x1a\x1b.datasets.v1.IngestResponse\"\x00\x12\x43\n\x06\x44\x65lete\x12\x1a.datasets.v1.DeleteRequest\x1a\x1b.datasets.v1.DeleteResponse\"\x00\x42w\n\x0f\x63om.datasets.v1B\x12\x44\x61taIngestionProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.data_ingestion_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\022DataIngestionProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_INGESTDATAPOINTSREQUEST']._serialized_start=74 - _globals['_INGESTDATAPOINTSREQUEST']._serialized_end=249 - _globals['_INGESTREQUEST']._serialized_start=252 - _globals['_INGESTREQUEST']._serialized_end=384 - _globals['_INGESTRESPONSE']._serialized_start=387 - _globals['_INGESTRESPONSE']._serialized_end=525 - _globals['_DELETEREQUEST']._serialized_start=527 - _globals['_DELETEREQUEST']._serialized_end=650 - _globals['_DELETERESPONSE']._serialized_start=652 - _globals['_DELETERESPONSE']._serialized_end=701 - _globals['_DATAINGESTIONSERVICE']._serialized_start=704 - _globals['_DATAINGESTIONSERVICE']._serialized_end=864 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\022DataIngestionProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_INGESTDATAPOINTSREQUEST']._serialized_start=95 + _globals['_INGESTDATAPOINTSREQUEST']._serialized_end=269 + _globals['_INGESTREQUEST']._serialized_start=272 + _globals['_INGESTREQUEST']._serialized_end=403 + _globals['_INGESTRESPONSE']._serialized_start=406 + _globals['_INGESTRESPONSE']._serialized_end=543 + _globals['_DELETEREQUEST']._serialized_start=545 + _globals['_DELETEREQUEST']._serialized_end=666 + _globals['_DELETERESPONSE']._serialized_start=668 + _globals['_DELETERESPONSE']._serialized_end=717 + _globals['_DATAINGESTIONSERVICE']._serialized_start=720 + _globals['_DATAINGESTIONSERVICE']._serialized_end=880 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.pyi index faebfc2..136aae8 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/data_ingestion_pb2.pyi @@ -1,4 +1,5 @@ from tilebox.datasets.datasets.v1 import core_pb2 as _core_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -11,20 +12,20 @@ class IngestDatapointsRequest(_message.Message): COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] DATAPOINTS_FIELD_NUMBER: _ClassVar[int] ALLOW_EXISTING_FIELD_NUMBER: _ClassVar[int] - collection_id: _core_pb2.ID + collection_id: _id_pb2.ID datapoints: _core_pb2.Datapoints allow_existing: bool - def __init__(self, collection_id: 
_Optional[_Union[_core_pb2.ID, _Mapping]] = ..., datapoints: _Optional[_Union[_core_pb2.Datapoints, _Mapping]] = ..., allow_existing: bool = ...) -> None: ... + def __init__(self, collection_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., datapoints: _Optional[_Union[_core_pb2.Datapoints, _Mapping]] = ..., allow_existing: bool = ...) -> None: ... class IngestRequest(_message.Message): __slots__ = ("collection_id", "values", "allow_existing") COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] VALUES_FIELD_NUMBER: _ClassVar[int] ALLOW_EXISTING_FIELD_NUMBER: _ClassVar[int] - collection_id: _core_pb2.ID + collection_id: _id_pb2.ID values: _containers.RepeatedScalarFieldContainer[bytes] allow_existing: bool - def __init__(self, collection_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., values: _Optional[_Iterable[bytes]] = ..., allow_existing: bool = ...) -> None: ... + def __init__(self, collection_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., values: _Optional[_Iterable[bytes]] = ..., allow_existing: bool = ...) -> None: ... class IngestResponse(_message.Message): __slots__ = ("num_created", "num_existing", "datapoint_ids") @@ -33,16 +34,16 @@ class IngestResponse(_message.Message): DATAPOINT_IDS_FIELD_NUMBER: _ClassVar[int] num_created: int num_existing: int - datapoint_ids: _containers.RepeatedCompositeFieldContainer[_core_pb2.ID] - def __init__(self, num_created: _Optional[int] = ..., num_existing: _Optional[int] = ..., datapoint_ids: _Optional[_Iterable[_Union[_core_pb2.ID, _Mapping]]] = ...) -> None: ... + datapoint_ids: _containers.RepeatedCompositeFieldContainer[_id_pb2.ID] + def __init__(self, num_created: _Optional[int] = ..., num_existing: _Optional[int] = ..., datapoint_ids: _Optional[_Iterable[_Union[_id_pb2.ID, _Mapping]]] = ...) -> None: ... class DeleteRequest(_message.Message): __slots__ = ("collection_id", "datapoint_ids") COLLECTION_ID_FIELD_NUMBER: _ClassVar[int] DATAPOINT_IDS_FIELD_NUMBER: _ClassVar[int] - collection_id: _core_pb2.ID - datapoint_ids: _containers.RepeatedCompositeFieldContainer[_core_pb2.ID] - def __init__(self, collection_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., datapoint_ids: _Optional[_Iterable[_Union[_core_pb2.ID, _Mapping]]] = ...) -> None: ... + collection_id: _id_pb2.ID + datapoint_ids: _containers.RepeatedCompositeFieldContainer[_id_pb2.ID] + def __init__(self, collection_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., datapoint_ids: _Optional[_Iterable[_Union[_id_pb2.ID, _Mapping]]] = ...) -> None: ... 
class DeleteResponse(_message.Message): __slots__ = ("num_deleted",) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.py index f233ee6..05b9c04 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.py @@ -25,28 +25,29 @@ from tilebox.datasets.datasets.v1 import well_known_types_pb2 as datasets_dot_v1_dot_well__known__types__pb2 from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x64\x61tasets/v1/dataset_type.proto\x12\x0b\x64\x61tasets.v1\x1a\"datasets/v1/well_known_types.proto\x1a google/protobuf/descriptor.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xaa\x01\n\x05\x46ield\x12\x45\n\ndescriptor\x18\x01 \x01(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\ndescriptor\x12<\n\nannotation\x18\x02 \x01(\x0b\x32\x1c.datasets.v1.FieldAnnotationR\nannotation\x12\x1c\n\tqueryable\x18\x03 \x01(\x08R\tqueryable\"X\n\x0f\x46ieldAnnotation\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12#\n\rexample_value\x18\x02 \x01(\tR\x0c\x65xampleValue\"g\n\x0b\x44\x61tasetType\x12,\n\x04kind\x18\x01 \x01(\x0e\x32\x18.datasets.v1.DatasetKindR\x04kind\x12*\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x12.datasets.v1.FieldR\x06\x66ields\"\xf4\x01\n\rAnnotatedType\x12I\n\x0e\x64\x65scriptor_set\x18\x01 \x01(\x0b\x32\".google.protobuf.FileDescriptorSetR\rdescriptorSet\x12\x19\n\x08type_url\x18\x02 \x01(\tR\x07typeUrl\x12I\n\x11\x66ield_annotations\x18\x04 \x03(\x0b\x32\x1c.datasets.v1.FieldAnnotationR\x10\x66ieldAnnotations\x12,\n\x04kind\x18\x05 \x01(\x0e\x32\x18.datasets.v1.DatasetKindR\x04kindJ\x04\x08\x03\x10\x04\"\xa9\x01\n\x11TemporalDatapoint\x12.\n\x04time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x04time\x12!\n\x02id\x18\x02 \x01(\x0b\x32\x11.datasets.v1.UUIDR\x02id\x12\x41\n\x0eingestion_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\"\xe2\x01\n\x17SpatioTemporalDatapoint\x12.\n\x04time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x04time\x12!\n\x02id\x18\x02 \x01(\x0b\x32\x11.datasets.v1.UUIDR\x02id\x12\x41\n\x0eingestion_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x31\n\x08geometry\x18\x04 \x01(\x0b\x32\x15.datasets.v1.GeometryR\x08geometry*g\n\x0b\x44\x61tasetKind\x12\x1c\n\x18\x44\x41TASET_KIND_UNSPECIFIED\x10\x00\x12\x19\n\x15\x44\x41TASET_KIND_TEMPORAL\x10\x01\x12\x1f\n\x1b\x44\x41TASET_KIND_SPATIOTEMPORAL\x10\x02\x42p\n\x0f\x63om.datasets.v1B\x10\x44\x61tasetTypeProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x64\x61tasets/v1/dataset_type.proto\x12\x0b\x64\x61tasets.v1\x1a\"datasets/v1/well_known_types.proto\x1a google/protobuf/descriptor.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x13tilebox/v1/id.proto\"\xaa\x01\n\x05\x46ield\x12\x45\n\ndescriptor\x18\x01 \x01(\x0b\x32%.google.protobuf.FieldDescriptorProtoR\ndescriptor\x12<\n\nannotation\x18\x02 \x01(\x0b\x32\x1c.datasets.v1.FieldAnnotationR\nannotation\x12\x1c\n\tqueryable\x18\x03 
\x01(\x08R\tqueryable\"X\n\x0f\x46ieldAnnotation\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12#\n\rexample_value\x18\x02 \x01(\tR\x0c\x65xampleValue\"g\n\x0b\x44\x61tasetType\x12,\n\x04kind\x18\x01 \x01(\x0e\x32\x18.datasets.v1.DatasetKindR\x04kind\x12*\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x12.datasets.v1.FieldR\x06\x66ields\"\xf4\x01\n\rAnnotatedType\x12I\n\x0e\x64\x65scriptor_set\x18\x01 \x01(\x0b\x32\".google.protobuf.FileDescriptorSetR\rdescriptorSet\x12\x19\n\x08type_url\x18\x02 \x01(\tR\x07typeUrl\x12I\n\x11\x66ield_annotations\x18\x04 \x03(\x0b\x32\x1c.datasets.v1.FieldAnnotationR\x10\x66ieldAnnotations\x12,\n\x04kind\x18\x05 \x01(\x0e\x32\x18.datasets.v1.DatasetKindR\x04kindJ\x04\x08\x03\x10\x04\"\xa6\x01\n\x11TemporalDatapoint\x12.\n\x04time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x04time\x12\x1e\n\x02id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x41\n\x0eingestion_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\"\xdf\x01\n\x17SpatioTemporalDatapoint\x12.\n\x04time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x04time\x12\x1e\n\x02id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x41\n\x0eingestion_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ringestionTime\x12\x31\n\x08geometry\x18\x04 \x01(\x0b\x32\x15.datasets.v1.GeometryR\x08geometry*g\n\x0b\x44\x61tasetKind\x12\x1c\n\x18\x44\x41TASET_KIND_UNSPECIFIED\x10\x00\x12\x19\n\x15\x44\x41TASET_KIND_TEMPORAL\x10\x01\x12\x1f\n\x1b\x44\x41TASET_KIND_SPATIOTEMPORAL\x10\x02\x42u\n\x0f\x63om.datasets.v1B\x10\x44\x61tasetTypeProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.dataset_type_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\020DatasetTypeProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_DATASETKIND']._serialized_start=1166 - _globals['_DATASETKIND']._serialized_end=1269 - _globals['_FIELD']._serialized_start=151 - _globals['_FIELD']._serialized_end=321 - _globals['_FIELDANNOTATION']._serialized_start=323 - _globals['_FIELDANNOTATION']._serialized_end=411 - _globals['_DATASETTYPE']._serialized_start=413 - _globals['_DATASETTYPE']._serialized_end=516 - _globals['_ANNOTATEDTYPE']._serialized_start=519 - _globals['_ANNOTATEDTYPE']._serialized_end=763 - _globals['_TEMPORALDATAPOINT']._serialized_start=766 - _globals['_TEMPORALDATAPOINT']._serialized_end=935 - _globals['_SPATIOTEMPORALDATAPOINT']._serialized_start=938 - _globals['_SPATIOTEMPORALDATAPOINT']._serialized_end=1164 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\020DatasetTypeProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_DATASETKIND']._serialized_start=1181 + _globals['_DATASETKIND']._serialized_end=1284 + _globals['_FIELD']._serialized_start=172 + _globals['_FIELD']._serialized_end=342 + _globals['_FIELDANNOTATION']._serialized_start=344 + _globals['_FIELDANNOTATION']._serialized_end=432 + 
_globals['_DATASETTYPE']._serialized_start=434 + _globals['_DATASETTYPE']._serialized_end=537 + _globals['_ANNOTATEDTYPE']._serialized_start=540 + _globals['_ANNOTATEDTYPE']._serialized_end=784 + _globals['_TEMPORALDATAPOINT']._serialized_start=787 + _globals['_TEMPORALDATAPOINT']._serialized_end=953 + _globals['_SPATIOTEMPORALDATAPOINT']._serialized_start=956 + _globals['_SPATIOTEMPORALDATAPOINT']._serialized_end=1179 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.pyi index 4a1daa6..38eabc0 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/dataset_type_pb2.pyi @@ -1,6 +1,7 @@ from tilebox.datasets.datasets.v1 import well_known_types_pb2 as _well_known_types_pb2 from google.protobuf import descriptor_pb2 as _descriptor_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -62,9 +63,9 @@ class TemporalDatapoint(_message.Message): ID_FIELD_NUMBER: _ClassVar[int] INGESTION_TIME_FIELD_NUMBER: _ClassVar[int] time: _timestamp_pb2.Timestamp - id: _well_known_types_pb2.UUID + id: _id_pb2.ID ingestion_time: _timestamp_pb2.Timestamp - def __init__(self, time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., id: _Optional[_Union[_well_known_types_pb2.UUID, _Mapping]] = ..., ingestion_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + def __init__(self, time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., ingestion_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... class SpatioTemporalDatapoint(_message.Message): __slots__ = ("time", "id", "ingestion_time", "geometry") @@ -73,7 +74,7 @@ class SpatioTemporalDatapoint(_message.Message): INGESTION_TIME_FIELD_NUMBER: _ClassVar[int] GEOMETRY_FIELD_NUMBER: _ClassVar[int] time: _timestamp_pb2.Timestamp - id: _well_known_types_pb2.UUID + id: _id_pb2.ID ingestion_time: _timestamp_pb2.Timestamp geometry: _well_known_types_pb2.Geometry - def __init__(self, time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., id: _Optional[_Union[_well_known_types_pb2.UUID, _Mapping]] = ..., ingestion_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., geometry: _Optional[_Union[_well_known_types_pb2.Geometry, _Mapping]] = ...) -> None: ... + def __init__(self, time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., ingestion_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., geometry: _Optional[_Union[_well_known_types_pb2.Geometry, _Mapping]] = ...) -> None: ... 
diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.py index efa9981..d64e035 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.py @@ -24,36 +24,39 @@ from tilebox.datasets.datasets.v1 import core_pb2 as datasets_dot_v1_dot_core__pb2 from tilebox.datasets.datasets.v1 import dataset_type_pb2 as datasets_dot_v1_dot_dataset__type__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x64\x61tasets/v1/datasets.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\"\x8f\x01\n\x14\x43reateDatasetRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12,\n\x04type\x18\x02 \x01(\x0b\x32\x18.datasets.v1.DatasetTypeR\x04type\x12\x18\n\x07summary\x18\x03 \x01(\tR\x07summary\x12\x1b\n\tcode_name\x18\x04 \x01(\tR\x08\x63odeName\"H\n\x11GetDatasetRequest\x12\x12\n\x04slug\x18\x01 \x01(\tR\x04slug\x12\x1f\n\x02id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\"\x93\x01\n\x14UpdateDatasetRequest\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12,\n\x04type\x18\x03 \x01(\x0b\x32\x18.datasets.v1.DatasetTypeR\x04type\x12\x18\n\x07summary\x18\x04 \x01(\tR\x07summary\"t\n\nClientInfo\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12 \n\x0b\x65nvironment\x18\x02 \x01(\tR\x0b\x65nvironment\x12\x30\n\x08packages\x18\x03 \x03(\x0b\x32\x14.datasets.v1.PackageR\x08packages\"7\n\x07Package\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\"d\n\x1fUpdateDatasetDescriptionRequest\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\"7\n\x14\x44\x65leteDatasetRequest\x12\x1f\n\x02id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\x02id\"1\n\x15\x44\x65leteDatasetResponse\x12\x18\n\x07trashed\x18\x01 \x01(\x08R\x07trashed\"O\n\x13ListDatasetsRequest\x12\x38\n\x0b\x63lient_info\x18\x01 \x01(\x0b\x32\x17.datasets.v1.ClientInfoR\nclientInfo\"\x9f\x02\n\x14ListDatasetsResponse\x12\x30\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x14.datasets.v1.DatasetR\x08\x64\x61tasets\x12\x31\n\x06groups\x18\x02 \x03(\x0b\x32\x19.datasets.v1.DatasetGroupR\x06groups\x12%\n\x0eserver_message\x18\x03 \x01(\tR\rserverMessage\x12%\n\x0eowned_datasets\x18\x04 \x01(\x03R\rownedDatasets\x12\x39\n\x16maximum_owned_datasets\x18\x05 \x01(\x03H\x00R\x14maximumOwnedDatasets\x88\x01\x01\x42\x19\n\x17_maximum_owned_datasets2\x81\x04\n\x0e\x44\x61tasetService\x12J\n\rCreateDataset\x12!.datasets.v1.CreateDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12\x44\n\nGetDataset\x12\x1e.datasets.v1.GetDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12J\n\rUpdateDataset\x12!.datasets.v1.UpdateDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12`\n\x18UpdateDatasetDescription\x12,.datasets.v1.UpdateDatasetDescriptionRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12X\n\rDeleteDataset\x12!.datasets.v1.DeleteDatasetRequest\x1a\".datasets.v1.DeleteDatasetResponse\"\x00\x12U\n\x0cListDatasets\x12 .datasets.v1.ListDatasetsRequest\x1a!.datasets.v1.ListDatasetsResponse\"\x00\x42m\n\x0f\x63om.datasets.v1B\rDatasetsProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') 
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x64\x61tasets/v1/datasets.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\x1e\x64\x61tasets/v1/dataset_type.proto\x1a\x13tilebox/v1/id.proto\"\x8f\x01\n\x14\x43reateDatasetRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12,\n\x04type\x18\x02 \x01(\x0b\x32\x18.datasets.v1.DatasetTypeR\x04type\x12\x18\n\x07summary\x18\x03 \x01(\tR\x07summary\x12\x1b\n\tcode_name\x18\x04 \x01(\tR\x08\x63odeName\"G\n\x11GetDatasetRequest\x12\x12\n\x04slug\x18\x01 \x01(\tR\x04slug\x12\x1e\n\x02id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\"\x92\x01\n\x14UpdateDatasetRequest\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12,\n\x04type\x18\x03 \x01(\x0b\x32\x18.datasets.v1.DatasetTypeR\x04type\x12\x18\n\x07summary\x18\x04 \x01(\tR\x07summary\"t\n\nClientInfo\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12 \n\x0b\x65nvironment\x18\x02 \x01(\tR\x0b\x65nvironment\x12\x30\n\x08packages\x18\x03 \x03(\x0b\x32\x14.datasets.v1.PackageR\x08packages\"7\n\x07Package\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\"c\n\x1fUpdateDatasetDescriptionRequest\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\"6\n\x14\x44\x65leteDatasetRequest\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\"1\n\x15\x44\x65leteDatasetResponse\x12\x18\n\x07trashed\x18\x01 \x01(\x08R\x07trashed\"O\n\x13ListDatasetsRequest\x12\x38\n\x0b\x63lient_info\x18\x01 \x01(\x0b\x32\x17.datasets.v1.ClientInfoR\nclientInfo\"\x86\x02\n\x14ListDatasetsResponse\x12\x30\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x14.datasets.v1.DatasetR\x08\x64\x61tasets\x12\x31\n\x06groups\x18\x02 \x03(\x0b\x32\x19.datasets.v1.DatasetGroupR\x06groups\x12%\n\x0eserver_message\x18\x03 \x01(\tR\rserverMessage\x12%\n\x0eowned_datasets\x18\x04 \x01(\x03R\rownedDatasets\x12;\n\x16maximum_owned_datasets\x18\x05 \x01(\x03\x42\x05\xaa\x01\x02\x08\x01R\x14maximumOwnedDatasets2\x81\x04\n\x0e\x44\x61tasetService\x12J\n\rCreateDataset\x12!.datasets.v1.CreateDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12\x44\n\nGetDataset\x12\x1e.datasets.v1.GetDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12J\n\rUpdateDataset\x12!.datasets.v1.UpdateDatasetRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12`\n\x18UpdateDatasetDescription\x12,.datasets.v1.UpdateDatasetDescriptionRequest\x1a\x14.datasets.v1.Dataset\"\x00\x12X\n\rDeleteDataset\x12!.datasets.v1.DeleteDatasetRequest\x1a\".datasets.v1.DeleteDatasetResponse\"\x00\x12U\n\x0cListDatasets\x12 .datasets.v1.ListDatasetsRequest\x1a!.datasets.v1.ListDatasetsResponse\"\x00\x42r\n\x0f\x63om.datasets.v1B\rDatasetsProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.datasets_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\rDatasetsProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_CREATEDATASETREQUEST']._serialized_start=100 - 
_globals['_CREATEDATASETREQUEST']._serialized_end=243 - _globals['_GETDATASETREQUEST']._serialized_start=245 - _globals['_GETDATASETREQUEST']._serialized_end=317 - _globals['_UPDATEDATASETREQUEST']._serialized_start=320 - _globals['_UPDATEDATASETREQUEST']._serialized_end=467 - _globals['_CLIENTINFO']._serialized_start=469 - _globals['_CLIENTINFO']._serialized_end=585 - _globals['_PACKAGE']._serialized_start=587 - _globals['_PACKAGE']._serialized_end=642 - _globals['_UPDATEDATASETDESCRIPTIONREQUEST']._serialized_start=644 - _globals['_UPDATEDATASETDESCRIPTIONREQUEST']._serialized_end=744 - _globals['_DELETEDATASETREQUEST']._serialized_start=746 - _globals['_DELETEDATASETREQUEST']._serialized_end=801 - _globals['_DELETEDATASETRESPONSE']._serialized_start=803 - _globals['_DELETEDATASETRESPONSE']._serialized_end=852 - _globals['_LISTDATASETSREQUEST']._serialized_start=854 - _globals['_LISTDATASETSREQUEST']._serialized_end=933 - _globals['_LISTDATASETSRESPONSE']._serialized_start=936 - _globals['_LISTDATASETSRESPONSE']._serialized_end=1223 - _globals['_DATASETSERVICE']._serialized_start=1226 - _globals['_DATASETSERVICE']._serialized_end=1739 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\rDatasetsProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_LISTDATASETSRESPONSE'].fields_by_name['maximum_owned_datasets']._loaded_options = None + _globals['_LISTDATASETSRESPONSE'].fields_by_name['maximum_owned_datasets']._serialized_options = b'\252\001\002\010\001' + _globals['_CREATEDATASETREQUEST']._serialized_start=121 + _globals['_CREATEDATASETREQUEST']._serialized_end=264 + _globals['_GETDATASETREQUEST']._serialized_start=266 + _globals['_GETDATASETREQUEST']._serialized_end=337 + _globals['_UPDATEDATASETREQUEST']._serialized_start=340 + _globals['_UPDATEDATASETREQUEST']._serialized_end=486 + _globals['_CLIENTINFO']._serialized_start=488 + _globals['_CLIENTINFO']._serialized_end=604 + _globals['_PACKAGE']._serialized_start=606 + _globals['_PACKAGE']._serialized_end=661 + _globals['_UPDATEDATASETDESCRIPTIONREQUEST']._serialized_start=663 + _globals['_UPDATEDATASETDESCRIPTIONREQUEST']._serialized_end=762 + _globals['_DELETEDATASETREQUEST']._serialized_start=764 + _globals['_DELETEDATASETREQUEST']._serialized_end=818 + _globals['_DELETEDATASETRESPONSE']._serialized_start=820 + _globals['_DELETEDATASETRESPONSE']._serialized_end=869 + _globals['_LISTDATASETSREQUEST']._serialized_start=871 + _globals['_LISTDATASETSREQUEST']._serialized_end=950 + _globals['_LISTDATASETSRESPONSE']._serialized_start=953 + _globals['_LISTDATASETSRESPONSE']._serialized_end=1215 + _globals['_DATASETSERVICE']._serialized_start=1218 + _globals['_DATASETSERVICE']._serialized_end=1731 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.pyi index 19f1409..ee4e15b 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/datasets_pb2.pyi @@ -1,5 +1,6 @@ from tilebox.datasets.datasets.v1 import core_pb2 as _core_pb2 from tilebox.datasets.datasets.v1 import dataset_type_pb2 as _dataset_type_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as 
_message @@ -24,8 +25,8 @@ class GetDatasetRequest(_message.Message): SLUG_FIELD_NUMBER: _ClassVar[int] ID_FIELD_NUMBER: _ClassVar[int] slug: str - id: _core_pb2.ID - def __init__(self, slug: _Optional[str] = ..., id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ...) -> None: ... + id: _id_pb2.ID + def __init__(self, slug: _Optional[str] = ..., id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... class UpdateDatasetRequest(_message.Message): __slots__ = ("id", "name", "type", "summary") @@ -33,11 +34,11 @@ class UpdateDatasetRequest(_message.Message): NAME_FIELD_NUMBER: _ClassVar[int] TYPE_FIELD_NUMBER: _ClassVar[int] SUMMARY_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.ID + id: _id_pb2.ID name: str type: _dataset_type_pb2.DatasetType summary: str - def __init__(self, id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ..., type: _Optional[_Union[_dataset_type_pb2.DatasetType, _Mapping]] = ..., summary: _Optional[str] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ..., type: _Optional[_Union[_dataset_type_pb2.DatasetType, _Mapping]] = ..., summary: _Optional[str] = ...) -> None: ... class ClientInfo(_message.Message): __slots__ = ("name", "environment", "packages") @@ -61,15 +62,15 @@ class UpdateDatasetDescriptionRequest(_message.Message): __slots__ = ("id", "description") ID_FIELD_NUMBER: _ClassVar[int] DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.ID + id: _id_pb2.ID description: str - def __init__(self, id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., description: _Optional[str] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., description: _Optional[str] = ...) -> None: ... class DeleteDatasetRequest(_message.Message): __slots__ = ("id",) ID_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.ID - def __init__(self, id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ...) -> None: ... + id: _id_pb2.ID + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... 
class DeleteDatasetResponse(_message.Message): __slots__ = ("trashed",) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.py index ed26e47..62943d4 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.py @@ -22,20 +22,21 @@ _sym_db = _symbol_database.Default() -from tilebox.datasets.datasets.v1 import core_pb2 as datasets_dot_v1_dot_core__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as tilebox_dot_v1_dot_query__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x64\x61tasets/v1/timeseries.proto\x12\x0b\x64\x61tasets.v1\x1a\x16\x64\x61tasets/v1/core.proto\x1a\x1egoogle/protobuf/duration.proto\"\x88\x03\n\x16TimeseriesDatasetChunk\x12.\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0f.datasets.v1.IDR\tdatasetId\x12\x34\n\rcollection_id\x18\x02 \x01(\x0b\x32\x0f.datasets.v1.IDR\x0c\x63ollectionId\x12>\n\rtime_interval\x18\x03 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalR\x0ctimeInterval\x12M\n\x12\x64\x61tapoint_interval\x18\x04 \x01(\x0b\x32\x1e.datasets.v1.DatapointIntervalR\x11\x64\x61tapointInterval\x12#\n\rbranch_factor\x18\x05 \x01(\x05R\x0c\x62ranchFactor\x12\x1d\n\nchunk_size\x18\x06 \x01(\x05R\tchunkSize\x12\x35\n\x17\x64\x61tapoints_per_365_days\x18\x07 \x01(\x03R\x14\x64\x61tapointsPer365Days\"\x85\x01\n\tTimeChunk\x12>\n\rtime_interval\x18\x01 \x01(\x0b\x32\x19.datasets.v1.TimeIntervalR\x0ctimeInterval\x12\x38\n\nchunk_size\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR\tchunkSizeBo\n\x0f\x63om.datasets.v1B\x0fTimeseriesProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x64\x61tasets/v1/timeseries.proto\x12\x0b\x64\x61tasets.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x13tilebox/v1/id.proto\x1a\x16tilebox/v1/query.proto\"\xfd\x02\n\x16TimeseriesDatasetChunk\x12-\n\ndataset_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\tdatasetId\x12\x33\n\rcollection_id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x63ollectionId\x12=\n\rtime_interval\x18\x03 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalR\x0ctimeInterval\x12\x45\n\x12\x64\x61tapoint_interval\x18\x04 \x01(\x0b\x32\x16.tilebox.v1.IDIntervalR\x11\x64\x61tapointInterval\x12#\n\rbranch_factor\x18\x05 \x01(\x05R\x0c\x62ranchFactor\x12\x1d\n\nchunk_size\x18\x06 \x01(\x05R\tchunkSize\x12\x35\n\x17\x64\x61tapoints_per_365_days\x18\x07 \x01(\x03R\x14\x64\x61tapointsPer365Days\"\x84\x01\n\tTimeChunk\x12=\n\rtime_interval\x18\x01 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalR\x0ctimeInterval\x12\x38\n\nchunk_size\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR\tchunkSizeBt\n\x0f\x63om.datasets.v1B\x0fTimeseriesProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.timeseries_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - 
_globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\017TimeseriesProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' - _globals['_TIMESERIESDATASETCHUNK']._serialized_start=102 - _globals['_TIMESERIESDATASETCHUNK']._serialized_end=494 - _globals['_TIMECHUNK']._serialized_start=497 - _globals['_TIMECHUNK']._serialized_end=630 + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\017TimeseriesProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' + _globals['_TIMESERIESDATASETCHUNK']._serialized_start=123 + _globals['_TIMESERIESDATASETCHUNK']._serialized_end=504 + _globals['_TIMECHUNK']._serialized_start=507 + _globals['_TIMECHUNK']._serialized_end=639 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.pyi b/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.pyi index a8d7f73..a5e2779 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.pyi +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/timeseries_pb2.pyi @@ -1,5 +1,6 @@ -from tilebox.datasets.datasets.v1 import core_pb2 as _core_pb2 from google.protobuf import duration_pb2 as _duration_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as _query_pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union @@ -15,19 +16,19 @@ class TimeseriesDatasetChunk(_message.Message): BRANCH_FACTOR_FIELD_NUMBER: _ClassVar[int] CHUNK_SIZE_FIELD_NUMBER: _ClassVar[int] DATAPOINTS_PER_365_DAYS_FIELD_NUMBER: _ClassVar[int] - dataset_id: _core_pb2.ID - collection_id: _core_pb2.ID - time_interval: _core_pb2.TimeInterval - datapoint_interval: _core_pb2.DatapointInterval + dataset_id: _id_pb2.ID + collection_id: _id_pb2.ID + time_interval: _query_pb2.TimeInterval + datapoint_interval: _query_pb2.IDInterval branch_factor: int chunk_size: int datapoints_per_365_days: int - def __init__(self, dataset_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., collection_id: _Optional[_Union[_core_pb2.ID, _Mapping]] = ..., time_interval: _Optional[_Union[_core_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_core_pb2.DatapointInterval, _Mapping]] = ..., branch_factor: _Optional[int] = ..., chunk_size: _Optional[int] = ..., datapoints_per_365_days: _Optional[int] = ...) -> None: ... + def __init__(self, dataset_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., collection_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., time_interval: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., datapoint_interval: _Optional[_Union[_query_pb2.IDInterval, _Mapping]] = ..., branch_factor: _Optional[int] = ..., chunk_size: _Optional[int] = ..., datapoints_per_365_days: _Optional[int] = ...) -> None: ... 
class TimeChunk(_message.Message): __slots__ = ("time_interval", "chunk_size") TIME_INTERVAL_FIELD_NUMBER: _ClassVar[int] CHUNK_SIZE_FIELD_NUMBER: _ClassVar[int] - time_interval: _core_pb2.TimeInterval + time_interval: _query_pb2.TimeInterval chunk_size: _duration_pb2.Duration - def __init__(self, time_interval: _Optional[_Union[_core_pb2.TimeInterval, _Mapping]] = ..., chunk_size: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... + def __init__(self, time_interval: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., chunk_size: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... diff --git a/tilebox-datasets/tilebox/datasets/datasets/v1/well_known_types_pb2.py b/tilebox-datasets/tilebox/datasets/datasets/v1/well_known_types_pb2.py index df9e62a..f501b34 100644 --- a/tilebox-datasets/tilebox/datasets/datasets/v1/well_known_types_pb2.py +++ b/tilebox-datasets/tilebox/datasets/datasets/v1/well_known_types_pb2.py @@ -26,14 +26,14 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"datasets/v1/well_known_types.proto\x12\x0b\x64\x61tasets.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x1a\n\x04UUID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"0\n\x04Vec3\x12\x0c\n\x01x\x18\x01 \x01(\x01R\x01x\x12\x0c\n\x01y\x18\x02 \x01(\x01R\x01y\x12\x0c\n\x01z\x18\x03 \x01(\x01R\x01z\"L\n\nQuaternion\x12\x0e\n\x02q1\x18\x01 \x01(\x01R\x02q1\x12\x0e\n\x02q2\x18\x02 \x01(\x01R\x02q2\x12\x0e\n\x02q3\x18\x03 \x01(\x01R\x02q3\x12\x0e\n\x02q4\x18\x04 \x01(\x01R\x02q4\"B\n\x06LatLon\x12\x1a\n\x08latitude\x18\x01 \x01(\x01R\x08latitude\x12\x1c\n\tlongitude\x18\x02 \x01(\x01R\tlongitude\"a\n\tLatLonAlt\x12\x1a\n\x08latitude\x18\x01 \x01(\x01R\x08latitude\x12\x1c\n\tlongitude\x18\x02 \x01(\x01R\tlongitude\x12\x1a\n\x08\x61ltitude\x18\x03 \x01(\x01R\x08\x61ltitude\"\x1c\n\x08Geometry\x12\x10\n\x03wkb\x18\x01 \x01(\x0cR\x03wkb*t\n\x0f\x46lightDirection\x12 \n\x1c\x46LIGHT_DIRECTION_UNSPECIFIED\x10\x00\x12\x1e\n\x1a\x46LIGHT_DIRECTION_ASCENDING\x10\x01\x12\x1f\n\x1b\x46LIGHT_DIRECTION_DESCENDING\x10\x02*~\n\x14ObservationDirection\x12%\n!OBSERVATION_DIRECTION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aOBSERVATION_DIRECTION_LEFT\x10\x01\x12\x1f\n\x1bOBSERVATION_DIRECTION_RIGHT\x10\x02*\x99\x01\n\x10OpendataProvider\x12!\n\x1dOPENDATA_PROVIDER_UNSPECIFIED\x10\x00\x12\x19\n\x15OPENDATA_PROVIDER_ASF\x10\x01\x12*\n&OPENDATA_PROVIDER_COPERNICUS_DATASPACE\x10\x02\x12\x1b\n\x17OPENDATA_PROVIDER_UMBRA\x10\x03*\xf1\x02\n\x0fProcessingLevel\x12 
\n\x1cPROCESSING_LEVEL_UNSPECIFIED\x10\x00\x12\x17\n\x13PROCESSING_LEVEL_L0\x10\x0c\x12\x17\n\x13PROCESSING_LEVEL_L1\x10\n\x12\x18\n\x14PROCESSING_LEVEL_L1A\x10\x01\x12\x18\n\x14PROCESSING_LEVEL_L1B\x10\x02\x12\x18\n\x14PROCESSING_LEVEL_L1C\x10\x03\x12\x17\n\x13PROCESSING_LEVEL_L2\x10\x04\x12\x18\n\x14PROCESSING_LEVEL_L2A\x10\x05\x12\x18\n\x14PROCESSING_LEVEL_L2B\x10\x06\x12\x17\n\x13PROCESSING_LEVEL_L3\x10\x07\x12\x18\n\x14PROCESSING_LEVEL_L3A\x10\x08\x12\x17\n\x13PROCESSING_LEVEL_L4\x10\t\x12#\n\x1fPROCESSING_LEVEL_NOT_APPLICABLE\x10\x0b*\x98\x02\n\x0cPolarization\x12\x1c\n\x18POLARIZATION_UNSPECIFIED\x10\x00\x12\x13\n\x0fPOLARIZATION_HH\x10\x01\x12\x13\n\x0fPOLARIZATION_HV\x10\x02\x12\x13\n\x0fPOLARIZATION_VH\x10\x03\x12\x13\n\x0fPOLARIZATION_VV\x10\x04\x12\x18\n\x14POLARIZATION_DUAL_HH\x10\x05\x12\x18\n\x14POLARIZATION_DUAL_HV\x10\x06\x12\x18\n\x14POLARIZATION_DUAL_VH\x10\x07\x12\x18\n\x14POLARIZATION_DUAL_VV\x10\x08\x12\x16\n\x12POLARIZATION_HH_HV\x10\t\x12\x16\n\x12POLARIZATION_VV_VH\x10\n*\xf1\x02\n\x0f\x41\x63quisitionMode\x12 \n\x1c\x41\x43QUISITION_MODE_UNSPECIFIED\x10\x00\x12\x17\n\x13\x41\x43QUISITION_MODE_SM\x10\x01\x12\x17\n\x13\x41\x43QUISITION_MODE_EW\x10\x02\x12\x17\n\x13\x41\x43QUISITION_MODE_IW\x10\x03\x12\x17\n\x13\x41\x43QUISITION_MODE_WV\x10\x04\x12\x1e\n\x1a\x41\x43QUISITION_MODE_SPOTLIGHT\x10\n\x12\x19\n\x15\x41\x43QUISITION_MODE_NOBS\x10\x14\x12\x19\n\x15\x41\x43QUISITION_MODE_EOBS\x10\x15\x12\x19\n\x15\x41\x43QUISITION_MODE_DASC\x10\x16\x12\x19\n\x15\x41\x43QUISITION_MODE_ABSR\x10\x17\x12\x18\n\x14\x41\x43QUISITION_MODE_VIC\x10\x18\x12\x18\n\x14\x41\x43QUISITION_MODE_RAW\x10\x19\x12\x18\n\x14\x41\x43QUISITION_MODE_TST\x10\x1a\x42s\n\x0f\x63om.datasets.v1B\x13WellKnownTypesProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"datasets/v1/well_known_types.proto\x12\x0b\x64\x61tasets.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x1a\n\x04UUID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"0\n\x04Vec3\x12\x0c\n\x01x\x18\x01 \x01(\x01R\x01x\x12\x0c\n\x01y\x18\x02 \x01(\x01R\x01y\x12\x0c\n\x01z\x18\x03 \x01(\x01R\x01z\"L\n\nQuaternion\x12\x0e\n\x02q1\x18\x01 \x01(\x01R\x02q1\x12\x0e\n\x02q2\x18\x02 \x01(\x01R\x02q2\x12\x0e\n\x02q3\x18\x03 \x01(\x01R\x02q3\x12\x0e\n\x02q4\x18\x04 \x01(\x01R\x02q4\"B\n\x06LatLon\x12\x1a\n\x08latitude\x18\x01 \x01(\x01R\x08latitude\x12\x1c\n\tlongitude\x18\x02 \x01(\x01R\tlongitude\"a\n\tLatLonAlt\x12\x1a\n\x08latitude\x18\x01 \x01(\x01R\x08latitude\x12\x1c\n\tlongitude\x18\x02 \x01(\x01R\tlongitude\x12\x1a\n\x08\x61ltitude\x18\x03 \x01(\x01R\x08\x61ltitude\"\x1c\n\x08Geometry\x12\x10\n\x03wkb\x18\x01 \x01(\x0cR\x03wkb*t\n\x0f\x46lightDirection\x12 \n\x1c\x46LIGHT_DIRECTION_UNSPECIFIED\x10\x00\x12\x1e\n\x1a\x46LIGHT_DIRECTION_ASCENDING\x10\x01\x12\x1f\n\x1b\x46LIGHT_DIRECTION_DESCENDING\x10\x02*~\n\x14ObservationDirection\x12%\n!OBSERVATION_DIRECTION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aOBSERVATION_DIRECTION_LEFT\x10\x01\x12\x1f\n\x1bOBSERVATION_DIRECTION_RIGHT\x10\x02*\x99\x01\n\x10OpendataProvider\x12!\n\x1dOPENDATA_PROVIDER_UNSPECIFIED\x10\x00\x12\x19\n\x15OPENDATA_PROVIDER_ASF\x10\x01\x12*\n&OPENDATA_PROVIDER_COPERNICUS_DATASPACE\x10\x02\x12\x1b\n\x17OPENDATA_PROVIDER_UMBRA\x10\x03*\xf1\x02\n\x0fProcessingLevel\x12 
\n\x1cPROCESSING_LEVEL_UNSPECIFIED\x10\x00\x12\x17\n\x13PROCESSING_LEVEL_L0\x10\x0c\x12\x17\n\x13PROCESSING_LEVEL_L1\x10\n\x12\x18\n\x14PROCESSING_LEVEL_L1A\x10\x01\x12\x18\n\x14PROCESSING_LEVEL_L1B\x10\x02\x12\x18\n\x14PROCESSING_LEVEL_L1C\x10\x03\x12\x17\n\x13PROCESSING_LEVEL_L2\x10\x04\x12\x18\n\x14PROCESSING_LEVEL_L2A\x10\x05\x12\x18\n\x14PROCESSING_LEVEL_L2B\x10\x06\x12\x17\n\x13PROCESSING_LEVEL_L3\x10\x07\x12\x18\n\x14PROCESSING_LEVEL_L3A\x10\x08\x12\x17\n\x13PROCESSING_LEVEL_L4\x10\t\x12#\n\x1fPROCESSING_LEVEL_NOT_APPLICABLE\x10\x0b*\x98\x02\n\x0cPolarization\x12\x1c\n\x18POLARIZATION_UNSPECIFIED\x10\x00\x12\x13\n\x0fPOLARIZATION_HH\x10\x01\x12\x13\n\x0fPOLARIZATION_HV\x10\x02\x12\x13\n\x0fPOLARIZATION_VH\x10\x03\x12\x13\n\x0fPOLARIZATION_VV\x10\x04\x12\x18\n\x14POLARIZATION_DUAL_HH\x10\x05\x12\x18\n\x14POLARIZATION_DUAL_HV\x10\x06\x12\x18\n\x14POLARIZATION_DUAL_VH\x10\x07\x12\x18\n\x14POLARIZATION_DUAL_VV\x10\x08\x12\x16\n\x12POLARIZATION_HH_HV\x10\t\x12\x16\n\x12POLARIZATION_VV_VH\x10\n*\xf1\x02\n\x0f\x41\x63quisitionMode\x12 \n\x1c\x41\x43QUISITION_MODE_UNSPECIFIED\x10\x00\x12\x17\n\x13\x41\x43QUISITION_MODE_SM\x10\x01\x12\x17\n\x13\x41\x43QUISITION_MODE_EW\x10\x02\x12\x17\n\x13\x41\x43QUISITION_MODE_IW\x10\x03\x12\x17\n\x13\x41\x43QUISITION_MODE_WV\x10\x04\x12\x1e\n\x1a\x41\x43QUISITION_MODE_SPOTLIGHT\x10\n\x12\x19\n\x15\x41\x43QUISITION_MODE_NOBS\x10\x14\x12\x19\n\x15\x41\x43QUISITION_MODE_EOBS\x10\x15\x12\x19\n\x15\x41\x43QUISITION_MODE_DASC\x10\x16\x12\x19\n\x15\x41\x43QUISITION_MODE_ABSR\x10\x17\x12\x18\n\x14\x41\x43QUISITION_MODE_VIC\x10\x18\x12\x18\n\x14\x41\x43QUISITION_MODE_RAW\x10\x19\x12\x18\n\x14\x41\x43QUISITION_MODE_TST\x10\x1a\x42x\n\x0f\x63om.datasets.v1B\x13WellKnownTypesProtoP\x01\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tasets.V1\xca\x02\x0b\x44\x61tasets\\V1\xe2\x02\x17\x44\x61tasets\\V1\\GPBMetadata\xea\x02\x0c\x44\x61tasets::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'datasets.v1.well_known_types_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\023WellKnownTypesProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1' + _globals['DESCRIPTOR']._serialized_options = b'\n\017com.datasets.v1B\023WellKnownTypesProtoP\001\242\002\003DXX\252\002\013Datasets.V1\312\002\013Datasets\\V1\342\002\027Datasets\\V1\\GPBMetadata\352\002\014Datasets::V1\222\003\002\010\002' _globals['_FLIGHTDIRECTION']._serialized_start=469 _globals['_FLIGHTDIRECTION']._serialized_end=585 _globals['_OBSERVATIONDIRECTION']._serialized_start=587 diff --git a/tilebox-datasets/tilebox/datasets/progress.py b/tilebox-datasets/tilebox/datasets/progress.py index 2432946..4e45189 100644 --- a/tilebox-datasets/tilebox/datasets/progress.py +++ b/tilebox-datasets/tilebox/datasets/progress.py @@ -5,7 +5,7 @@ from tqdm.auto import tqdm -from tilebox.datasets.data import TimeInterval +from tilebox.datasets.query.time_interval import TimeInterval ProgressCallback = Callable[[float], Any] diff --git a/tilebox-datasets/tilebox/datasets/query/__init__.py b/tilebox-datasets/tilebox/datasets/query/__init__.py new file mode 100644 index 0000000..c77fae8 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/query/__init__.py @@ -0,0 +1,8 @@ +from 
tilebox.datasets.query.time_interval import (
+    TimeInterval,
+    TimeIntervalLike,
+    datetime_to_timestamp,
+    timestamp_to_datetime,
+)
+
+__all__ = ["TimeInterval", "TimeIntervalLike", "datetime_to_timestamp", "timestamp_to_datetime"]
diff --git a/tilebox-datasets/tilebox/datasets/query/id_interval.py b/tilebox-datasets/tilebox/datasets/query/id_interval.py
new file mode 100644
index 0000000..628b2a3
--- /dev/null
+++ b/tilebox-datasets/tilebox/datasets/query/id_interval.py
@@ -0,0 +1,72 @@
+from dataclasses import dataclass
+from typing import TypeAlias
+from uuid import UUID
+
+from tilebox.datasets.tilebox.v1 import query_pb2
+from tilebox.datasets.uuid import uuid_message_to_uuid, uuid_to_uuid_message
+
+IDIntervalLike: TypeAlias = "tuple[str, str] | tuple[UUID, UUID] | IDInterval"
+
+
+@dataclass(frozen=True)
+class IDInterval:
+    start_id: UUID
+    end_id: UUID
+    start_exclusive: bool
+    end_inclusive: bool
+
+    @classmethod
+    def from_message(cls, interval: query_pb2.IDInterval) -> "IDInterval":
+        return cls(
+            start_id=uuid_message_to_uuid(interval.start_id),
+            end_id=uuid_message_to_uuid(interval.end_id),
+            start_exclusive=interval.start_exclusive,
+            end_inclusive=interval.end_inclusive,
+        )
+
+    def to_message(self) -> query_pb2.IDInterval:
+        return query_pb2.IDInterval(
+            start_id=uuid_to_uuid_message(self.start_id),
+            end_id=uuid_to_uuid_message(self.end_id),
+            start_exclusive=self.start_exclusive,
+            end_inclusive=self.end_inclusive,
+        )
+
+    @classmethod
+    def parse(cls, arg: IDIntervalLike, start_exclusive: bool = False, end_inclusive: bool = True) -> "IDInterval":
+        """
+        Convert a variety of input types to an IDInterval.
+
+        Supported input types:
+        - IDInterval: Return the input as is
+        - tuple of two UUIDs: Return an IDInterval with start and end id set to the given values
+        - tuple of two strings: Return an IDInterval with start and end id set to the UUIDs parsed from the given strings
+
+        Args:
+            arg: The input to convert
+            start_exclusive: Whether the start id is exclusive
+            end_inclusive: Whether the end id is inclusive
+
+        Returns:
+            IDInterval: The parsed ID interval
+        """
+
+        match arg:
+            case IDInterval(_, _, _, _):
+                return arg
+            case (UUID(), UUID()):
+                start, end = arg
+                return IDInterval(
+                    start_id=start,
+                    end_id=end,
+                    start_exclusive=start_exclusive,
+                    end_inclusive=end_inclusive,
+                )
+            case (str(), str()):
+                start, end = arg
+                return IDInterval(
+                    start_id=UUID(start),
+                    end_id=UUID(end),
+                    start_exclusive=start_exclusive,
+                    end_inclusive=end_inclusive,
+                )
diff --git a/tilebox-datasets/tilebox/datasets/data/pagination.py b/tilebox-datasets/tilebox/datasets/query/pagination.py
similarity index 51%
rename from tilebox-datasets/tilebox/datasets/data/pagination.py
rename to tilebox-datasets/tilebox/datasets/query/pagination.py
index 2ce0b7f..e5791b7 100644
--- a/tilebox-datasets/tilebox/datasets/data/pagination.py
+++ b/tilebox-datasets/tilebox/datasets/query/pagination.py
@@ -1,8 +1,8 @@
 from dataclasses import dataclass
 from uuid import UUID
 
-from tilebox.datasets.data.uuid import uuid_message_to_optional_uuid, uuid_to_uuid_message
-from tilebox.datasets.datasets.v1 import core_pb2
+from tilebox.datasets.tilebox.v1 import query_pb2
+from tilebox.datasets.uuid import uuid_message_to_optional_uuid, uuid_to_uuid_message
 
 
 @dataclass
@@ -11,11 +11,11 @@ class Pagination:
     starting_after: UUID | None = None
 
     @classmethod
-    def from_message(cls, page: core_pb2.Pagination | None) -> "Pagination":
+    def from_message(cls, page: query_pb2.Pagination | None) -> "Pagination":
         if page is None:
             return cls()
         # convert falsish values (0 or empty string) to None
         return cls(limit=page.limit or None, starting_after=uuid_message_to_optional_uuid(page.starting_after))
 
-    def to_message(self) -> core_pb2.Pagination:
-        return core_pb2.Pagination(limit=self.limit, starting_after=uuid_to_uuid_message(self.starting_after))
+    def to_message(self) -> query_pb2.Pagination:
+        return query_pb2.Pagination(limit=self.limit, starting_after=uuid_to_uuid_message(self.starting_after))
diff --git a/tilebox-datasets/tilebox/datasets/query/time_interval.py b/tilebox-datasets/tilebox/datasets/query/time_interval.py
new file mode 100644
index 0000000..35ae71e
--- /dev/null
+++ b/tilebox-datasets/tilebox/datasets/query/time_interval.py
@@ -0,0 +1,237 @@
+from dataclasses import dataclass
+from datetime import datetime, timedelta, timezone
+from typing import TypeAlias
+
+import numpy as np
+import xarray as xr
+from google.protobuf.duration_pb2 import Duration
+from google.protobuf.timestamp_pb2 import Timestamp
+from pandas.core.tools.datetimes import DatetimeScalar, to_datetime
+
+from tilebox.datasets.tilebox.v1 import query_pb2
+
+_SMALLEST_POSSIBLE_TIMEDELTA = timedelta(microseconds=1)
+_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
+
+# A type alias for the different types that can be used to specify a time interval
+TimeIntervalLike: TypeAlias = (
+    DatetimeScalar | tuple[DatetimeScalar, DatetimeScalar] | xr.DataArray | xr.Dataset | "TimeInterval"
+)
+
+
+@dataclass(frozen=True)
+class TimeInterval:
+    """
+    A time interval from a given start to a given end time.
+    Both the start and end time can be exclusive or inclusive.
+    """
+
+    start: datetime
+    end: datetime
+
+    # We use exclusive for start and inclusive for end, because that way when both are false
+    # we have a half-open interval [start, end) which is the default behaviour we want to achieve.
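+    # Illustrative note (arbitrary timestamps, not from the original source): with the
+    # defaults below, an interval from t0 to t1 matches every timestamp t with
+    # t0 <= t < t1, while start_exclusive=True and end_inclusive=True would
+    # instead match t0 < t <= t1.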
+    start_exclusive: bool = False
+    end_inclusive: bool = False
+
+    def __post_init__(self) -> None:
+        """Validate the time interval"""
+        if not isinstance(self.start, datetime):
+            raise TypeError(f"Start time {self.start} must be a datetime object")
+        if not isinstance(self.end, datetime):
+            raise TypeError(f"End time {self.end} must be a datetime object")
+
+        # in case datetime objects are timezone naive, we assume they are in UTC
+        if self.start.tzinfo is None:
+            object.__setattr__(self, "start", self.start.replace(tzinfo=timezone.utc))  # since self is frozen
+        if self.end.tzinfo is None:
+            object.__setattr__(self, "end", self.end.replace(tzinfo=timezone.utc))  # since self is frozen
+
+    def to_half_open(self) -> "TimeInterval":
+        """Convert the time interval to a half-open interval [start, end)"""
+        return TimeInterval(
+            start=self.start + int(self.start_exclusive) * _SMALLEST_POSSIBLE_TIMEDELTA,
+            end=self.end + int(self.end_inclusive) * _SMALLEST_POSSIBLE_TIMEDELTA,
+            start_exclusive=False,
+            end_inclusive=False,
+        )
+
+    def astimezone(self, tzinfo: timezone) -> "TimeInterval":
+        """Convert start and end time to the given timezone"""
+        return TimeInterval(
+            start=self.start.astimezone(tzinfo),
+            end=self.end.astimezone(tzinfo),
+            start_exclusive=self.start_exclusive,
+            end_inclusive=self.end_inclusive,
+        )
+
+    def __eq__(self, other: object) -> bool:
+        """Check whether two intervals are equal"""
+        if not isinstance(other, TimeInterval):
+            return False
+
+        a, b = self.to_half_open(), other.to_half_open()
+        return (a.start, a.end) == (b.start, b.end)
+
+    def __hash__(self) -> int:
+        """Hash the time interval"""
+
+        # if two intervals are equal, they should have the same hash, so we convert to half-open intervals first
+        half_open = self.to_half_open()
+        return hash((half_open.start, half_open.end))
+
+    def __repr__(self) -> str:
+        return self.format()
+
+    def format(self, endpoints: bool = True, sep: str = ", ", timespec: str = "milliseconds") -> str:
+        """Human readable representation of the time interval
+
+        Args:
+            endpoints: Whether to format as interval using scientific interval notation [start, end].
+                "[", "]" for closed intervals and "(", ")" for open intervals.
+            sep: The separator to use between the start and end of the interval.
+            timespec: Specifies the number of additional terms of the time to include. Valid options are 'auto',
+                'hours', 'minutes', 'seconds', 'milliseconds' and 'microseconds'.
+        """
+        if self == _EMPTY_TIME_INTERVAL:
+            return ""
+
+        start = self.start.isoformat(timespec=timespec).replace("+00:00", " UTC")
+        end = self.end.isoformat(timespec=timespec).replace("+00:00", " UTC")
+
+        formatted = f"{start}{sep}{end}"
+        if endpoints:
+            start_ch = "[" if not self.start_exclusive else "("
+            end_ch = "]" if self.end_inclusive else ")"
+            formatted = f"{start_ch}{formatted}{end_ch}"
+        return formatted
+
+    def __str__(self) -> str:
+        """Human readable representation of the time interval"""
+        return self.format()
+
+    @classmethod
+    def parse(cls, arg: TimeIntervalLike) -> "TimeInterval":
+        """
+        Convert a variety of input types to a TimeInterval.
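+
+        Example (illustrative, with arbitrary dates; the output repr follows format() above):
+
+            TimeInterval.parse(("2021-01-01", "2021-02-01"))
+            # -> [2021-01-01T00:00:00.000 UTC, 2021-02-01T00:00:00.000 UTC)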
+
+        Supported input types:
+        - TimeInterval: Return the input as is
+        - DatetimeScalar: Return a TimeInterval with start and end time set to the same value and the end time inclusive
+        - tuple of two DatetimeScalar: Return a TimeInterval with start and end time set to the given values
+        - xr.DataArray: Return a TimeInterval with start and end time set to the first and last value in the array and
+          the end time inclusive
+        - xr.Dataset: Return a TimeInterval with start and end time set to the first and last value in the time
+          coordinate of the dataset and the end time inclusive
+
+        Args:
+            arg: The input to convert
+
+        Returns:
+            TimeInterval: The parsed time interval
+        """
+
+        match arg:
+            case TimeInterval(_, _, _, _):
+                return arg
+            case (start, end):
+                return TimeInterval(start=_convert_to_datetime(start), end=_convert_to_datetime(end))
+            case point_in_time if isinstance(point_in_time, DatetimeScalar | int):
+                dt = _convert_to_datetime(point_in_time)
+                return TimeInterval(start=dt, end=dt, start_exclusive=False, end_inclusive=True)
+            case arr if (
+                isinstance(arr, xr.DataArray)
+                and arr.ndim == 1
+                and arr.size > 0
+                and arr.dtype == np.dtype("datetime64[ns]")
+            ):
+                start = arr.data[0]
+                end = arr.data[-1]
+                return TimeInterval(
+                    start=_convert_to_datetime(start),
+                    end=_convert_to_datetime(end),
+                    start_exclusive=False,
+                    end_inclusive=True,
+                )
+            case ds if isinstance(ds, xr.Dataset) and "time" in ds.coords:
+                return cls.parse(ds.time)
+
+        raise ValueError(f"Failed to convert {arg} ({type(arg)}) to TimeInterval")
+
+    @classmethod
+    def from_message(
+        cls, interval: query_pb2.TimeInterval
+    ) -> "TimeInterval":  # let's use typing.Self once we require python >= 3.11
+        """Convert a TimeInterval protobuf message to a TimeInterval object."""
+
+        start = timestamp_to_datetime(interval.start_time)
+        end = timestamp_to_datetime(interval.end_time)
+        if start == _EPOCH and end == _EPOCH and not interval.start_exclusive and not interval.end_inclusive:
+            return _EMPTY_TIME_INTERVAL
+
+        return cls(
+            start=start,
+            end=end,
+            start_exclusive=interval.start_exclusive,
+            end_inclusive=interval.end_inclusive,
+        )
+
+    def to_message(self) -> query_pb2.TimeInterval:
+        """Convert a TimeInterval object to a TimeInterval protobuf message."""
+        return query_pb2.TimeInterval(
+            start_time=datetime_to_timestamp(self.start),
+            end_time=datetime_to_timestamp(self.end),
+            start_exclusive=self.start_exclusive,
+            end_inclusive=self.end_inclusive,
+        )
+
+
+# A sentinel value to use for time intervals that are empty
+_EMPTY_TIME_INTERVAL = TimeInterval(_EPOCH, _EPOCH, start_exclusive=True, end_inclusive=False)
+
+
+def _convert_to_datetime(arg: DatetimeScalar) -> datetime:
+    """Convert the given datetime scalar to a datetime object in the UTC timezone"""
+    dt: datetime = to_datetime(arg, utc=True).to_pydatetime()
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt.astimezone(timezone.utc)
+
+
+def timestamp_to_datetime(timestamp: Timestamp) -> datetime:
+    """Convert a protobuf timestamp to a datetime object."""
+    # datetime.fromtimestamp() expects a timestamp in seconds, not microseconds
+    # if we pass it as a floating point number, we will run into rounding errors
+    offset = timedelta(seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000)
+    return _EPOCH + offset
+
+
+def datetime_to_timestamp(dt: datetime) -> Timestamp:
+    """Convert a datetime object to a protobuf timestamp."""
+    # manual epoch offset calculation to avoid rounding errors and support negative timestamps (before 1970)
+    offset_us = datetime_to_us(dt.astimezone(timezone.utc))
+    seconds, us = divmod(offset_us, 10**6)
+    return Timestamp(seconds=seconds, nanos=us * 10**3)
+
+
+def datetime_to_us(dt: datetime) -> int:
+    """Convert a datetime object to a timestamp in microseconds since the epoch."""
+    offset = dt - _EPOCH
+    # implementation taken from timedelta.total_seconds() but without dividing by 10**6 at the end to avoid floating
+    # point rounding errors
+    return (offset.days * 86400 + offset.seconds) * 10**6 + offset.microseconds
+
+
+def us_to_datetime(us: int) -> datetime:
+    """Convert a timestamp in microseconds since the epoch to a datetime object."""
+    return _EPOCH + timedelta(microseconds=us)
+
+
+def timedelta_to_duration(td: timedelta) -> Duration:
+    """Convert a timedelta to a duration protobuf message."""
+    return Duration(seconds=int(td.total_seconds()), nanos=int(td.microseconds * 1000))
+
+
+def duration_to_timedelta(duration: Duration) -> timedelta:
+    """Convert a duration protobuf message to a timedelta."""
+    return timedelta(seconds=duration.seconds, microseconds=duration.nanos // 1000)
diff --git a/tilebox-datasets/tilebox/datasets/service.py b/tilebox-datasets/tilebox/datasets/service.py
index ab7f34a..1224a65 100644
--- a/tilebox-datasets/tilebox/datasets/service.py
+++ b/tilebox-datasets/tilebox/datasets/service.py
@@ -7,15 +7,8 @@
 from tilebox.datasets.data.collection import CollectionInfo
 from tilebox.datasets.data.data_access import QueryFilters
-from tilebox.datasets.data.datapoint import (
-    AnyMessage,
-    IngestResponse,
-    QueryResultPage,
-)
+from tilebox.datasets.data.datapoint import AnyMessage, IngestResponse, QueryResultPage
 from tilebox.datasets.data.datasets import Dataset, ListDatasetsResponse
-from tilebox.datasets.data.pagination import Pagination
-from tilebox.datasets.data.uuid import must_uuid_to_uuid_message, uuid_to_uuid_message
-from tilebox.datasets.datasets.v1 import core_pb2
 from tilebox.datasets.datasets.v1.collections_pb2 import (
     CreateCollectionRequest,
     DeleteCollectionRequest,
@@ -23,15 +16,15 @@
     ListCollectionsRequest,
 )
 from tilebox.datasets.datasets.v1.collections_pb2_grpc import CollectionServiceStub
-from tilebox.datasets.datasets.v1.data_access_pb2 import (
-    QueryByIDRequest,
-    QueryRequest,
-)
+from tilebox.datasets.datasets.v1.data_access_pb2 import QueryByIDRequest, QueryRequest
 from tilebox.datasets.datasets.v1.data_access_pb2_grpc import DataAccessServiceStub
 from tilebox.datasets.datasets.v1.data_ingestion_pb2 import DeleteRequest, IngestRequest
 from tilebox.datasets.datasets.v1.data_ingestion_pb2_grpc import DataIngestionServiceStub
 from tilebox.datasets.datasets.v1.datasets_pb2 import ClientInfo, GetDatasetRequest, ListDatasetsRequest, Package
 from tilebox.datasets.datasets.v1.datasets_pb2_grpc import DatasetServiceStub
+from tilebox.datasets.query.pagination import Pagination
+from tilebox.datasets.tilebox.v1 import id_pb2
+from tilebox.datasets.uuid import must_uuid_to_uuid_message, uuid_to_uuid_message
 
 
 class TileboxDatasetService:
@@ -176,7 +169,7 @@ def delete(self, collection_id: UUID, datapoints: list[UUID]) -> Promise[int]:
         """
         req = DeleteRequest(
             collection_id=uuid_to_uuid_message(collection_id),
-            datapoint_ids=[core_pb2.ID(uuid=datapoint.bytes) for datapoint in datapoints],
+            datapoint_ids=[id_pb2.ID(uuid=datapoint.bytes) for datapoint in datapoints],
         )
         return Promise.resolve(self._data_ingestion_service.Delete(req)).then(
             lambda response: response.num_deleted,
diff
--git a/tilebox-datasets/tilebox/datasets/sync/dataset.py b/tilebox-datasets/tilebox/datasets/sync/dataset.py index 362d862..45ec3b0 100644 --- a/tilebox-datasets/tilebox/datasets/sync/dataset.py +++ b/tilebox-datasets/tilebox/datasets/sync/dataset.py @@ -12,11 +12,8 @@ from _tilebox.grpc.producer_consumer import concurrent_producer_consumer from tilebox.datasets.data.collection import CollectionInfo from tilebox.datasets.data.data_access import QueryFilters, SpatialFilter, SpatialFilterLike -from tilebox.datasets.data.datapoint import DatapointInterval, DatapointIntervalLike, QueryResultPage +from tilebox.datasets.data.datapoint import QueryResultPage from tilebox.datasets.data.datasets import Dataset -from tilebox.datasets.data.pagination import Pagination -from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike -from tilebox.datasets.data.uuid import as_uuid from tilebox.datasets.message_pool import get_message_type from tilebox.datasets.progress import ProgressCallback from tilebox.datasets.protobuf_conversion.protobuf_xarray import MessageToXarrayConverter @@ -27,12 +24,16 @@ marshal_messages, to_messages, ) +from tilebox.datasets.query.id_interval import IDInterval, IDIntervalLike +from tilebox.datasets.query.pagination import Pagination +from tilebox.datasets.query.time_interval import TimeInterval, TimeIntervalLike from tilebox.datasets.service import TileboxDatasetService from tilebox.datasets.sync.pagination import ( with_progressbar, with_time_progress_callback, with_time_progressbar, ) +from tilebox.datasets.uuid import as_uuid # allow private member access: we allow it here because we want to make as much private as possible so that we can # minimize the publicly facing API (which allows us to change internals later, and also limits to auto-completion) @@ -230,7 +231,7 @@ def find(self, datapoint_id: str | UUID, skip_data: bool = False) -> xr.Dataset: def _find_interval( self, - datapoint_id_interval: DatapointIntervalLike, + datapoint_id_interval: IDIntervalLike, end_inclusive: bool = True, *, skip_data: bool = False, @@ -249,9 +250,7 @@ def _find_interval( Returns: The datapoints in the given interval as an xarray dataset """ - filters = QueryFilters( - temporal_extent=DatapointInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive) - ) + filters = QueryFilters(temporal_extent=IDInterval.parse(datapoint_id_interval, end_inclusive=end_inclusive)) def request(page: PaginationProtocol) -> QueryResultPage: query_page = Pagination(page.limit, page.starting_after) diff --git a/tilebox-datasets/tilebox/datasets/sync/pagination.py b/tilebox-datasets/tilebox/datasets/sync/pagination.py index 7f314ae..33f84f8 100644 --- a/tilebox-datasets/tilebox/datasets/sync/pagination.py +++ b/tilebox-datasets/tilebox/datasets/sync/pagination.py @@ -5,11 +5,9 @@ from tqdm.auto import tqdm -from tilebox.datasets.data import ( - TimeInterval, -) from tilebox.datasets.data.datapoint import QueryResultPage from tilebox.datasets.progress import ProgressCallback, TimeIntervalProgressBar +from tilebox.datasets.query.time_interval import TimeInterval ResultPage = TypeVar("ResultPage", bound=QueryResultPage) diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.py b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.py new file mode 100644 index 0000000..8acdaf9 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: tilebox/v1/id.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'tilebox/v1/id.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13tilebox/v1/id.proto\x12\ntilebox.v1\"\x18\n\x02ID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuidBg\n\x0e\x63om.tilebox.v1B\x07IdProtoP\x01\xa2\x02\x03TXX\xaa\x02\nTilebox.V1\xca\x02\nTilebox\\V1\xe2\x02\x16Tilebox\\V1\\GPBMetadata\xea\x02\x0bTilebox::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tilebox.v1.id_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\016com.tilebox.v1B\007IdProtoP\001\242\002\003TXX\252\002\nTilebox.V1\312\002\nTilebox\\V1\342\002\026Tilebox\\V1\\GPBMetadata\352\002\013Tilebox::V1\222\003\002\010\002' + _globals['_ID']._serialized_start=35 + _globals['_ID']._serialized_end=59 +# @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.pyi b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.pyi new file mode 100644 index 0000000..2cc30bd --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2.pyi @@ -0,0 +1,11 @@ +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Optional as _Optional + +DESCRIPTOR: _descriptor.FileDescriptor + +class ID(_message.Message): + __slots__ = ("uuid",) + UUID_FIELD_NUMBER: _ClassVar[int] + uuid: bytes + def __init__(self, uuid: _Optional[bytes] = ...) -> None: ... diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2_grpc.py b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2_grpc.py new file mode 100644 index 0000000..8a93939 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/id_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.py b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.py new file mode 100644 index 0000000..52ef2a2 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: tilebox/v1/query.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'tilebox/v1/query.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16tilebox/v1/query.proto\x12\ntilebox.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x13tilebox/v1/id.proto\"\xce\x01\n\x0cTimeInterval\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\xac\x01\n\nIDInterval\x12)\n\x08start_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x07startId\x12%\n\x06\x65nd_id\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05\x65ndId\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"g\n\nPagination\x12\x1b\n\x05limit\x18\x01 \x01(\x03\x42\x05\xaa\x01\x02\x08\x01R\x05limit\x12<\n\x0estarting_after\x18\x02 \x01(\x0b\x32\x0e.tilebox.v1.IDB\x05\xaa\x01\x02\x08\x01R\rstartingAfterBj\n\x0e\x63om.tilebox.v1B\nQueryProtoP\x01\xa2\x02\x03TXX\xaa\x02\nTilebox.V1\xca\x02\nTilebox\\V1\xe2\x02\x16Tilebox\\V1\\GPBMetadata\xea\x02\x0bTilebox::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tilebox.v1.query_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\016com.tilebox.v1B\nQueryProtoP\001\242\002\003TXX\252\002\nTilebox.V1\312\002\nTilebox\\V1\342\002\026Tilebox\\V1\\GPBMetadata\352\002\013Tilebox::V1\222\003\002\010\002' + _globals['_PAGINATION'].fields_by_name['limit']._loaded_options = None + _globals['_PAGINATION'].fields_by_name['limit']._serialized_options = b'\252\001\002\010\001' + _globals['_PAGINATION'].fields_by_name['starting_after']._loaded_options = None + _globals['_PAGINATION'].fields_by_name['starting_after']._serialized_options = b'\252\001\002\010\001' + _globals['_TIMEINTERVAL']._serialized_start=93 + _globals['_TIMEINTERVAL']._serialized_end=299 + _globals['_IDINTERVAL']._serialized_start=302 + _globals['_IDINTERVAL']._serialized_end=474 + _globals['_PAGINATION']._serialized_start=476 + _globals['_PAGINATION']._serialized_end=579 +# @@protoc_insertion_point(module_scope) diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.pyi b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.pyi new file mode 100644 index 0000000..2980ac9 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2.pyi @@ -0,0 +1,39 @@ +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from 
tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class TimeInterval(_message.Message): + __slots__ = ("start_time", "end_time", "start_exclusive", "end_inclusive") + START_TIME_FIELD_NUMBER: _ClassVar[int] + END_TIME_FIELD_NUMBER: _ClassVar[int] + START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] + END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] + start_time: _timestamp_pb2.Timestamp + end_time: _timestamp_pb2.Timestamp + start_exclusive: bool + end_inclusive: bool + def __init__(self, start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... + +class IDInterval(_message.Message): + __slots__ = ("start_id", "end_id", "start_exclusive", "end_inclusive") + START_ID_FIELD_NUMBER: _ClassVar[int] + END_ID_FIELD_NUMBER: _ClassVar[int] + START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] + END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] + start_id: _id_pb2.ID + end_id: _id_pb2.ID + start_exclusive: bool + end_inclusive: bool + def __init__(self, start_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., end_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... + +class Pagination(_message.Message): + __slots__ = ("limit", "starting_after") + LIMIT_FIELD_NUMBER: _ClassVar[int] + STARTING_AFTER_FIELD_NUMBER: _ClassVar[int] + limit: int + starting_after: _id_pb2.ID + def __init__(self, limit: _Optional[int] = ..., starting_after: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... diff --git a/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2_grpc.py b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2_grpc.py new file mode 100644 index 0000000..8a93939 --- /dev/null +++ b/tilebox-datasets/tilebox/datasets/tilebox/v1/query_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/tilebox-datasets/tilebox/datasets/data/uuid.py b/tilebox-datasets/tilebox/datasets/uuid.py similarity index 54% rename from tilebox-datasets/tilebox/datasets/data/uuid.py rename to tilebox-datasets/tilebox/datasets/uuid.py index 8bbaca0..f684883 100644 --- a/tilebox-datasets/tilebox/datasets/data/uuid.py +++ b/tilebox-datasets/tilebox/datasets/uuid.py @@ -1,18 +1,19 @@ # allow the uuid module name which shadows the builtin: from uuid import UUID -from tilebox.datasets.datasets.v1 import core_pb2, well_known_types_pb2 +from tilebox.datasets.datasets.v1 import well_known_types_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 _NIL_UUID = UUID(int=0) -def uuid_message_to_uuid(uuid_message: core_pb2.ID | well_known_types_pb2.UUID) -> UUID: +def uuid_message_to_uuid(uuid_message: id_pb2.ID | well_known_types_pb2.UUID) -> UUID: if uuid_message.uuid == b"": return _NIL_UUID return UUID(bytes=uuid_message.uuid) -def uuid_message_to_optional_uuid(uuid_message: core_pb2.ID | well_known_types_pb2.UUID | None) -> UUID | None: +def uuid_message_to_optional_uuid(uuid_message: id_pb2.ID | well_known_types_pb2.UUID | None) -> UUID | None: if uuid_message is None: return None if uuid_message.uuid == b"": @@ -20,16 +21,16 @@ def uuid_message_to_optional_uuid(uuid_message: core_pb2.ID | well_known_types_p return UUID(bytes=uuid_message.uuid) -def uuid_to_uuid_message(uuid: UUID | None) -> core_pb2.ID | None: +def uuid_to_uuid_message(uuid: UUID | None) -> id_pb2.ID | None: if uuid is None or uuid == _NIL_UUID: return None - return core_pb2.ID(uuid=uuid.bytes) + return id_pb2.ID(uuid=uuid.bytes) -def must_uuid_to_uuid_message(uuid: UUID) -> core_pb2.ID: +def must_uuid_to_uuid_message(uuid: UUID) -> id_pb2.ID: if uuid == _NIL_UUID: raise ValueError("UUID must not be Nil") - return core_pb2.ID(uuid=uuid.bytes) + return id_pb2.ID(uuid=uuid.bytes) def as_uuid(uuid: str | UUID) -> UUID: diff --git a/tilebox-workflows/tests/automations/test_client.py b/tilebox-workflows/tests/automations/test_client.py index 12a6b31..f77cada 100644 --- a/tilebox-workflows/tests/automations/test_client.py +++ b/tilebox-workflows/tests/automations/test_client.py @@ -7,6 +7,8 @@ from tests.tasks_data import alphanumerical_text, cron_triggers, storage_event_triggers, task_identifiers from _tilebox.grpc.error import NotFoundError +from tilebox.datasets.tilebox.v1.id_pb2 import ID +from tilebox.datasets.uuid import uuid_message_to_uuid from tilebox.workflows.automations import CronTask, StorageEventTask from tilebox.workflows.automations.client import AutomationClient, AutomationService from tilebox.workflows.data import ( @@ -14,7 +16,6 @@ CronTrigger, StorageEventTrigger, TaskIdentifier, - uuid_message_to_uuid, uuid_to_uuid_message, ) from tilebox.workflows.workflows.v1.automation_pb2 import AutomationPrototype as AutomationPrototypeMessage @@ -22,7 +23,6 @@ from tilebox.workflows.workflows.v1.automation_pb2 import CronTrigger as CronTriggerMessage from tilebox.workflows.workflows.v1.automation_pb2 import StorageEventTrigger as StorageEventTriggerMessage from tilebox.workflows.workflows.v1.automation_pb2_grpc import AutomationServiceStub -from tilebox.workflows.workflows.v1.core_pb2 import UUID as UUIDMessage # noqa: N811 class MockAutomationService(AutomationServiceStub): @@ -59,7 +59,7 @@ def UpdateAutomation(self, req: AutomationPrototypeMessage) -> AutomationPrototy self.automations[automation_id] = req return req - def 
GetAutomation(self, req: UUIDMessage) -> AutomationPrototypeMessage: # noqa: N802 + def GetAutomation(self, req: ID) -> AutomationPrototypeMessage: # noqa: N802 automation_id = uuid_message_to_uuid(req) if automation_id in self.automations: return self.automations[automation_id] diff --git a/tilebox-workflows/tests/jobs/test_client.py b/tilebox-workflows/tests/jobs/test_client.py index 53f7ca3..630376a 100644 --- a/tilebox-workflows/tests/jobs/test_client.py +++ b/tilebox-workflows/tests/jobs/test_client.py @@ -6,7 +6,7 @@ from tests.tasks_data import jobs from _tilebox.grpc.error import NotFoundError -from tilebox.datasets.data.time_interval import datetime_to_timestamp +from tilebox.datasets.query.time_interval import datetime_to_timestamp from tilebox.workflows.data import Job, uuid_message_to_uuid, uuid_to_uuid_message from tilebox.workflows.jobs.client import JobClient from tilebox.workflows.jobs.service import JobService diff --git a/tilebox-workflows/tilebox/workflows/automations/service.py b/tilebox-workflows/tilebox/workflows/automations/service.py index 3698d2f..e4c83b8 100644 --- a/tilebox-workflows/tilebox/workflows/automations/service.py +++ b/tilebox-workflows/tilebox/workflows/automations/service.py @@ -4,10 +4,10 @@ from grpc import Channel from _tilebox.grpc.error import with_pythonic_errors +from tilebox.datasets.uuid import uuid_to_uuid_message from tilebox.workflows.data import ( AutomationPrototype, StorageLocation, - uuid_to_uuid_message, ) from tilebox.workflows.workflows.v1.automation_pb2 import AutomationPrototype as AutomationPrototypeMessage from tilebox.workflows.workflows.v1.automation_pb2 import Automations, DeleteAutomationRequest, StorageLocations diff --git a/tilebox-workflows/tilebox/workflows/data.py b/tilebox-workflows/tilebox/workflows/data.py index 4382b45..cc99e77 100644 --- a/tilebox-workflows/tilebox/workflows/data.py +++ b/tilebox-workflows/tilebox/workflows/data.py @@ -1,11 +1,10 @@ import re import warnings from dataclasses import dataclass, field -from datetime import datetime, timezone +from datetime import datetime from enum import Enum from functools import lru_cache from pathlib import Path -from typing import TypeAlias from uuid import UUID import boto3 @@ -13,6 +12,11 @@ from google.cloud.storage.bucket import Bucket from google.protobuf.duration_pb2 import Duration +from tilebox.datasets.query.id_interval import IDInterval +from tilebox.datasets.query.pagination import Pagination +from tilebox.datasets.query.time_interval import TimeInterval, datetime_to_timestamp, timestamp_to_datetime +from tilebox.datasets.uuid import uuid_message_to_optional_uuid, uuid_message_to_uuid, uuid_to_uuid_message + try: # let's not make this a hard dependency, but if it's installed we can use its types from mypy_boto3_s3.client import S3Client @@ -20,7 +24,6 @@ from typing import Any as S3Client from opentelemetry.trace import ProxyTracerProvider, Tracer -from tilebox.datasets.data import datetime_to_timestamp, timestamp_to_datetime from tilebox.datasets.sync.client import Client as DatasetsClient from tilebox.workflows.workflows.v1 import automation_pb2 as automation_pb from tilebox.workflows.workflows.v1 import core_pb2, job_pb2, task_pb2 @@ -290,24 +293,6 @@ def _parse_version(version: str) -> tuple[int, int]: raise ValueError(f"Invalid version string: {version}") -def uuid_message_to_uuid(uuid_message: core_pb2.UUID) -> UUID: - return UUID(bytes=uuid_message.uuid) - - -def uuid_message_to_optional_uuid(uuid_message: core_pb2.UUID | None) -> UUID | None: 
- if uuid_message is None: - return None - if uuid_message.uuid == b"": - return None - return UUID(bytes=uuid_message.uuid) - - -def uuid_to_uuid_message(uuid: UUID) -> core_pb2.UUID | None: - if uuid.int == 0: - return None - return core_pb2.UUID(uuid=uuid.bytes) - - class StorageType(Enum): GCS = automation_pb.STORAGE_TYPE_GCS # Google Cloud Storage S3 = automation_pb.STORAGE_TYPE_S3 # Amazon Web Services S3 @@ -532,27 +517,6 @@ def _default_google_storage_client(location: str) -> Bucket: return GoogleStorageClient(project=project).bucket(bucket) -@dataclass -class Pagination: - limit: int | None = None - starting_after: UUID | None = None - - @classmethod - def from_message(cls, page: core_pb2.Pagination | None) -> "Pagination": - if page is None: - return cls() - # convert falsish values (0 or empty string) to None - return cls( - limit=page.limit or None, - starting_after=uuid_message_to_optional_uuid(page.starting_after), - ) - - def to_message(self) -> core_pb2.Pagination: - return core_pb2.Pagination( - limit=self.limit, starting_after=uuid_to_uuid_message(self.starting_after) if self.starting_after else None - ) - - @dataclass(frozen=True) class QueryJobsResponse: jobs: list[Job] @@ -572,132 +536,16 @@ def to_message(self) -> job_pb2.QueryJobsResponse: ) -IDIntervalLike: TypeAlias = "tuple[str, str] | tuple[UUID, UUID] | IDInterval" - - -# note: unify the below classes with the ones from tilebox.datasets.data as soon as we update the protobuf messages -@dataclass(frozen=True) -class IDInterval: - start_id: UUID - end_id: UUID - start_exclusive: bool - end_inclusive: bool - - @classmethod - def from_message(cls, interval: core_pb2.IDInterval) -> "IDInterval": - return cls( - start_id=uuid_message_to_uuid(interval.start_id), - end_id=uuid_message_to_uuid(interval.end_id), - start_exclusive=interval.start_exclusive, - end_inclusive=interval.end_inclusive, - ) - - def to_message(self) -> core_pb2.IDInterval: - return core_pb2.IDInterval( - start_id=uuid_to_uuid_message(self.start_id), - end_id=uuid_to_uuid_message(self.end_id), - start_exclusive=self.start_exclusive, - end_inclusive=self.end_inclusive, - ) - - @classmethod - def parse(cls, arg: IDIntervalLike) -> "IDInterval": - """ - Convert a variety of input types to an IDInterval. - - Supported input types: - - IDInterval: Return the input as is - - tuple of two UUIDs: Return an IDInterval with start and end id set to the given values - - tuple of two strings: Return an IDInterval with start and end id set to the UUIDs parsed from the given strings - - Args: - arg: The input to convert - - Returns: - IDInterval: The parsed ID interval - """ - - match arg: - case IDInterval(_, _, _, _): - return arg - case (UUID(), UUID()): - start, end = arg - return IDInterval( - start_id=start, - end_id=end, - start_exclusive=False, - end_inclusive=True, - ) - case (str(), str()): - start, end = arg - return IDInterval( - start_id=UUID(start), - end_id=UUID(end), - start_exclusive=False, - end_inclusive=True, - ) - - -_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) - - -@dataclass(frozen=True) -class _TimeInterval: - """ - A time interval from a given start to a given end time. - Both the start and end time can be exclusive or inclusive. - """ - - start: datetime - end: datetime - - # We use exclusive for start and inclusive for end, because that way when both are false - # we have a half-open interval [start, end) which is the default behaviour we want to achieve. 
- start_exclusive: bool = False - end_inclusive: bool = False - - @classmethod - def from_message( - cls, interval: core_pb2.TimeInterval - ) -> "_TimeInterval": # lets use typing.Self once we require python >= 3.11 - """Convert a TimeInterval protobuf message to a TimeInterval object.""" - - start = timestamp_to_datetime(interval.start_time) - end = timestamp_to_datetime(interval.end_time) - if start == _EPOCH and end == _EPOCH and not interval.start_exclusive and not interval.end_inclusive: - return _EMPTY_TIME_INTERVAL - - return cls( - start=timestamp_to_datetime(interval.start_time), - end=timestamp_to_datetime(interval.end_time), - start_exclusive=interval.start_exclusive, - end_inclusive=interval.end_inclusive, - ) - - def to_message(self) -> core_pb2.TimeInterval: - """Convert a TimeInterval object to a TimeInterval protobuf message.""" - return core_pb2.TimeInterval( - start_time=datetime_to_timestamp(self.start), - end_time=datetime_to_timestamp(self.end), - start_exclusive=self.start_exclusive, - end_inclusive=self.end_inclusive, - ) - - -# A sentinel value to use for time interval that are empty -_EMPTY_TIME_INTERVAL = _TimeInterval(_EPOCH, _EPOCH, start_exclusive=True, end_inclusive=False) - - @dataclass(frozen=True) class QueryFilters: - time_interval: _TimeInterval | None = None + time_interval: TimeInterval | None = None id_interval: IDInterval | None = None automation_id: UUID | None = None @classmethod def from_message(cls, filters: job_pb2.QueryFilters) -> "QueryFilters": return cls( - time_interval=_TimeInterval.from_message(filters.time_interval), + time_interval=TimeInterval.from_message(filters.time_interval), id_interval=IDInterval.from_message(filters.id_interval), automation_id=uuid_message_to_optional_uuid(filters.automation_id), ) diff --git a/tilebox-workflows/tilebox/workflows/jobs/client.py b/tilebox-workflows/tilebox/workflows/jobs/client.py index e3200f3..81dca21 100644 --- a/tilebox-workflows/tilebox/workflows/jobs/client.py +++ b/tilebox-workflows/tilebox/workflows/jobs/client.py @@ -3,16 +3,14 @@ from _tilebox.grpc.pagination import Pagination as PaginationProtocol from _tilebox.grpc.pagination import paginated_request -from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike +from tilebox.datasets.query.id_interval import IDInterval, IDIntervalLike +from tilebox.datasets.query.pagination import Pagination +from tilebox.datasets.query.time_interval import TimeInterval, TimeIntervalLike from tilebox.workflows.clusters.client import ClusterSlugLike, to_cluster_slug from tilebox.workflows.data import ( - IDInterval, - IDIntervalLike, Job, - Pagination, QueryFilters, QueryJobsResponse, - _TimeInterval, ) from tilebox.workflows.jobs.service import JobService from tilebox.workflows.observability.tracing import WorkflowTracer, get_trace_parent_of_current_span @@ -178,7 +176,7 @@ def query(self, temporal_extent: TimeIntervalLike | IDIntervalLike, automation_i Returns: A list of jobs. 
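The jobs/client.py changes below keep query()'s dispatch for (str, str) temporal extents: the pair is tried as an ID interval first and only falls back to a time interval when UUID parsing fails. A self-contained sketch of that contract against the new shared types (the UUID strings and dates are illustrative, and it assumes TimeInterval.parse accepts date strings, as TimeIntervalLike implies):

from tilebox.datasets.query.id_interval import IDInterval
from tilebox.datasets.query.time_interval import TimeInterval

# A pair of UUID strings parses as an ID interval; the bounds default to
# start_exclusive=False, end_inclusive=True, matching the parse() defaults
# removed from workflows/data.py above. (IDs are illustrative.)
id_interval = IDInterval.parse(
    ("00000000-0000-4000-8000-000000000001", "00000000-0000-4000-8000-000000000002")
)

# Strings that are not valid UUIDs make parse() raise ValueError, which is
# precisely the signal query() uses to fall back to a time interval.
extent = ("2024-01-01", "2024-02-01")
try:
    temporal = IDInterval.parse(extent)
except ValueError:
    temporal = TimeInterval.parse(extent)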
""" - time_interval: _TimeInterval | None = None + time_interval: TimeInterval | None = None id_interval: IDInterval | None = None match temporal_extent: case (str(), str()): @@ -187,7 +185,7 @@ def query(self, temporal_extent: TimeIntervalLike | IDIntervalLike, automation_i id_interval = IDInterval.parse(temporal_extent) except ValueError: dataset_time_interval = TimeInterval.parse(temporal_extent) - time_interval = _TimeInterval( + time_interval = TimeInterval( start=dataset_time_interval.start, end=dataset_time_interval.end, start_exclusive=dataset_time_interval.start_exclusive, @@ -197,7 +195,7 @@ def query(self, temporal_extent: TimeIntervalLike | IDIntervalLike, automation_i id_interval = IDInterval.parse(temporal_extent) case _: dataset_time_interval = TimeInterval.parse(temporal_extent) - time_interval = _TimeInterval( + time_interval = TimeInterval( start=dataset_time_interval.start, end=dataset_time_interval.end, start_exclusive=dataset_time_interval.start_exclusive, diff --git a/tilebox-workflows/tilebox/workflows/jobs/service.py b/tilebox-workflows/tilebox/workflows/jobs/service.py index c808185..79541aa 100644 --- a/tilebox-workflows/tilebox/workflows/jobs/service.py +++ b/tilebox-workflows/tilebox/workflows/jobs/service.py @@ -3,9 +3,9 @@ from grpc import Channel from _tilebox.grpc.error import with_pythonic_errors +from tilebox.datasets.query.pagination import Pagination from tilebox.workflows.data import ( Job, - Pagination, QueryFilters, QueryJobsResponse, TaskSubmission, diff --git a/tilebox-workflows/tilebox/workflows/timeseries.py b/tilebox-workflows/tilebox/workflows/timeseries.py index 7a304f0..3874ceb 100644 --- a/tilebox-workflows/tilebox/workflows/timeseries.py +++ b/tilebox-workflows/tilebox/workflows/timeseries.py @@ -10,9 +10,9 @@ from typing_extensions import dataclass_transform, override from tilebox.datasets.data.collection import Collection, CollectionInfo -from tilebox.datasets.data.datapoint import DatapointInterval -from tilebox.datasets.data.time_interval import TimeInterval, TimeIntervalLike from tilebox.datasets.data.timeseries import TimeChunk, TimeseriesDatasetChunk +from tilebox.datasets.query.id_interval import IDInterval +from tilebox.datasets.query.time_interval import TimeInterval, TimeIntervalLike from tilebox.datasets.sync.dataset import CollectionClient from tilebox.workflows.interceptors import ForwardExecution, execution_interceptor from tilebox.workflows.task import ExecutionContext, Task @@ -76,7 +76,7 @@ def _timeseries_dataset_chunk(task: Task, call_next: ForwardExecution, context: interval_chunk = replace( chunk, time_interval=None, - datapoint_interval=DatapointInterval( + datapoint_interval=IDInterval( start_id=page.min_id, end_id=page.max_id, start_exclusive=False, end_inclusive=True ), ) diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.py index 3efb1e0..49d96b1 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.py +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.py @@ -24,41 +24,42 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 from tilebox.workflows.workflows.v1 import core_pb2 as workflows_dot_v1_dot_core__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1dworkflows/v1/automation.proto\x12\x0cworkflows.v1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17workflows/v1/core.proto\"\x80\x01\n\x0fStorageLocation\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x1a\n\x08location\x18\x02 \x01(\tR\x08location\x12-\n\x04type\x18\x03 \x01(\x0e\x32\x19.workflows.v1.StorageTypeR\x04type\"O\n\x10StorageLocations\x12;\n\tlocations\x18\x01 \x03(\x0b\x32\x1d.workflows.v1.StorageLocationR\tlocations\"\xa2\x02\n\x13\x41utomationPrototype\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12:\n\tprototype\x18\x03 \x01(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\tprototype\x12W\n\x16storage_event_triggers\x18\x04 \x03(\x0b\x32!.workflows.v1.StorageEventTriggerR\x14storageEventTriggers\x12>\n\rcron_triggers\x18\x05 \x03(\x0b\x32\x19.workflows.v1.CronTriggerR\x0c\x63ronTriggers\"R\n\x0b\x41utomations\x12\x43\n\x0b\x61utomations\x18\x01 \x03(\x0b\x32!.workflows.v1.AutomationPrototypeR\x0b\x61utomations\"\xa6\x01\n\x13StorageEventTrigger\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12H\n\x10storage_location\x18\x02 \x01(\x0b\x32\x1d.workflows.v1.StorageLocationR\x0fstorageLocation\x12!\n\x0cglob_pattern\x18\x03 \x01(\tR\x0bglobPattern\"M\n\x0b\x43ronTrigger\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x1a\n\x08schedule\x18\x02 \x01(\tR\x08schedule\"E\n\nAutomation\x12#\n\rtrigger_event\x18\x01 \x01(\x0cR\x0ctriggerEvent\x12\x12\n\x04\x61rgs\x18\x02 \x01(\x0cR\x04\x61rgs\"\xab\x01\n\x15TriggeredStorageEvent\x12\x42\n\x13storage_location_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x11storageLocationId\x12\x32\n\x04type\x18\x02 \x01(\x0e\x32\x1e.workflows.v1.StorageEventTypeR\x04type\x12\x1a\n\x08location\x18\x03 \x01(\tR\x08location\"S\n\x12TriggeredCronEvent\x12=\n\x0ctrigger_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0btriggerTime\"s\n\x17\x44\x65leteAutomationRequest\x12\x37\n\rautomation_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x0c\x61utomationId\x12\x1f\n\x0b\x63\x61ncel_jobs\x18\x02 
\x01(\x08R\ncancelJobs*k\n\x0bStorageType\x12\x1c\n\x18STORAGE_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10STORAGE_TYPE_GCS\x10\x01\x12\x13\n\x0fSTORAGE_TYPE_S3\x10\x02\x12\x13\n\x0fSTORAGE_TYPE_FS\x10\x03*V\n\x10StorageEventType\x12\"\n\x1eSTORAGE_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x1e\n\x1aSTORAGE_EVENT_TYPE_CREATED\x10\x01\x32\xdd\x05\n\x11\x41utomationService\x12N\n\x14ListStorageLocations\x12\x16.google.protobuf.Empty\x1a\x1e.workflows.v1.StorageLocations\x12G\n\x12GetStorageLocation\x12\x12.workflows.v1.UUID\x1a\x1d.workflows.v1.StorageLocation\x12U\n\x15\x43reateStorageLocation\x12\x1d.workflows.v1.StorageLocation\x1a\x1d.workflows.v1.StorageLocation\x12\x43\n\x15\x44\x65leteStorageLocation\x12\x12.workflows.v1.UUID\x1a\x16.google.protobuf.Empty\x12\x44\n\x0fListAutomations\x12\x16.google.protobuf.Empty\x1a\x19.workflows.v1.Automations\x12\x46\n\rGetAutomation\x12\x12.workflows.v1.UUID\x1a!.workflows.v1.AutomationPrototype\x12X\n\x10\x43reateAutomation\x12!.workflows.v1.AutomationPrototype\x1a!.workflows.v1.AutomationPrototype\x12X\n\x10UpdateAutomation\x12!.workflows.v1.AutomationPrototype\x1a!.workflows.v1.AutomationPrototype\x12Q\n\x10\x44\x65leteAutomation\x12%.workflows.v1.DeleteAutomationRequest\x1a\x16.google.protobuf.EmptyBt\n\x10\x63om.workflows.v1B\x0f\x41utomationProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dworkflows/v1/automation.proto\x12\x0cworkflows.v1\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x13tilebox/v1/id.proto\x1a\x17workflows/v1/core.proto\"|\n\x0fStorageLocation\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x1a\n\x08location\x18\x02 \x01(\tR\x08location\x12-\n\x04type\x18\x03 \x01(\x0e\x32\x19.workflows.v1.StorageTypeR\x04type\"O\n\x10StorageLocations\x12;\n\tlocations\x18\x01 \x03(\x0b\x32\x1d.workflows.v1.StorageLocationR\tlocations\"\x9e\x02\n\x13\x41utomationPrototype\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12:\n\tprototype\x18\x03 \x01(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\tprototype\x12W\n\x16storage_event_triggers\x18\x04 \x03(\x0b\x32!.workflows.v1.StorageEventTriggerR\x14storageEventTriggers\x12>\n\rcron_triggers\x18\x05 \x03(\x0b\x32\x19.workflows.v1.CronTriggerR\x0c\x63ronTriggers\"R\n\x0b\x41utomations\x12\x43\n\x0b\x61utomations\x18\x01 \x03(\x0b\x32!.workflows.v1.AutomationPrototypeR\x0b\x61utomations\"\xa2\x01\n\x13StorageEventTrigger\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12H\n\x10storage_location\x18\x02 \x01(\x0b\x32\x1d.workflows.v1.StorageLocationR\x0fstorageLocation\x12!\n\x0cglob_pattern\x18\x03 \x01(\tR\x0bglobPattern\"I\n\x0b\x43ronTrigger\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x1a\n\x08schedule\x18\x02 \x01(\tR\x08schedule\"E\n\nAutomation\x12#\n\rtrigger_event\x18\x01 \x01(\x0cR\x0ctriggerEvent\x12\x12\n\x04\x61rgs\x18\x02 \x01(\x0cR\x04\x61rgs\"\xa7\x01\n\x15TriggeredStorageEvent\x12>\n\x13storage_location_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x11storageLocationId\x12\x32\n\x04type\x18\x02 \x01(\x0e\x32\x1e.workflows.v1.StorageEventTypeR\x04type\x12\x1a\n\x08location\x18\x03 \x01(\tR\x08location\"S\n\x12TriggeredCronEvent\x12=\n\x0ctrigger_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0btriggerTime\"o\n\x17\x44\x65leteAutomationRequest\x12\x33\n\rautomation_id\x18\x01 
\x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x61utomationId\x12\x1f\n\x0b\x63\x61ncel_jobs\x18\x02 \x01(\x08R\ncancelJobs*k\n\x0bStorageType\x12\x1c\n\x18STORAGE_TYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10STORAGE_TYPE_GCS\x10\x01\x12\x13\n\x0fSTORAGE_TYPE_S3\x10\x02\x12\x13\n\x0fSTORAGE_TYPE_FS\x10\x03*V\n\x10StorageEventType\x12\"\n\x1eSTORAGE_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x1e\n\x1aSTORAGE_EVENT_TYPE_CREATED\x10\x01\x32\xd1\x05\n\x11\x41utomationService\x12N\n\x14ListStorageLocations\x12\x16.google.protobuf.Empty\x1a\x1e.workflows.v1.StorageLocations\x12\x43\n\x12GetStorageLocation\x12\x0e.tilebox.v1.ID\x1a\x1d.workflows.v1.StorageLocation\x12U\n\x15\x43reateStorageLocation\x12\x1d.workflows.v1.StorageLocation\x1a\x1d.workflows.v1.StorageLocation\x12?\n\x15\x44\x65leteStorageLocation\x12\x0e.tilebox.v1.ID\x1a\x16.google.protobuf.Empty\x12\x44\n\x0fListAutomations\x12\x16.google.protobuf.Empty\x1a\x19.workflows.v1.Automations\x12\x42\n\rGetAutomation\x12\x0e.tilebox.v1.ID\x1a!.workflows.v1.AutomationPrototype\x12X\n\x10\x43reateAutomation\x12!.workflows.v1.AutomationPrototype\x1a!.workflows.v1.AutomationPrototype\x12X\n\x10UpdateAutomation\x12!.workflows.v1.AutomationPrototype\x1a!.workflows.v1.AutomationPrototype\x12Q\n\x10\x44\x65leteAutomation\x12%.workflows.v1.DeleteAutomationRequest\x1a\x16.google.protobuf.EmptyBy\n\x10\x63om.workflows.v1B\x0f\x41utomationProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.automation_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\017AutomationProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1' - _globals['_STORAGETYPE']._serialized_start=1418 - _globals['_STORAGETYPE']._serialized_end=1525 - _globals['_STORAGEEVENTTYPE']._serialized_start=1527 - _globals['_STORAGEEVENTTYPE']._serialized_end=1613 - _globals['_STORAGELOCATION']._serialized_start=135 - _globals['_STORAGELOCATION']._serialized_end=263 - _globals['_STORAGELOCATIONS']._serialized_start=265 - _globals['_STORAGELOCATIONS']._serialized_end=344 - _globals['_AUTOMATIONPROTOTYPE']._serialized_start=347 - _globals['_AUTOMATIONPROTOTYPE']._serialized_end=637 - _globals['_AUTOMATIONS']._serialized_start=639 - _globals['_AUTOMATIONS']._serialized_end=721 - _globals['_STORAGEEVENTTRIGGER']._serialized_start=724 - _globals['_STORAGEEVENTTRIGGER']._serialized_end=890 - _globals['_CRONTRIGGER']._serialized_start=892 - _globals['_CRONTRIGGER']._serialized_end=969 - _globals['_AUTOMATION']._serialized_start=971 - _globals['_AUTOMATION']._serialized_end=1040 - _globals['_TRIGGEREDSTORAGEEVENT']._serialized_start=1043 + _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\017AutomationProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002' + _globals['_STORAGETYPE']._serialized_start=1414 + _globals['_STORAGETYPE']._serialized_end=1521 + _globals['_STORAGEEVENTTYPE']._serialized_start=1523 + _globals['_STORAGEEVENTTYPE']._serialized_end=1609 + _globals['_STORAGELOCATION']._serialized_start=155 + 
_globals['_STORAGELOCATION']._serialized_end=279 + _globals['_STORAGELOCATIONS']._serialized_start=281 + _globals['_STORAGELOCATIONS']._serialized_end=360 + _globals['_AUTOMATIONPROTOTYPE']._serialized_start=363 + _globals['_AUTOMATIONPROTOTYPE']._serialized_end=649 + _globals['_AUTOMATIONS']._serialized_start=651 + _globals['_AUTOMATIONS']._serialized_end=733 + _globals['_STORAGEEVENTTRIGGER']._serialized_start=736 + _globals['_STORAGEEVENTTRIGGER']._serialized_end=898 + _globals['_CRONTRIGGER']._serialized_start=900 + _globals['_CRONTRIGGER']._serialized_end=973 + _globals['_AUTOMATION']._serialized_start=975 + _globals['_AUTOMATION']._serialized_end=1044 + _globals['_TRIGGEREDSTORAGEEVENT']._serialized_start=1047 _globals['_TRIGGEREDSTORAGEEVENT']._serialized_end=1214 _globals['_TRIGGEREDCRONEVENT']._serialized_start=1216 _globals['_TRIGGEREDCRONEVENT']._serialized_end=1299 _globals['_DELETEAUTOMATIONREQUEST']._serialized_start=1301 - _globals['_DELETEAUTOMATIONREQUEST']._serialized_end=1416 - _globals['_AUTOMATIONSERVICE']._serialized_start=1616 - _globals['_AUTOMATIONSERVICE']._serialized_end=2349 + _globals['_DELETEAUTOMATIONREQUEST']._serialized_end=1412 + _globals['_AUTOMATIONSERVICE']._serialized_start=1612 + _globals['_AUTOMATIONSERVICE']._serialized_end=2333 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.pyi b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.pyi index d933f46..0fe5638 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.pyi +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2.pyi @@ -1,5 +1,6 @@ from google.protobuf import empty_pb2 as _empty_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from tilebox.workflows.workflows.v1 import core_pb2 as _core_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper @@ -32,10 +33,10 @@ class StorageLocation(_message.Message): ID_FIELD_NUMBER: _ClassVar[int] LOCATION_FIELD_NUMBER: _ClassVar[int] TYPE_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.UUID + id: _id_pb2.ID location: str type: StorageType - def __init__(self, id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., location: _Optional[str] = ..., type: _Optional[_Union[StorageType, str]] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., location: _Optional[str] = ..., type: _Optional[_Union[StorageType, str]] = ...) -> None: ... class StorageLocations(_message.Message): __slots__ = ("locations",) @@ -50,12 +51,12 @@ class AutomationPrototype(_message.Message): PROTOTYPE_FIELD_NUMBER: _ClassVar[int] STORAGE_EVENT_TRIGGERS_FIELD_NUMBER: _ClassVar[int] CRON_TRIGGERS_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.UUID + id: _id_pb2.ID name: str prototype: _core_pb2.TaskSubmission storage_event_triggers: _containers.RepeatedCompositeFieldContainer[StorageEventTrigger] cron_triggers: _containers.RepeatedCompositeFieldContainer[CronTrigger] - def __init__(self, id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., name: _Optional[str] = ..., prototype: _Optional[_Union[_core_pb2.TaskSubmission, _Mapping]] = ..., storage_event_triggers: _Optional[_Iterable[_Union[StorageEventTrigger, _Mapping]]] = ..., cron_triggers: _Optional[_Iterable[_Union[CronTrigger, _Mapping]]] = ...) -> None: ... 
+ def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ..., prototype: _Optional[_Union[_core_pb2.TaskSubmission, _Mapping]] = ..., storage_event_triggers: _Optional[_Iterable[_Union[StorageEventTrigger, _Mapping]]] = ..., cron_triggers: _Optional[_Iterable[_Union[CronTrigger, _Mapping]]] = ...) -> None: ... class Automations(_message.Message): __slots__ = ("automations",) @@ -68,18 +69,18 @@ class StorageEventTrigger(_message.Message): ID_FIELD_NUMBER: _ClassVar[int] STORAGE_LOCATION_FIELD_NUMBER: _ClassVar[int] GLOB_PATTERN_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.UUID + id: _id_pb2.ID storage_location: StorageLocation glob_pattern: str - def __init__(self, id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., storage_location: _Optional[_Union[StorageLocation, _Mapping]] = ..., glob_pattern: _Optional[str] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., storage_location: _Optional[_Union[StorageLocation, _Mapping]] = ..., glob_pattern: _Optional[str] = ...) -> None: ... class CronTrigger(_message.Message): __slots__ = ("id", "schedule") ID_FIELD_NUMBER: _ClassVar[int] SCHEDULE_FIELD_NUMBER: _ClassVar[int] - id: _core_pb2.UUID + id: _id_pb2.ID schedule: str - def __init__(self, id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., schedule: _Optional[str] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., schedule: _Optional[str] = ...) -> None: ... class Automation(_message.Message): __slots__ = ("trigger_event", "args") @@ -94,10 +95,10 @@ class TriggeredStorageEvent(_message.Message): STORAGE_LOCATION_ID_FIELD_NUMBER: _ClassVar[int] TYPE_FIELD_NUMBER: _ClassVar[int] LOCATION_FIELD_NUMBER: _ClassVar[int] - storage_location_id: _core_pb2.UUID + storage_location_id: _id_pb2.ID type: StorageEventType location: str - def __init__(self, storage_location_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., type: _Optional[_Union[StorageEventType, str]] = ..., location: _Optional[str] = ...) -> None: ... + def __init__(self, storage_location_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., type: _Optional[_Union[StorageEventType, str]] = ..., location: _Optional[str] = ...) -> None: ... class TriggeredCronEvent(_message.Message): __slots__ = ("trigger_time",) @@ -109,6 +110,6 @@ class DeleteAutomationRequest(_message.Message): __slots__ = ("automation_id", "cancel_jobs") AUTOMATION_ID_FIELD_NUMBER: _ClassVar[int] CANCEL_JOBS_FIELD_NUMBER: _ClassVar[int] - automation_id: _core_pb2.UUID + automation_id: _id_pb2.ID cancel_jobs: bool - def __init__(self, automation_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., cancel_jobs: bool = ...) -> None: ... + def __init__(self, automation_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., cancel_jobs: bool = ...) -> None: ... 
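With automation_pb2 regenerated, every stub in this service takes the shared tilebox.v1.ID message instead of workflows.v1.UUID. Handler and test code touches the change only through the uuid helpers, as in the in-memory servicer updated at the start of this diff; a condensed, hypothetical version of that pattern (the class name and dict-backed storage are illustrative, not the test suite's actual code):

from uuid import UUID

from tilebox.datasets.tilebox.v1.id_pb2 import ID
from tilebox.datasets.uuid import uuid_message_to_uuid
from tilebox.workflows.workflows.v1.automation_pb2 import AutomationPrototype


class InMemoryAutomations:
    """Hypothetical test double, keyed by plain uuid.UUID."""

    def __init__(self, automations: dict[UUID, AutomationPrototype]) -> None:
        self.automations = automations

    def GetAutomation(self, req: ID) -> AutomationPrototype:  # noqa: N802
        # The request's wire type changed from workflows.v1.UUID to
        # tilebox.v1.ID, but converting it to a uuid.UUID for the lookup
        # is unchanged.
        return self.automations[uuid_message_to_uuid(req)]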
diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2_grpc.py b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2_grpc.py index b0c03d4..d3afc4c 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2_grpc.py +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/automation_pb2_grpc.py @@ -3,8 +3,8 @@ import grpc from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 from tilebox.workflows.workflows.v1 import automation_pb2 as workflows_dot_v1_dot_automation__pb2 -from tilebox.workflows.workflows.v1 import core_pb2 as workflows_dot_v1_dot_core__pb2 class AutomationServiceStub(object): @@ -26,7 +26,7 @@ def __init__(self, channel): _registered_method=True) self.GetStorageLocation = channel.unary_unary( '/workflows.v1.AutomationService/GetStorageLocation', - request_serializer=workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + request_serializer=tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, response_deserializer=workflows_dot_v1_dot_automation__pb2.StorageLocation.FromString, _registered_method=True) self.CreateStorageLocation = channel.unary_unary( @@ -36,7 +36,7 @@ def __init__(self, channel): _registered_method=True) self.DeleteStorageLocation = channel.unary_unary( '/workflows.v1.AutomationService/DeleteStorageLocation', - request_serializer=workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + request_serializer=tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, _registered_method=True) self.ListAutomations = channel.unary_unary( @@ -46,7 +46,7 @@ def __init__(self, channel): _registered_method=True) self.GetAutomation = channel.unary_unary( '/workflows.v1.AutomationService/GetAutomation', - request_serializer=workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + request_serializer=tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, response_deserializer=workflows_dot_v1_dot_automation__pb2.AutomationPrototype.FromString, _registered_method=True) self.CreateAutomation = channel.unary_unary( @@ -145,7 +145,7 @@ def add_AutomationServiceServicer_to_server(servicer, server): ), 'GetStorageLocation': grpc.unary_unary_rpc_method_handler( servicer.GetStorageLocation, - request_deserializer=workflows_dot_v1_dot_core__pb2.UUID.FromString, + request_deserializer=tilebox_dot_v1_dot_id__pb2.ID.FromString, response_serializer=workflows_dot_v1_dot_automation__pb2.StorageLocation.SerializeToString, ), 'CreateStorageLocation': grpc.unary_unary_rpc_method_handler( @@ -155,7 +155,7 @@ def add_AutomationServiceServicer_to_server(servicer, server): ), 'DeleteStorageLocation': grpc.unary_unary_rpc_method_handler( servicer.DeleteStorageLocation, - request_deserializer=workflows_dot_v1_dot_core__pb2.UUID.FromString, + request_deserializer=tilebox_dot_v1_dot_id__pb2.ID.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'ListAutomations': grpc.unary_unary_rpc_method_handler( @@ -165,7 +165,7 @@ def add_AutomationServiceServicer_to_server(servicer, server): ), 'GetAutomation': grpc.unary_unary_rpc_method_handler( servicer.GetAutomation, - request_deserializer=workflows_dot_v1_dot_core__pb2.UUID.FromString, + request_deserializer=tilebox_dot_v1_dot_id__pb2.ID.FromString, response_serializer=workflows_dot_v1_dot_automation__pb2.AutomationPrototype.SerializeToString, ), 'CreateAutomation': grpc.unary_unary_rpc_method_handler( @@ 
-239,7 +239,7 @@ def GetStorageLocation(request, request, target, '/workflows.v1.AutomationService/GetStorageLocation', - workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, workflows_dot_v1_dot_automation__pb2.StorageLocation.FromString, options, channel_credentials, @@ -293,7 +293,7 @@ def DeleteStorageLocation(request, request, target, '/workflows.v1.AutomationService/DeleteStorageLocation', - workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, options, channel_credentials, @@ -347,7 +347,7 @@ def GetAutomation(request, request, target, '/workflows.v1.AutomationService/GetAutomation', - workflows_dot_v1_dot_core__pb2.UUID.SerializeToString, + tilebox_dot_v1_dot_id__pb2.ID.SerializeToString, workflows_dot_v1_dot_automation__pb2.AutomationPrototype.FromString, options, channel_credentials, diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.py index 0c0172b..e4286d7 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.py +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.py @@ -24,42 +24,39 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17workflows/v1/core.proto\x12\x0cworkflows.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"^\n\x07\x43luster\x12\x12\n\x04slug\x18\x02 \x01(\tR\x04slug\x12!\n\x0c\x64isplay_name\x18\x03 \x01(\tR\x0b\x64isplayName\x12\x1c\n\tdeletable\x18\x04 \x01(\x08R\tdeletable\"\xa5\x03\n\x03Job\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12!\n\x0ctrace_parent\x18\x03 \x01(\tR\x0btraceParent\x12\x1a\n\x08\x63\x61nceled\x18\x05 \x01(\x08R\x08\x63\x61nceled\x12,\n\x05state\x18\x06 \x01(\x0e\x32\x16.workflows.v1.JobStateR\x05state\x12=\n\x0csubmitted_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bsubmittedAt\x12\x39\n\nstarted_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12@\n\x0etask_summaries\x18\t \x03(\x0b\x32\x19.workflows.v1.TaskSummaryR\rtaskSummaries\x12\x37\n\rautomation_id\x18\n \x01(\x0b\x32\x12.workflows.v1.UUIDR\x0c\x61utomationIdJ\x04\x08\x04\x10\x05\"\xa7\x02\n\x0bTaskSummary\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12-\n\x05state\x18\x03 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\x12/\n\tparent_id\x18\x04 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x08parentId\x12\x39\n\nstarted_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x39\n\nstopped_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstoppedAtJ\x04\x08\x05\x10\x06\"\xc0\x03\n\x04Task\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12<\n\nidentifier\x18\x02 \x01(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\nidentifier\x12-\n\x05state\x18\x03 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\x12\x19\n\x05input\x18\x04 \x01(\x0cH\x00R\x05input\x88\x01\x01\x12\x1d\n\x07\x64isplay\x18\x05 \x01(\tH\x01R\x07\x64isplay\x88\x01\x01\x12#\n\x03job\x18\x06 \x01(\x0b\x32\x11.workflows.v1.JobR\x03job\x12/\n\tparent_id\x18\x07 
\x01(\x0b\x32\x12.workflows.v1.UUIDR\x08parentId\x12\x31\n\ndepends_on\x18\x08 \x03(\x0b\x32\x12.workflows.v1.UUIDR\tdependsOn\x12-\n\x05lease\x18\t \x01(\x0b\x32\x17.workflows.v1.TaskLeaseR\x05lease\x12\x1f\n\x0bretry_count\x18\n \x01(\x03R\nretryCountB\x08\n\x06_inputB\n\n\x08_display\">\n\x0eTaskIdentifier\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\"1\n\x05Tasks\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x12.workflows.v1.TaskR\x05tasks\"\xe6\x01\n\x0eTaskSubmission\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\x12<\n\nidentifier\x18\x02 \x01(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\nidentifier\x12\x14\n\x05input\x18\x03 \x01(\x0cR\x05input\x12\x18\n\x07\x64isplay\x18\x04 \x01(\tR\x07\x64isplay\x12\"\n\x0c\x64\x65pendencies\x18\x05 \x03(\x03R\x0c\x64\x65pendencies\x12\x1f\n\x0bmax_retries\x18\x06 \x01(\x03R\nmaxRetries\"\x1a\n\x04UUID\x12\x12\n\x04uuid\x18\x01 \x01(\x0cR\x04uuid\"\xa9\x01\n\tTaskLease\x12/\n\x05lease\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x05lease\x12k\n%recommended_wait_until_next_extension\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR!recommendedWaitUntilNextExtension\"\xce\x01\n\x0cTimeInterval\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\xb4\x01\n\nIDInterval\x12-\n\x08start_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x07startId\x12)\n\x06\x65nd_id\x18\x02 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05\x65ndId\x12\'\n\x0fstart_exclusive\x18\x03 \x01(\x08R\x0estartExclusive\x12#\n\rend_inclusive\x18\x04 \x01(\x08R\x0c\x65ndInclusive\"\x84\x01\n\nPagination\x12\x19\n\x05limit\x18\x01 \x01(\x03H\x00R\x05limit\x88\x01\x01\x12>\n\x0estarting_after\x18\x02 \x01(\x0b\x32\x12.workflows.v1.UUIDH\x01R\rstartingAfter\x88\x01\x01\x42\x08\n\x06_limitB\x11\n\x0f_starting_after*k\n\x08JobState\x12\x19\n\x15JOB_STATE_UNSPECIFIED\x10\x00\x12\x14\n\x10JOB_STATE_QUEUED\x10\x01\x12\x15\n\x11JOB_STATE_STARTED\x10\x02\x12\x17\n\x13JOB_STATE_COMPLETED\x10\x03*\xa0\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x15\n\x11TASK_STATE_QUEUED\x10\x01\x12\x16\n\x12TASK_STATE_RUNNING\x10\x02\x12\x17\n\x13TASK_STATE_COMPUTED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x18\n\x14TASK_STATE_CANCELLED\x10\x05\x42n\n\x10\x63om.workflows.v1B\tCoreProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17workflows/v1/core.proto\x12\x0cworkflows.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x13tilebox/v1/id.proto\"^\n\x07\x43luster\x12\x12\n\x04slug\x18\x02 \x01(\tR\x04slug\x12!\n\x0c\x64isplay_name\x18\x03 \x01(\tR\x0b\x64isplayName\x12\x1c\n\tdeletable\x18\x04 \x01(\x08R\tdeletable\"\x9d\x03\n\x03Job\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12!\n\x0ctrace_parent\x18\x03 \x01(\tR\x0btraceParent\x12\x1a\n\x08\x63\x61nceled\x18\x05 \x01(\x08R\x08\x63\x61nceled\x12,\n\x05state\x18\x06 \x01(\x0e\x32\x16.workflows.v1.JobStateR\x05state\x12=\n\x0csubmitted_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bsubmittedAt\x12\x39\n\nstarted_at\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12@\n\x0etask_summaries\x18\t \x03(\x0b\x32\x19.workflows.v1.TaskSummaryR\rtaskSummaries\x12\x33\n\rautomation_id\x18\n \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x61utomationIdJ\x04\x08\x04\x10\x05\"\x9f\x02\n\x0bTaskSummary\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12-\n\x05state\x18\x03 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\x12+\n\tparent_id\x18\x04 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x08parentId\x12\x39\n\nstarted_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x39\n\nstopped_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstoppedAtJ\x04\x08\x05\x10\x06\"\xa2\x03\n\x04Task\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12<\n\nidentifier\x18\x02 \x01(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\nidentifier\x12-\n\x05state\x18\x03 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\x12\x1b\n\x05input\x18\x04 \x01(\x0c\x42\x05\xaa\x01\x02\x08\x01R\x05input\x12\x1f\n\x07\x64isplay\x18\x05 \x01(\tB\x05\xaa\x01\x02\x08\x01R\x07\x64isplay\x12#\n\x03job\x18\x06 \x01(\x0b\x32\x11.workflows.v1.JobR\x03job\x12+\n\tparent_id\x18\x07 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x08parentId\x12-\n\ndepends_on\x18\x08 \x03(\x0b\x32\x0e.tilebox.v1.IDR\tdependsOn\x12-\n\x05lease\x18\t \x01(\x0b\x32\x17.workflows.v1.TaskLeaseR\x05lease\x12\x1f\n\x0bretry_count\x18\n \x01(\x03R\nretryCount\">\n\x0eTaskIdentifier\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\"1\n\x05Tasks\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x12.workflows.v1.TaskR\x05tasks\"\xe6\x01\n\x0eTaskSubmission\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\x12<\n\nidentifier\x18\x02 \x01(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\nidentifier\x12\x14\n\x05input\x18\x03 \x01(\x0cR\x05input\x12\x18\n\x07\x64isplay\x18\x04 \x01(\tR\x07\x64isplay\x12\"\n\x0c\x64\x65pendencies\x18\x05 \x03(\x03R\x0c\x64\x65pendencies\x12\x1f\n\x0bmax_retries\x18\x06 \x01(\x03R\nmaxRetries\"\xa9\x01\n\tTaskLease\x12/\n\x05lease\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x05lease\x12k\n%recommended_wait_until_next_extension\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationR!recommendedWaitUntilNextExtension*k\n\x08JobState\x12\x19\n\x15JOB_STATE_UNSPECIFIED\x10\x00\x12\x14\n\x10JOB_STATE_QUEUED\x10\x01\x12\x15\n\x11JOB_STATE_STARTED\x10\x02\x12\x17\n\x13JOB_STATE_COMPLETED\x10\x03*\xa0\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x15\n\x11TASK_STATE_QUEUED\x10\x01\x12\x16\n\x12TASK_STATE_RUNNING\x10\x02\x12\x17\n\x13TASK_STATE_COMPUTED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x18\n\x14TASK_STATE_CANCELLED\x10\x05\x42s\n\x10\x63om.workflows.v1B\tCoreProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.core_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\tCoreProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1' - _globals['_JOBSTATE']._serialized_start=2450 - _globals['_JOBSTATE']._serialized_end=2557 - 
_globals['_TASKSTATE']._serialized_start=2560 - _globals['_TASKSTATE']._serialized_end=2720 - _globals['_CLUSTER']._serialized_start=106 - _globals['_CLUSTER']._serialized_end=200 - _globals['_JOB']._serialized_start=203 - _globals['_JOB']._serialized_end=624 - _globals['_TASKSUMMARY']._serialized_start=627 - _globals['_TASKSUMMARY']._serialized_end=922 - _globals['_TASK']._serialized_start=925 - _globals['_TASK']._serialized_end=1373 - _globals['_TASKIDENTIFIER']._serialized_start=1375 - _globals['_TASKIDENTIFIER']._serialized_end=1437 - _globals['_TASKS']._serialized_start=1439 - _globals['_TASKS']._serialized_end=1488 - _globals['_TASKSUBMISSION']._serialized_start=1491 - _globals['_TASKSUBMISSION']._serialized_end=1721 - _globals['_UUID']._serialized_start=1723 - _globals['_UUID']._serialized_end=1749 - _globals['_TASKLEASE']._serialized_start=1752 - _globals['_TASKLEASE']._serialized_end=1921 - _globals['_TIMEINTERVAL']._serialized_start=1924 - _globals['_TIMEINTERVAL']._serialized_end=2130 - _globals['_IDINTERVAL']._serialized_start=2133 - _globals['_IDINTERVAL']._serialized_end=2313 - _globals['_PAGINATION']._serialized_start=2316 - _globals['_PAGINATION']._serialized_end=2448 + _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\tCoreProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002' + _globals['_TASK'].fields_by_name['input']._loaded_options = None + _globals['_TASK'].fields_by_name['input']._serialized_options = b'\252\001\002\010\001' + _globals['_TASK'].fields_by_name['display']._loaded_options = None + _globals['_TASK'].fields_by_name['display']._serialized_options = b'\252\001\002\010\001' + _globals['_JOBSTATE']._serialized_start=1870 + _globals['_JOBSTATE']._serialized_end=1977 + _globals['_TASKSTATE']._serialized_start=1980 + _globals['_TASKSTATE']._serialized_end=2140 + _globals['_CLUSTER']._serialized_start=127 + _globals['_CLUSTER']._serialized_end=221 + _globals['_JOB']._serialized_start=224 + _globals['_JOB']._serialized_end=637 + _globals['_TASKSUMMARY']._serialized_start=640 + _globals['_TASKSUMMARY']._serialized_end=927 + _globals['_TASK']._serialized_start=930 + _globals['_TASK']._serialized_end=1348 + _globals['_TASKIDENTIFIER']._serialized_start=1350 + _globals['_TASKIDENTIFIER']._serialized_end=1412 + _globals['_TASKS']._serialized_start=1414 + _globals['_TASKS']._serialized_end=1463 + _globals['_TASKSUBMISSION']._serialized_start=1466 + _globals['_TASKSUBMISSION']._serialized_end=1696 + _globals['_TASKLEASE']._serialized_start=1699 + _globals['_TASKLEASE']._serialized_end=1868 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.pyi b/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.pyi index e3b7627..5e1804e 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.pyi +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/core_pb2.pyi @@ -1,5 +1,6 @@ from google.protobuf import duration_pb2 as _duration_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -55,7 +56,7 @@ class Job(_message.Message): STARTED_AT_FIELD_NUMBER: _ClassVar[int] TASK_SUMMARIES_FIELD_NUMBER: _ClassVar[int] 
AUTOMATION_ID_FIELD_NUMBER: _ClassVar[int] - id: UUID + id: _id_pb2.ID name: str trace_parent: str canceled: bool @@ -63,8 +64,8 @@ class Job(_message.Message): submitted_at: _timestamp_pb2.Timestamp started_at: _timestamp_pb2.Timestamp task_summaries: _containers.RepeatedCompositeFieldContainer[TaskSummary] - automation_id: UUID - def __init__(self, id: _Optional[_Union[UUID, _Mapping]] = ..., name: _Optional[str] = ..., trace_parent: _Optional[str] = ..., canceled: bool = ..., state: _Optional[_Union[JobState, str]] = ..., submitted_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., started_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., task_summaries: _Optional[_Iterable[_Union[TaskSummary, _Mapping]]] = ..., automation_id: _Optional[_Union[UUID, _Mapping]] = ...) -> None: ... + automation_id: _id_pb2.ID + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., name: _Optional[str] = ..., trace_parent: _Optional[str] = ..., canceled: bool = ..., state: _Optional[_Union[JobState, str]] = ..., submitted_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., started_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., task_summaries: _Optional[_Iterable[_Union[TaskSummary, _Mapping]]] = ..., automation_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ... class TaskSummary(_message.Message): __slots__ = ("id", "display", "state", "parent_id", "started_at", "stopped_at") @@ -74,13 +75,13 @@ class TaskSummary(_message.Message): PARENT_ID_FIELD_NUMBER: _ClassVar[int] STARTED_AT_FIELD_NUMBER: _ClassVar[int] STOPPED_AT_FIELD_NUMBER: _ClassVar[int] - id: UUID + id: _id_pb2.ID display: str state: TaskState - parent_id: UUID + parent_id: _id_pb2.ID started_at: _timestamp_pb2.Timestamp stopped_at: _timestamp_pb2.Timestamp - def __init__(self, id: _Optional[_Union[UUID, _Mapping]] = ..., display: _Optional[str] = ..., state: _Optional[_Union[TaskState, str]] = ..., parent_id: _Optional[_Union[UUID, _Mapping]] = ..., started_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., stopped_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., display: _Optional[str] = ..., state: _Optional[_Union[TaskState, str]] = ..., parent_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., started_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., stopped_at: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... 
class Task(_message.Message): __slots__ = ("id", "identifier", "state", "input", "display", "job", "parent_id", "depends_on", "lease", "retry_count") @@ -94,17 +95,17 @@ class Task(_message.Message): DEPENDS_ON_FIELD_NUMBER: _ClassVar[int] LEASE_FIELD_NUMBER: _ClassVar[int] RETRY_COUNT_FIELD_NUMBER: _ClassVar[int] - id: UUID + id: _id_pb2.ID identifier: TaskIdentifier state: TaskState input: bytes display: str job: Job - parent_id: UUID - depends_on: _containers.RepeatedCompositeFieldContainer[UUID] + parent_id: _id_pb2.ID + depends_on: _containers.RepeatedCompositeFieldContainer[_id_pb2.ID] lease: TaskLease retry_count: int - def __init__(self, id: _Optional[_Union[UUID, _Mapping]] = ..., identifier: _Optional[_Union[TaskIdentifier, _Mapping]] = ..., state: _Optional[_Union[TaskState, str]] = ..., input: _Optional[bytes] = ..., display: _Optional[str] = ..., job: _Optional[_Union[Job, _Mapping]] = ..., parent_id: _Optional[_Union[UUID, _Mapping]] = ..., depends_on: _Optional[_Iterable[_Union[UUID, _Mapping]]] = ..., lease: _Optional[_Union[TaskLease, _Mapping]] = ..., retry_count: _Optional[int] = ...) -> None: ... + def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., identifier: _Optional[_Union[TaskIdentifier, _Mapping]] = ..., state: _Optional[_Union[TaskState, str]] = ..., input: _Optional[bytes] = ..., display: _Optional[str] = ..., job: _Optional[_Union[Job, _Mapping]] = ..., parent_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., depends_on: _Optional[_Iterable[_Union[_id_pb2.ID, _Mapping]]] = ..., lease: _Optional[_Union[TaskLease, _Mapping]] = ..., retry_count: _Optional[int] = ...) -> None: ... class TaskIdentifier(_message.Message): __slots__ = ("name", "version") @@ -136,12 +137,6 @@ class TaskSubmission(_message.Message): max_retries: int def __init__(self, cluster_slug: _Optional[str] = ..., identifier: _Optional[_Union[TaskIdentifier, _Mapping]] = ..., input: _Optional[bytes] = ..., display: _Optional[str] = ..., dependencies: _Optional[_Iterable[int]] = ..., max_retries: _Optional[int] = ...) -> None: ... -class UUID(_message.Message): - __slots__ = ("uuid",) - UUID_FIELD_NUMBER: _ClassVar[int] - uuid: bytes - def __init__(self, uuid: _Optional[bytes] = ...) -> None: ... - class TaskLease(_message.Message): __slots__ = ("lease", "recommended_wait_until_next_extension") LEASE_FIELD_NUMBER: _ClassVar[int] @@ -149,35 +144,3 @@ class TaskLease(_message.Message): lease: _duration_pb2.Duration recommended_wait_until_next_extension: _duration_pb2.Duration def __init__(self, lease: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., recommended_wait_until_next_extension: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... - -class TimeInterval(_message.Message): - __slots__ = ("start_time", "end_time", "start_exclusive", "end_inclusive") - START_TIME_FIELD_NUMBER: _ClassVar[int] - END_TIME_FIELD_NUMBER: _ClassVar[int] - START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] - END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] - start_time: _timestamp_pb2.Timestamp - end_time: _timestamp_pb2.Timestamp - start_exclusive: bool - end_inclusive: bool - def __init__(self, start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... 
- -class IDInterval(_message.Message): - __slots__ = ("start_id", "end_id", "start_exclusive", "end_inclusive") - START_ID_FIELD_NUMBER: _ClassVar[int] - END_ID_FIELD_NUMBER: _ClassVar[int] - START_EXCLUSIVE_FIELD_NUMBER: _ClassVar[int] - END_INCLUSIVE_FIELD_NUMBER: _ClassVar[int] - start_id: UUID - end_id: UUID - start_exclusive: bool - end_inclusive: bool - def __init__(self, start_id: _Optional[_Union[UUID, _Mapping]] = ..., end_id: _Optional[_Union[UUID, _Mapping]] = ..., start_exclusive: bool = ..., end_inclusive: bool = ...) -> None: ... - -class Pagination(_message.Message): - __slots__ = ("limit", "starting_after") - LIMIT_FIELD_NUMBER: _ClassVar[int] - STARTING_AFTER_FIELD_NUMBER: _ClassVar[int] - limit: int - starting_after: UUID - def __init__(self, limit: _Optional[int] = ..., starting_after: _Optional[_Union[UUID, _Mapping]] = ...) -> None: ... diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/diagram_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/diagram_pb2.py index 979b18e..1f536a4 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/diagram_pb2.py +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/diagram_pb2.py @@ -24,20 +24,22 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aworkflows/v1/diagram.proto\x12\x0cworkflows.v1\"t\n\x14RenderDiagramRequest\x12\x18\n\x07\x64iagram\x18\x01 \x01(\tR\x07\x64iagram\x12\x42\n\x0erender_options\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.RenderOptionsR\rrenderOptions\"\xa6\x01\n\rRenderOptions\x12\x16\n\x06layout\x18\x01 \x01(\tR\x06layout\x12\x1e\n\x08theme_id\x18\x02 \x01(\x03H\x00R\x07themeId\x88\x01\x01\x12\x18\n\x07sketchy\x18\x03 \x01(\x08R\x07sketchy\x12\x18\n\x07padding\x18\x04 \x01(\x03R\x07padding\x12\x1c\n\tdirection\x18\x05 \x01(\tR\tdirectionB\x0b\n\t_theme_id\"\x1b\n\x07\x44iagram\x12\x10\n\x03svg\x18\x01 \x01(\x0cR\x03svg2U\n\x0e\x44iagramService\x12\x43\n\x06Render\x12\".workflows.v1.RenderDiagramRequest\x1a\x15.workflows.v1.DiagramBq\n\x10\x63om.workflows.v1B\x0c\x44iagramProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1aworkflows/v1/diagram.proto\x12\x0cworkflows.v1\"t\n\x14RenderDiagramRequest\x12\x18\n\x07\x64iagram\x18\x01 \x01(\tR\x07\x64iagram\x12\x42\n\x0erender_options\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.RenderOptionsR\rrenderOptions\"\x9b\x01\n\rRenderOptions\x12\x16\n\x06layout\x18\x01 \x01(\tR\x06layout\x12 \n\x08theme_id\x18\x02 \x01(\x03\x42\x05\xaa\x01\x02\x08\x01R\x07themeId\x12\x18\n\x07sketchy\x18\x03 \x01(\x08R\x07sketchy\x12\x18\n\x07padding\x18\x04 \x01(\x03R\x07padding\x12\x1c\n\tdirection\x18\x05 \x01(\tR\tdirection\"\x1b\n\x07\x44iagram\x12\x10\n\x03svg\x18\x01 \x01(\x0cR\x03svg2U\n\x0e\x44iagramService\x12\x43\n\x06Render\x12\".workflows.v1.RenderDiagramRequest\x1a\x15.workflows.v1.DiagramBv\n\x10\x63om.workflows.v1B\x0c\x44iagramProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.diagram_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = 
b'\n\020com.workflows.v1B\014DiagramProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1' + _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\014DiagramProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002' + _globals['_RENDEROPTIONS'].fields_by_name['theme_id']._loaded_options = None + _globals['_RENDEROPTIONS'].fields_by_name['theme_id']._serialized_options = b'\252\001\002\010\001' _globals['_RENDERDIAGRAMREQUEST']._serialized_start=44 _globals['_RENDERDIAGRAMREQUEST']._serialized_end=160 _globals['_RENDEROPTIONS']._serialized_start=163 - _globals['_RENDEROPTIONS']._serialized_end=329 - _globals['_DIAGRAM']._serialized_start=331 - _globals['_DIAGRAM']._serialized_end=358 - _globals['_DIAGRAMSERVICE']._serialized_start=360 - _globals['_DIAGRAMSERVICE']._serialized_end=445 + _globals['_RENDEROPTIONS']._serialized_end=318 + _globals['_DIAGRAM']._serialized_start=320 + _globals['_DIAGRAM']._serialized_end=347 + _globals['_DIAGRAMSERVICE']._serialized_start=349 + _globals['_DIAGRAMSERVICE']._serialized_end=434 # @@protoc_insertion_point(module_scope) diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.py index 8ef9a56..efc51c9 100644 --- a/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.py +++ b/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.py @@ -22,46 +22,54 @@ _sym_db = _symbol_database.Default() +from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2 +from tilebox.datasets.tilebox.v1 import query_pb2 as tilebox_dot_v1_dot_query__pb2 from tilebox.workflows.workflows.v1 import core_pb2 as workflows_dot_v1_dot_core__pb2 from tilebox.workflows.workflows.v1 import diagram_pb2 as workflows_dot_v1_dot_diagram__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16workflows/v1/job.proto\x12\x0cworkflows.v1\x1a\x17workflows/v1/core.proto\x1a\x1aworkflows/v1/diagram.proto\"\xbd\x01\n\x10SubmitJobRequest\x12\x32\n\x05tasks\x18\x01 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x05tasks\x12\x19\n\x08job_name\x18\x02 \x01(\tR\x07jobName\x12!\n\x0ctrace_parent\x18\x03 \x01(\tR\x0btraceParent\x12\x37\n\rautomation_id\x18\x04 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x0c\x61utomationId\":\n\rGetJobRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\"<\n\x0fRetryJobRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\"F\n\x10RetryJobResponse\x12\x32\n\x15num_tasks_rescheduled\x18\x01 \x01(\x03R\x13numTasksRescheduled\"=\n\x10\x43\x61ncelJobRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\"\x13\n\x11\x43\x61ncelJobResponse\"\xe8\x01\n\x13VisualizeJobRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\x12\x42\n\x0erender_options\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.RenderOptionsR\rrenderOptions\x12\x38\n\x05theme\x18\x03 \x01(\x0e\x32\".workflows.v1.WorkflowDiagramThemeR\x05theme\x12(\n\x10include_job_name\x18\x04 \x01(\x08R\x0eincludeJobName\"\xf1\x01\n\x0cQueryFilters\x12\x41\n\rtime_interval\x18\x01 \x01(\x0b\x32\x1a.workflows.v1.TimeIntervalH\x00R\x0ctimeInterval\x12;\n\x0bid_interval\x18\x02 \x01(\x0b\x32\x18.workflows.v1.IDIntervalH\x00R\nidInterval\x12<\n\rautomation_id\x18\x03 
\x01(\x0b\x32\x12.workflows.v1.UUIDH\x01R\x0c\x61utomationId\x88\x01\x01\x42\x11\n\x0ftemporal_extentB\x10\n\x0e_automation_id\"\x84\x01\n\x10QueryJobsRequest\x12\x34\n\x07\x66ilters\x18\x01 \x01(\x0b\x32\x1a.workflows.v1.QueryFiltersR\x07\x66ilters\x12\x31\n\x04page\x18\x02 \x01(\x0b\x32\x18.workflows.v1.PaginationH\x00R\x04page\x88\x01\x01\x42\x07\n\x05_page\"\x84\x01\n\x11QueryJobsResponse\x12%\n\x04jobs\x18\x01 \x03(\x0b\x32\x11.workflows.v1.JobR\x04jobs\x12:\n\tnext_page\x18\x03 \x01(\x0b\x32\x18.workflows.v1.PaginationH\x00R\x08nextPage\x88\x01\x01\x42\x0c\n\n_next_page\"C\n\x16GetJobPrototypeRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\"q\n\x17GetJobPrototypeResponse\x12;\n\nroot_tasks\x18\x01 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\trootTasks\x12\x19\n\x08job_name\x18\x02 \x01(\tR\x07jobName\"\xa7\x01\n\x0f\x43loneJobRequest\x12)\n\x06job_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x05jobId\x12N\n\x14root_tasks_overrides\x18\x02 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x12rootTasksOverrides\x12\x19\n\x08job_name\x18\x03 \x01(\tR\x07jobName*\xd4\x01\n\x14WorkflowDiagramTheme\x12&\n\"WORKFLOW_DIAGRAM_THEME_UNSPECIFIED\x10\x00\x12 \n\x1cWORKFLOW_DIAGRAM_THEME_LIGHT\x10\x01\x12\x1f\n\x1bWORKFLOW_DIAGRAM_THEME_DARK\x10\x02\x12(\n$WORKFLOW_DIAGRAM_THEME_CONSOLE_LIGHT\x10\x03\x12\'\n#WORKFLOW_DIAGRAM_THEME_CONSOLE_DARK\x10\x04\x32\xd5\x04\n\nJobService\x12>\n\tSubmitJob\x12\x1e.workflows.v1.SubmitJobRequest\x1a\x11.workflows.v1.Job\x12\x38\n\x06GetJob\x12\x1b.workflows.v1.GetJobRequest\x1a\x11.workflows.v1.Job\x12I\n\x08RetryJob\x12\x1d.workflows.v1.RetryJobRequest\x1a\x1e.workflows.v1.RetryJobResponse\x12L\n\tCancelJob\x12\x1e.workflows.v1.CancelJobRequest\x1a\x1f.workflows.v1.CancelJobResponse\x12H\n\x0cVisualizeJob\x12!.workflows.v1.VisualizeJobRequest\x1a\x15.workflows.v1.Diagram\x12L\n\tQueryJobs\x12\x1e.workflows.v1.QueryJobsRequest\x1a\x1f.workflows.v1.QueryJobsResponse\x12^\n\x0fGetJobPrototype\x12$.workflows.v1.GetJobPrototypeRequest\x1a%.workflows.v1.GetJobPrototypeResponse\x12<\n\x08\x43loneJob\x12\x1d.workflows.v1.CloneJobRequest\x1a\x11.workflows.v1.JobBm\n\x10\x63om.workflows.v1B\x08JobProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x16workflows/v1/job.proto\x12\x0cworkflows.v1\x1a\x13tilebox/v1/id.proto\x1a\x16tilebox/v1/query.proto\x1a\x17workflows/v1/core.proto\x1a\x1aworkflows/v1/diagram.proto\"\xb9\x01\n\x10SubmitJobRequest\x12\x32\n\x05tasks\x18\x01 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x05tasks\x12\x19\n\x08job_name\x18\x02 \x01(\tR\x07jobName\x12!\n\x0ctrace_parent\x18\x03 \x01(\tR\x0btraceParent\x12\x33\n\rautomation_id\x18\x04 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x0c\x61utomationId\"6\n\rGetJobRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\"8\n\x0fRetryJobRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\"F\n\x10RetryJobResponse\x12\x32\n\x15num_tasks_rescheduled\x18\x01 \x01(\x03R\x13numTasksRescheduled\"9\n\x10\x43\x61ncelJobRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\"\x13\n\x11\x43\x61ncelJobResponse\"\xe4\x01\n\x13VisualizeJobRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\x12\x42\n\x0erender_options\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.RenderOptionsR\rrenderOptions\x12\x38\n\x05theme\x18\x03 \x01(\x0e\x32\".workflows.v1.WorkflowDiagramThemeR\x05theme\x12(\n\x10include_job_name\x18\x04 \x01(\x08R\x0eincludeJobName\"\xd9\x01\n\x0cQueryFilters\x12?\n\rtime_interval\x18\x01 \x01(\x0b\x32\x18.tilebox.v1.TimeIntervalH\x00R\x0ctimeInterval\x12\x39\n\x0bid_interval\x18\x02 \x01(\x0b\x32\x16.tilebox.v1.IDIntervalH\x00R\nidInterval\x12:\n\rautomation_id\x18\x03 \x01(\x0b\x32\x0e.tilebox.v1.IDB\x05\xaa\x01\x02\x08\x01R\x0c\x61utomationIdB\x11\n\x0ftemporal_extent\"{\n\x10QueryJobsRequest\x12\x34\n\x07\x66ilters\x18\x01 \x01(\x0b\x32\x1a.workflows.v1.QueryFiltersR\x07\x66ilters\x12\x31\n\x04page\x18\x02 \x01(\x0b\x32\x16.tilebox.v1.PaginationB\x05\xaa\x01\x02\x08\x01R\x04page\"v\n\x11QueryJobsResponse\x12%\n\x04jobs\x18\x01 \x03(\x0b\x32\x11.workflows.v1.JobR\x04jobs\x12:\n\tnext_page\x18\x03 \x01(\x0b\x32\x16.tilebox.v1.PaginationB\x05\xaa\x01\x02\x08\x01R\x08nextPage\"?\n\x16GetJobPrototypeRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\"q\n\x17GetJobPrototypeResponse\x12;\n\nroot_tasks\x18\x01 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\trootTasks\x12\x19\n\x08job_name\x18\x02 \x01(\tR\x07jobName\"\xa3\x01\n\x0f\x43loneJobRequest\x12%\n\x06job_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x05jobId\x12N\n\x14root_tasks_overrides\x18\x02 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x12rootTasksOverrides\x12\x19\n\x08job_name\x18\x03 \x01(\tR\x07jobName*\xd4\x01\n\x14WorkflowDiagramTheme\x12&\n\"WORKFLOW_DIAGRAM_THEME_UNSPECIFIED\x10\x00\x12 \n\x1cWORKFLOW_DIAGRAM_THEME_LIGHT\x10\x01\x12\x1f\n\x1bWORKFLOW_DIAGRAM_THEME_DARK\x10\x02\x12(\n$WORKFLOW_DIAGRAM_THEME_CONSOLE_LIGHT\x10\x03\x12\'\n#WORKFLOW_DIAGRAM_THEME_CONSOLE_DARK\x10\x04\x32\xd5\x04\n\nJobService\x12>\n\tSubmitJob\x12\x1e.workflows.v1.SubmitJobRequest\x1a\x11.workflows.v1.Job\x12\x38\n\x06GetJob\x12\x1b.workflows.v1.GetJobRequest\x1a\x11.workflows.v1.Job\x12I\n\x08RetryJob\x12\x1d.workflows.v1.RetryJobRequest\x1a\x1e.workflows.v1.RetryJobResponse\x12L\n\tCancelJob\x12\x1e.workflows.v1.CancelJobRequest\x1a\x1f.workflows.v1.CancelJobResponse\x12H\n\x0cVisualizeJob\x12!.workflows.v1.VisualizeJobRequest\x1a\x15.workflows.v1.Diagram\x12L\n\tQueryJobs\x12\x1e.workflows.v1.QueryJobsRequest\x1a\x1f.workflows.v1.QueryJobsResponse\x12^\n\x0fGetJobPrototype\x12$.workflows.v1.GetJobPrototypeRequest\x1a%.workflows.v1.GetJobPrototypeResponse\x12<\n\x08\x43loneJob\x12\x1d.workflows.v1.CloneJobRequest\x1a\x11.workflows.v1.JobBr\n\x10\x63om.workflows.v1B\x08JobProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.job_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   _globals['DESCRIPTOR']._loaded_options = None
-  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\010JobProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1'
-  _globals['_WORKFLOWDIAGRAMTHEME']._serialized_start=1667
-  _globals['_WORKFLOWDIAGRAMTHEME']._serialized_end=1879
-  _globals['_SUBMITJOBREQUEST']._serialized_start=94
-  _globals['_SUBMITJOBREQUEST']._serialized_end=283
-  _globals['_GETJOBREQUEST']._serialized_start=285
-  _globals['_GETJOBREQUEST']._serialized_end=343
-  _globals['_RETRYJOBREQUEST']._serialized_start=345
-  _globals['_RETRYJOBREQUEST']._serialized_end=405
-  _globals['_RETRYJOBRESPONSE']._serialized_start=407
-  _globals['_RETRYJOBRESPONSE']._serialized_end=477
-  _globals['_CANCELJOBREQUEST']._serialized_start=479
-  _globals['_CANCELJOBREQUEST']._serialized_end=540
-  _globals['_CANCELJOBRESPONSE']._serialized_start=542
-  _globals['_CANCELJOBRESPONSE']._serialized_end=561
-  _globals['_VISUALIZEJOBREQUEST']._serialized_start=564
-  _globals['_VISUALIZEJOBREQUEST']._serialized_end=796
-  _globals['_QUERYFILTERS']._serialized_start=799
-  _globals['_QUERYFILTERS']._serialized_end=1040
+  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\010JobProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002'
+  _globals['_QUERYFILTERS'].fields_by_name['automation_id']._loaded_options = None
+  _globals['_QUERYFILTERS'].fields_by_name['automation_id']._serialized_options = b'\252\001\002\010\001'
+  _globals['_QUERYJOBSREQUEST'].fields_by_name['page']._loaded_options = None
+  _globals['_QUERYJOBSREQUEST'].fields_by_name['page']._serialized_options = b'\252\001\002\010\001'
+  _globals['_QUERYJOBSRESPONSE'].fields_by_name['next_page']._loaded_options = None
+  _globals['_QUERYJOBSRESPONSE'].fields_by_name['next_page']._serialized_options = b'\252\001\002\010\001'
+  _globals['_WORKFLOWDIAGRAMTHEME']._serialized_start=1635
+  _globals['_WORKFLOWDIAGRAMTHEME']._serialized_end=1847
+  _globals['_SUBMITJOBREQUEST']._serialized_start=139
+  _globals['_SUBMITJOBREQUEST']._serialized_end=324
+  _globals['_GETJOBREQUEST']._serialized_start=326
+  _globals['_GETJOBREQUEST']._serialized_end=380
+  _globals['_RETRYJOBREQUEST']._serialized_start=382
+  _globals['_RETRYJOBREQUEST']._serialized_end=438
+  _globals['_RETRYJOBRESPONSE']._serialized_start=440
+  _globals['_RETRYJOBRESPONSE']._serialized_end=510
+  _globals['_CANCELJOBREQUEST']._serialized_start=512
+  _globals['_CANCELJOBREQUEST']._serialized_end=569
+  _globals['_CANCELJOBRESPONSE']._serialized_start=571
+  _globals['_CANCELJOBRESPONSE']._serialized_end=590
+  _globals['_VISUALIZEJOBREQUEST']._serialized_start=593
+  _globals['_VISUALIZEJOBREQUEST']._serialized_end=821
+  _globals['_QUERYFILTERS']._serialized_start=824
+  _globals['_QUERYFILTERS']._serialized_end=1041
   _globals['_QUERYJOBSREQUEST']._serialized_start=1043
-  _globals['_QUERYJOBSREQUEST']._serialized_end=1175
-  _globals['_QUERYJOBSRESPONSE']._serialized_start=1178
-  _globals['_QUERYJOBSRESPONSE']._serialized_end=1310
-  _globals['_GETJOBPROTOTYPEREQUEST']._serialized_start=1312
-  _globals['_GETJOBPROTOTYPEREQUEST']._serialized_end=1379
-  _globals['_GETJOBPROTOTYPERESPONSE']._serialized_start=1381
-  _globals['_GETJOBPROTOTYPERESPONSE']._serialized_end=1494
-  _globals['_CLONEJOBREQUEST']._serialized_start=1497
-  _globals['_CLONEJOBREQUEST']._serialized_end=1664
-  _globals['_JOBSERVICE']._serialized_start=1882
-  _globals['_JOBSERVICE']._serialized_end=2479
+  _globals['_QUERYJOBSREQUEST']._serialized_end=1166
+  _globals['_QUERYJOBSRESPONSE']._serialized_start=1168
+  _globals['_QUERYJOBSRESPONSE']._serialized_end=1286
+  _globals['_GETJOBPROTOTYPEREQUEST']._serialized_start=1288
+  _globals['_GETJOBPROTOTYPEREQUEST']._serialized_end=1351
+  _globals['_GETJOBPROTOTYPERESPONSE']._serialized_start=1353
+  _globals['_GETJOBPROTOTYPERESPONSE']._serialized_end=1466
+  _globals['_CLONEJOBREQUEST']._serialized_start=1469
+  _globals['_CLONEJOBREQUEST']._serialized_end=1632
+  _globals['_JOBSERVICE']._serialized_start=1850
+  _globals['_JOBSERVICE']._serialized_end=2447
 # @@protoc_insertion_point(module_scope)
diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.pyi b/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.pyi
index 550a9b3..9df4eb4 100644
--- a/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.pyi
+++ b/tilebox-workflows/tilebox/workflows/workflows/v1/job_pb2.pyi
@@ -1,3 +1,5 @@
+from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2
+from tilebox.datasets.tilebox.v1 import query_pb2 as _query_pb2
 from tilebox.workflows.workflows.v1 import core_pb2 as _core_pb2
 from tilebox.workflows.workflows.v1 import diagram_pb2 as _diagram_pb2
 from google.protobuf.internal import containers as _containers
@@ -30,20 +32,20 @@ class SubmitJobRequest(_message.Message):
     tasks: _containers.RepeatedCompositeFieldContainer[_core_pb2.TaskSubmission]
     job_name: str
     trace_parent: str
-    automation_id: _core_pb2.UUID
-    def __init__(self, tasks: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ..., job_name: _Optional[str] = ..., trace_parent: _Optional[str] = ..., automation_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    automation_id: _id_pb2.ID
+    def __init__(self, tasks: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ..., job_name: _Optional[str] = ..., trace_parent: _Optional[str] = ..., automation_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class GetJobRequest(_message.Message):
     __slots__ = ("job_id",)
     JOB_ID_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    job_id: _id_pb2.ID
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class RetryJobRequest(_message.Message):
     __slots__ = ("job_id",)
     JOB_ID_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    job_id: _id_pb2.ID
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class RetryJobResponse(_message.Message):
     __slots__ = ("num_tasks_rescheduled",)
@@ -54,8 +56,8 @@ class RetryJobResponse(_message.Message):
 class CancelJobRequest(_message.Message):
     __slots__ = ("job_id",)
     JOB_ID_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    job_id: _id_pb2.ID
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class CancelJobResponse(_message.Message):
     __slots__ = ()
@@ -67,43 +69,43 @@ class VisualizeJobRequest(_message.Message):
     RENDER_OPTIONS_FIELD_NUMBER: _ClassVar[int]
     THEME_FIELD_NUMBER: _ClassVar[int]
     INCLUDE_JOB_NAME_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
+    job_id: _id_pb2.ID
     render_options: _diagram_pb2.RenderOptions
     theme: WorkflowDiagramTheme
     include_job_name: bool
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., render_options: _Optional[_Union[_diagram_pb2.RenderOptions, _Mapping]] = ..., theme: _Optional[_Union[WorkflowDiagramTheme, str]] = ..., include_job_name: bool = ...) -> None: ...
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., render_options: _Optional[_Union[_diagram_pb2.RenderOptions, _Mapping]] = ..., theme: _Optional[_Union[WorkflowDiagramTheme, str]] = ..., include_job_name: bool = ...) -> None: ...
 
 class QueryFilters(_message.Message):
     __slots__ = ("time_interval", "id_interval", "automation_id")
     TIME_INTERVAL_FIELD_NUMBER: _ClassVar[int]
     ID_INTERVAL_FIELD_NUMBER: _ClassVar[int]
     AUTOMATION_ID_FIELD_NUMBER: _ClassVar[int]
-    time_interval: _core_pb2.TimeInterval
-    id_interval: _core_pb2.IDInterval
-    automation_id: _core_pb2.UUID
-    def __init__(self, time_interval: _Optional[_Union[_core_pb2.TimeInterval, _Mapping]] = ..., id_interval: _Optional[_Union[_core_pb2.IDInterval, _Mapping]] = ..., automation_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    time_interval: _query_pb2.TimeInterval
+    id_interval: _query_pb2.IDInterval
+    automation_id: _id_pb2.ID
+    def __init__(self, time_interval: _Optional[_Union[_query_pb2.TimeInterval, _Mapping]] = ..., id_interval: _Optional[_Union[_query_pb2.IDInterval, _Mapping]] = ..., automation_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class QueryJobsRequest(_message.Message):
     __slots__ = ("filters", "page")
     FILTERS_FIELD_NUMBER: _ClassVar[int]
     PAGE_FIELD_NUMBER: _ClassVar[int]
     filters: QueryFilters
-    page: _core_pb2.Pagination
-    def __init__(self, filters: _Optional[_Union[QueryFilters, _Mapping]] = ..., page: _Optional[_Union[_core_pb2.Pagination, _Mapping]] = ...) -> None: ...
+    page: _query_pb2.Pagination
+    def __init__(self, filters: _Optional[_Union[QueryFilters, _Mapping]] = ..., page: _Optional[_Union[_query_pb2.Pagination, _Mapping]] = ...) -> None: ...
 
 class QueryJobsResponse(_message.Message):
     __slots__ = ("jobs", "next_page")
     JOBS_FIELD_NUMBER: _ClassVar[int]
     NEXT_PAGE_FIELD_NUMBER: _ClassVar[int]
     jobs: _containers.RepeatedCompositeFieldContainer[_core_pb2.Job]
-    next_page: _core_pb2.Pagination
-    def __init__(self, jobs: _Optional[_Iterable[_Union[_core_pb2.Job, _Mapping]]] = ..., next_page: _Optional[_Union[_core_pb2.Pagination, _Mapping]] = ...) -> None: ...
+    next_page: _query_pb2.Pagination
+    def __init__(self, jobs: _Optional[_Iterable[_Union[_core_pb2.Job, _Mapping]]] = ..., next_page: _Optional[_Union[_query_pb2.Pagination, _Mapping]] = ...) -> None: ...
 
 class GetJobPrototypeRequest(_message.Message):
     __slots__ = ("job_id",)
     JOB_ID_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ...) -> None: ...
+    job_id: _id_pb2.ID
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ...) -> None: ...
 
 class GetJobPrototypeResponse(_message.Message):
     __slots__ = ("root_tasks", "job_name")
@@ -118,7 +120,7 @@ class CloneJobRequest(_message.Message):
     JOB_ID_FIELD_NUMBER: _ClassVar[int]
     ROOT_TASKS_OVERRIDES_FIELD_NUMBER: _ClassVar[int]
     JOB_NAME_FIELD_NUMBER: _ClassVar[int]
-    job_id: _core_pb2.UUID
+    job_id: _id_pb2.ID
     root_tasks_overrides: _containers.RepeatedCompositeFieldContainer[_core_pb2.TaskSubmission]
     job_name: str
-    def __init__(self, job_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., root_tasks_overrides: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ..., job_name: _Optional[str] = ...) -> None: ...
+    def __init__(self, job_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., root_tasks_overrides: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ..., job_name: _Optional[str] = ...) -> None: ...
diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.py
index e457f07..da01929 100644
--- a/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.py
+++ b/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.py
@@ -23,31 +23,38 @@
 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
+from tilebox.datasets.tilebox.v1 import id_pb2 as tilebox_dot_v1_dot_id__pb2
 from tilebox.workflows.workflows.v1 import core_pb2 as workflows_dot_v1_dot_core__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17workflows/v1/task.proto\x12\x0cworkflows.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x17workflows/v1/core.proto\"\xc9\x01\n\x0fNextTaskRequest\x12\x44\n\rcomputed_task\x18\x01 \x01(\x0b\x32\x1a.workflows.v1.ComputedTaskH\x00R\x0c\x63omputedTask\x88\x01\x01\x12I\n\x10next_task_to_run\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.NextTaskToRunH\x01R\rnextTaskToRun\x88\x01\x01\x42\x10\n\x0e_computed_taskB\x13\n\x11_next_task_to_run\"r\n\rNextTaskToRun\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\x12>\n\x0bidentifiers\x18\x02 \x03(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\x0bidentifiers\"\x87\x01\n\x0c\x43omputedTask\x12\"\n\x02id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x02id\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12\x39\n\tsub_tasks\x18\x03 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x08subTasks\"C\n\x10NextTaskResponse\x12/\n\tnext_task\x18\x01 \x01(\x0b\x32\x12.workflows.v1.TaskR\x08nextTask\"y\n\x11TaskFailedRequest\x12+\n\x07task_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x06taskId\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12\x1d\n\ncancel_job\x18\x03 \x01(\x08R\tcancelJob\"B\n\x11TaskStateResponse\x12-\n\x05state\x18\x01 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\"\x9c\x01\n\x10TaskLeaseRequest\x12+\n\x07task_id\x18\x01 \x01(\x0b\x32\x12.workflows.v1.UUIDR\x06taskId\x12G\n\x0frequested_lease\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00R\x0erequestedLease\x88\x01\x01\x42\x12\n\x10_requested_lease2\xf4\x01\n\x0bTaskService\x12I\n\x08NextTask\x12\x1d.workflows.v1.NextTaskRequest\x1a\x1e.workflows.v1.NextTaskResponse\x12N\n\nTaskFailed\x12\x1f.workflows.v1.TaskFailedRequest\x1a\x1f.workflows.v1.TaskStateResponse\x12J\n\x0f\x45xtendTaskLease\x12\x1e.workflows.v1.TaskLeaseRequest\x1a\x17.workflows.v1.TaskLeaseBn\n\x10\x63om.workflows.v1B\tTaskProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17workflows/v1/task.proto\x12\x0cworkflows.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x13tilebox/v1/id.proto\x1a\x17workflows/v1/core.proto\"\xa6\x01\n\x0fNextTaskRequest\x12\x46\n\rcomputed_task\x18\x01 \x01(\x0b\x32\x1a.workflows.v1.ComputedTaskB\x05\xaa\x01\x02\x08\x01R\x0c\x63omputedTask\x12K\n\x10next_task_to_run\x18\x02 \x01(\x0b\x32\x1b.workflows.v1.NextTaskToRunB\x05\xaa\x01\x02\x08\x01R\rnextTaskToRun\"r\n\rNextTaskToRun\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\x12>\n\x0bidentifiers\x18\x02 \x03(\x0b\x32\x1c.workflows.v1.TaskIdentifierR\x0bidentifiers\"\x83\x01\n\x0c\x43omputedTask\x12\x1e\n\x02id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x02id\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12\x39\n\tsub_tasks\x18\x03 \x03(\x0b\x32\x1c.workflows.v1.TaskSubmissionR\x08subTasks\"C\n\x10NextTaskResponse\x12/\n\tnext_task\x18\x01 \x01(\x0b\x32\x12.workflows.v1.TaskR\x08nextTask\"u\n\x11TaskFailedRequest\x12\'\n\x07task_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x06taskId\x12\x18\n\x07\x64isplay\x18\x02 \x01(\tR\x07\x64isplay\x12\x1d\n\ncancel_job\x18\x03 \x01(\x08R\tcancelJob\"B\n\x11TaskStateResponse\x12-\n\x05state\x18\x01 \x01(\x0e\x32\x17.workflows.v1.TaskStateR\x05state\"\x86\x01\n\x10TaskLeaseRequest\x12\'\n\x07task_id\x18\x01 \x01(\x0b\x32\x0e.tilebox.v1.IDR\x06taskId\x12I\n\x0frequested_lease\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x05\xaa\x01\x02\x08\x01R\x0erequestedLease2\xf4\x01\n\x0bTaskService\x12I\n\x08NextTask\x12\x1d.workflows.v1.NextTaskRequest\x1a\x1e.workflows.v1.NextTaskResponse\x12N\n\nTaskFailed\x12\x1f.workflows.v1.TaskFailedRequest\x1a\x1f.workflows.v1.TaskStateResponse\x12J\n\x0f\x45xtendTaskLease\x12\x1e.workflows.v1.TaskLeaseRequest\x1a\x17.workflows.v1.TaskLeaseBs\n\x10\x63om.workflows.v1B\tTaskProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.task_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   _globals['DESCRIPTOR']._loaded_options = None
-  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\tTaskProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1'
-  _globals['_NEXTTASKREQUEST']._serialized_start=99
-  _globals['_NEXTTASKREQUEST']._serialized_end=300
-  _globals['_NEXTTASKTORUN']._serialized_start=302
-  _globals['_NEXTTASKTORUN']._serialized_end=416
-  _globals['_COMPUTEDTASK']._serialized_start=419
-  _globals['_COMPUTEDTASK']._serialized_end=554
-  _globals['_NEXTTASKRESPONSE']._serialized_start=556
-  _globals['_NEXTTASKRESPONSE']._serialized_end=623
-  _globals['_TASKFAILEDREQUEST']._serialized_start=625
-  _globals['_TASKFAILEDREQUEST']._serialized_end=746
-  _globals['_TASKSTATERESPONSE']._serialized_start=748
-  _globals['_TASKSTATERESPONSE']._serialized_end=814
-  _globals['_TASKLEASEREQUEST']._serialized_start=817
-  _globals['_TASKLEASEREQUEST']._serialized_end=973
-  _globals['_TASKSERVICE']._serialized_start=976
-  _globals['_TASKSERVICE']._serialized_end=1220
+  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\tTaskProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002'
+  _globals['_NEXTTASKREQUEST'].fields_by_name['computed_task']._loaded_options = None
+  _globals['_NEXTTASKREQUEST'].fields_by_name['computed_task']._serialized_options = b'\252\001\002\010\001'
+  _globals['_NEXTTASKREQUEST'].fields_by_name['next_task_to_run']._loaded_options = None
+  _globals['_NEXTTASKREQUEST'].fields_by_name['next_task_to_run']._serialized_options = b'\252\001\002\010\001'
+  _globals['_TASKLEASEREQUEST'].fields_by_name['requested_lease']._loaded_options = None
+  _globals['_TASKLEASEREQUEST'].fields_by_name['requested_lease']._serialized_options = b'\252\001\002\010\001'
+  _globals['_NEXTTASKREQUEST']._serialized_start=120
+  _globals['_NEXTTASKREQUEST']._serialized_end=286
+  _globals['_NEXTTASKTORUN']._serialized_start=288
+  _globals['_NEXTTASKTORUN']._serialized_end=402
+  _globals['_COMPUTEDTASK']._serialized_start=405
+  _globals['_COMPUTEDTASK']._serialized_end=536
+  _globals['_NEXTTASKRESPONSE']._serialized_start=538
+  _globals['_NEXTTASKRESPONSE']._serialized_end=605
+  _globals['_TASKFAILEDREQUEST']._serialized_start=607
+  _globals['_TASKFAILEDREQUEST']._serialized_end=724
+  _globals['_TASKSTATERESPONSE']._serialized_start=726
+  _globals['_TASKSTATERESPONSE']._serialized_end=792
+  _globals['_TASKLEASEREQUEST']._serialized_start=795
+  _globals['_TASKLEASEREQUEST']._serialized_end=929
+  _globals['_TASKSERVICE']._serialized_start=932
+  _globals['_TASKSERVICE']._serialized_end=1176
 # @@protoc_insertion_point(module_scope)
diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.pyi b/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.pyi
index 6c0ebec..041d641 100644
--- a/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.pyi
+++ b/tilebox-workflows/tilebox/workflows/workflows/v1/task_pb2.pyi
@@ -1,4 +1,5 @@
 from google.protobuf import duration_pb2 as _duration_pb2
+from tilebox.datasets.tilebox.v1 import id_pb2 as _id_pb2
 from tilebox.workflows.workflows.v1 import core_pb2 as _core_pb2
 from google.protobuf.internal import containers as _containers
 from google.protobuf import descriptor as _descriptor
@@ -28,10 +29,10 @@ class ComputedTask(_message.Message):
     ID_FIELD_NUMBER: _ClassVar[int]
     DISPLAY_FIELD_NUMBER: _ClassVar[int]
     SUB_TASKS_FIELD_NUMBER: _ClassVar[int]
-    id: _core_pb2.UUID
+    id: _id_pb2.ID
     display: str
     sub_tasks: _containers.RepeatedCompositeFieldContainer[_core_pb2.TaskSubmission]
-    def __init__(self, id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., display: _Optional[str] = ..., sub_tasks: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ...) -> None: ...
+    def __init__(self, id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., display: _Optional[str] = ..., sub_tasks: _Optional[_Iterable[_Union[_core_pb2.TaskSubmission, _Mapping]]] = ...) -> None: ...
 
 class NextTaskResponse(_message.Message):
     __slots__ = ("next_task",)
@@ -44,10 +45,10 @@ class TaskFailedRequest(_message.Message):
     TASK_ID_FIELD_NUMBER: _ClassVar[int]
     DISPLAY_FIELD_NUMBER: _ClassVar[int]
     CANCEL_JOB_FIELD_NUMBER: _ClassVar[int]
-    task_id: _core_pb2.UUID
+    task_id: _id_pb2.ID
     display: str
     cancel_job: bool
-    def __init__(self, task_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., display: _Optional[str] = ..., cancel_job: bool = ...) -> None: ...
+    def __init__(self, task_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., display: _Optional[str] = ..., cancel_job: bool = ...) -> None: ...
 
 class TaskStateResponse(_message.Message):
     __slots__ = ("state",)
@@ -59,6 +60,6 @@ class TaskLeaseRequest(_message.Message):
     __slots__ = ("task_id", "requested_lease")
     TASK_ID_FIELD_NUMBER: _ClassVar[int]
     REQUESTED_LEASE_FIELD_NUMBER: _ClassVar[int]
-    task_id: _core_pb2.UUID
+    task_id: _id_pb2.ID
     requested_lease: _duration_pb2.Duration
-    def __init__(self, task_id: _Optional[_Union[_core_pb2.UUID, _Mapping]] = ..., requested_lease: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ...
+    def __init__(self, task_id: _Optional[_Union[_id_pb2.ID, _Mapping]] = ..., requested_lease: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ...
diff --git a/tilebox-workflows/tilebox/workflows/workflows/v1/workflows_pb2.py b/tilebox-workflows/tilebox/workflows/workflows/v1/workflows_pb2.py
index 2ef21bc..5fc3e29 100644
--- a/tilebox-workflows/tilebox/workflows/workflows/v1/workflows_pb2.py
+++ b/tilebox-workflows/tilebox/workflows/workflows/v1/workflows_pb2.py
@@ -25,14 +25,14 @@
 from tilebox.workflows.workflows.v1 import core_pb2 as workflows_dot_v1_dot_core__pb2
 
 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cworkflows/v1/workflows.proto\x12\x0cworkflows.v1\x1a\x17workflows/v1/core.proto\"*\n\x14\x43reateClusterRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"6\n\x11GetClusterRequest\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\"9\n\x14\x44\x65leteClusterRequest\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\"\x17\n\x15\x44\x65leteClusterResponse\"\x15\n\x13ListClustersRequest\"I\n\x14ListClustersResponse\x12\x31\n\x08\x63lusters\x18\x01 \x03(\x0b\x32\x15.workflows.v1.ClusterR\x08\x63lusters2\xd5\x02\n\x10WorkflowsService\x12J\n\rCreateCluster\x12\".workflows.v1.CreateClusterRequest\x1a\x15.workflows.v1.Cluster\x12\x44\n\nGetCluster\x12\x1f.workflows.v1.GetClusterRequest\x1a\x15.workflows.v1.Cluster\x12X\n\rDeleteCluster\x12\".workflows.v1.DeleteClusterRequest\x1a#.workflows.v1.DeleteClusterResponse\x12U\n\x0cListClusters\x12!.workflows.v1.ListClustersRequest\x1a\".workflows.v1.ListClustersResponseBs\n\x10\x63om.workflows.v1B\x0eWorkflowsProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1b\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cworkflows/v1/workflows.proto\x12\x0cworkflows.v1\x1a\x17workflows/v1/core.proto\"*\n\x14\x43reateClusterRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"6\n\x11GetClusterRequest\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\"9\n\x14\x44\x65leteClusterRequest\x12!\n\x0c\x63luster_slug\x18\x01 \x01(\tR\x0b\x63lusterSlug\"\x17\n\x15\x44\x65leteClusterResponse\"\x15\n\x13ListClustersRequest\"I\n\x14ListClustersResponse\x12\x31\n\x08\x63lusters\x18\x01 \x03(\x0b\x32\x15.workflows.v1.ClusterR\x08\x63lusters2\xd5\x02\n\x10WorkflowsService\x12J\n\rCreateCluster\x12\".workflows.v1.CreateClusterRequest\x1a\x15.workflows.v1.Cluster\x12\x44\n\nGetCluster\x12\x1f.workflows.v1.GetClusterRequest\x1a\x15.workflows.v1.Cluster\x12X\n\rDeleteCluster\x12\".workflows.v1.DeleteClusterRequest\x1a#.workflows.v1.DeleteClusterResponse\x12U\n\x0cListClusters\x12!.workflows.v1.ListClustersRequest\x1a\".workflows.v1.ListClustersResponseBx\n\x10\x63om.workflows.v1B\x0eWorkflowsProtoP\x01\xa2\x02\x03WXX\xaa\x02\x0cWorkflows.V1\xca\x02\x0cWorkflows\\V1\xe2\x02\x18Workflows\\V1\\GPBMetadata\xea\x02\rWorkflows::V1\x92\x03\x02\x08\x02\x62\x08\x65\x64itionsp\xe8\x07')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'workflows.v1.workflows_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
   _globals['DESCRIPTOR']._loaded_options = None
-  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\016WorkflowsProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1'
+  _globals['DESCRIPTOR']._serialized_options = b'\n\020com.workflows.v1B\016WorkflowsProtoP\001\242\002\003WXX\252\002\014Workflows.V1\312\002\014Workflows\\V1\342\002\030Workflows\\V1\\GPBMetadata\352\002\rWorkflows::V1\222\003\002\010\002'
   _globals['_CREATECLUSTERREQUEST']._serialized_start=71
   _globals['_CREATECLUSTERREQUEST']._serialized_end=113
   _globals['_GETCLUSTERREQUEST']._serialized_start=115
diff --git a/tools/generate_protobuf.py b/tools/generate_protobuf.py
index 001a14d..6197c33 100755
--- a/tools/generate_protobuf.py
+++ b/tools/generate_protobuf.py
@@ -27,13 +27,20 @@ def main() -> None:
     os.system("buf generate --template buf.gen.datasets.yaml")  # noqa: S605, S607
     os.system("buf generate --template buf.gen.workflows.yaml")  # noqa: S605, S607
 
-    for package in ["datasets", "workflows"]:
-        package_mapping = {
-            f"from {package}.v1 import": f"from tilebox.{package}.{package}.v1 import",
-        }
-        folder = clients_repo / f"tilebox-{package}" / "tilebox" / package / package / "v1"
-
-        for pattern in ["*.py", "*.pyi"]:
+    package_mapping = {
+        "from datasets.v1 import": "from tilebox.datasets.datasets.v1 import",
+        "from tilebox.v1 import": "from tilebox.datasets.tilebox.v1 import",
+        "from workflows.v1 import": "from tilebox.workflows.workflows.v1 import",
+    }
+
+    folders = (
+        clients_repo / "tilebox-datasets" / "tilebox" / "datasets" / "datasets" / "v1",
+        clients_repo / "tilebox-datasets" / "tilebox" / "datasets" / "tilebox" / "v1",
+        clients_repo / "tilebox-workflows" / "tilebox" / "workflows" / "workflows" / "v1",
+    )
+
+    for folder in folders:
+        for pattern in ("*.py", "*.pyi"):
             for py_file in folder.rglob(pattern):
                 fix_imports(py_file, package_mapping)
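Note: fix_imports is defined earlier in tools/generate_protobuf.py and is untouched by this diff, so its body does not appear above. A minimal sketch of the contract the refactored loop assumes (a plain in-place substring rewrite of each generated file; the actual helper may differ):

from pathlib import Path


def fix_imports(py_file: Path, package_mapping: dict[str, str]) -> None:
    # Rewrite the bare imports emitted by the buf/protoc plugins
    # (e.g. "from tilebox.v1 import ...") to their vendored package paths.
    contents = py_file.read_text()
    for generated_import, vendored_import in package_mapping.items():
        contents = contents.replace(generated_import, vendored_import)
    py_file.write_text(contents)

With the mapping flattened into one shared dict, a single pass over all three generated folders (including the new shared tilebox/v1 module, which both packages import) replaces the previous per-package loop.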