|
6 | 6 |
|
7 | 7 | from contextlib import closing |
8 | 8 | from pathlib import Path |
9 | | -from typing import Literal, Optional, TYPE_CHECKING, Union, overload |
10 | | -from collections.abc import Iterable, Mapping, Sequence |
| 9 | +from typing import Literal, Optional, TYPE_CHECKING, TypedDict, TypeVar, Union, overload |
| 10 | +from collections.abc import Iterable, Sequence |
11 | 11 |
|
12 | 12 | from tableauserverclient.helpers.headers import fix_filename |
13 | 13 | from tableauserverclient.models.dqw_item import DQWItem |
|
# Readable binary file objects accepted where file content can be supplied
# directly instead of a filesystem path.
FileObject = Union[io.BufferedReader, io.BytesIO]
# Either a filesystem path or an already-open binary file object.
# NOTE(review): appears to duplicate the FileObjectR/PathOrFileR aliases
# defined just below — presumably kept for backward compatibility; confirm
# before consolidating.
PathOrFile = Union[FilePath, FileObject]
52 | 52 |
|
53 | | -FilePath = Union[str, os.PathLike] |
# Binary file objects open for reading (e.g. publish/upload sources).
FileObjectR = Union[io.BufferedReader, io.BytesIO]
# Binary file objects open for writing (e.g. download targets).
FileObjectW = Union[io.BufferedWriter, io.BytesIO]
# A filesystem path or a readable binary file object.
PathOrFileR = Union[FilePath, FileObjectR]
# A filesystem path or a writable binary file object.
PathOrFileW = Union[FilePath, FileObjectW]
58 | 57 |
|
59 | 58 |
|
# Typed schemas for the "actions" payload passed to
# Datasources.update_hyper_data.  Functional TypedDict syntax is required
# because the wire-format keys contain hyphens ("target-col",
# "source-table", ...), which are not valid Python identifiers.

# A single column-to-column condition matching rows between the source
# and target tables.
# NOTE(review): the REST API's condition object may also support boolean
# combinators and constant operands; this type models only the
# column-to-column form shown here — confirm against the API reference
# before widening.
HyperActionCondition = TypedDict(
    "HyperActionCondition",
    {
        "op": str,
        "target-col": str,
        "source-col": str,
    },
)

# Row-level actions: these carry a condition selecting which rows of the
# target table are affected.
HyperActionRow = TypedDict(
    "HyperActionRow",
    {
        "action": Literal[
            "update",
            "upsert",
            "delete",
        ],
        "source-table": str,
        "target-table": str,
        "condition": HyperActionCondition,
    },
)

# Table-level actions: insert/replace operate on whole tables, so no
# condition key is present.
HyperActionTable = TypedDict(
    "HyperActionTable",
    {
        "action": Literal[
            "insert",
            "replace",
        ],
        "source-table": str,
        "target-table": str,
    },
)

# One entry of the "actions" sequence accepted by update_hyper_data;
# the "action" key discriminates between the two shapes.
HyperAction = Union[HyperActionTable, HyperActionRow]
| 95 | + |
| 96 | + |
60 | 97 | class Datasources(QuerysetEndpoint[DatasourceItem], TaggingMixin[DatasourceItem]): |
61 | 98 | def __init__(self, parent_srv: "Server") -> None: |
62 | 99 | super().__init__(parent_srv) |
@@ -191,16 +228,34 @@ def delete(self, datasource_id: str) -> None: |
191 | 228 | self.delete_request(url) |
192 | 229 | logger.info(f"Deleted single datasource (ID: {datasource_id})") |
193 | 230 |
|
# TypeVar bound to writable file objects so the overloads below can say
# "returns the very file object the caller passed in".
# NOTE(review): bound as a class attribute and rebound again before
# download_revision — harmless at runtime, but a single module-level
# TypeVar would be cleaner; confirm before moving.
T = TypeVar("T", bound=FileObjectW)

@overload
def download(
    self,
    datasource_id: str,
    filepath: T,
    include_extract: bool = True,
) -> T: ...
# ^ Caller supplied an open writable file object: the content is written
#   into it and that same object is returned.

@overload
def download(
    self,
    datasource_id: str,
    filepath: Optional[FilePath] = None,
    include_extract: bool = True,
) -> str: ...
# ^ Caller supplied a path (or nothing): the return type is str —
#   presumably the path of the written file; confirm in the
#   implementation body.
194 | 249 | # Download 1 datasource by id |
195 | 250 | @api(version="2.0") |
196 | 251 | @parameter_added_in(no_extract="2.5") |
197 | 252 | @parameter_added_in(include_extract="2.5") |
198 | 253 | def download( |
199 | 254 | self, |
200 | | - datasource_id: str, |
201 | | - filepath: Optional[PathOrFileW] = None, |
202 | | - include_extract: bool = True, |
203 | | - ) -> PathOrFileW: |
| 255 | + datasource_id, |
| 256 | + filepath=None, |
| 257 | + include_extract=True, |
| 258 | + ): |
204 | 259 | """ |
205 | 260 | Downloads the specified data source from a site. The data source is |
206 | 261 | downloaded as a .tdsx file. |
@@ -479,13 +534,13 @@ def publish( |
479 | 534 | @parameter_added_in(as_job="3.0") |
480 | 535 | def publish( |
481 | 536 | self, |
482 | | - datasource_item: DatasourceItem, |
483 | | - file: PathOrFileR, |
484 | | - mode: str, |
485 | | - connection_credentials: Optional[ConnectionCredentials] = None, |
486 | | - connections: Optional[Sequence[ConnectionItem]] = None, |
487 | | - as_job: bool = False, |
488 | | - ) -> Union[DatasourceItem, JobItem]: |
| 537 | + datasource_item, |
| 538 | + file, |
| 539 | + mode, |
| 540 | + connection_credentials=None, |
| 541 | + connections=None, |
| 542 | + as_job=False, |
| 543 | + ): |
489 | 544 | """ |
490 | 545 | Publishes a data source to a server, or appends data to an existing |
491 | 546 | data source. |
@@ -631,7 +686,7 @@ def update_hyper_data( |
631 | 686 | datasource_or_connection_item: Union[DatasourceItem, ConnectionItem, str], |
632 | 687 | *, |
633 | 688 | request_id: str, |
634 | | - actions: Sequence[Mapping], |
| 689 | + actions: Sequence[HyperAction], |
635 | 690 | payload: Optional[FilePath] = None, |
636 | 691 | ) -> JobItem: |
637 | 692 | """ |
@@ -898,15 +953,35 @@ def _get_datasource_revisions( |
898 | 953 | revisions = RevisionItem.from_response(server_response.content, self.parent_srv.namespace, datasource_item) |
899 | 954 | return revisions |
900 | 955 |
|
901 | | - # Download 1 datasource revision by revision number |
902 | | - @api(version="2.3") |
# Re-bind T (same writable-file-object bound) for the download_revision
# overloads below.  NOTE(review): duplicates the earlier class-level
# TypeVar assignment; the second assignment simply rebinds the same
# attribute — harmless, but one shared TypeVar would suffice.
T = TypeVar("T", bound=FileObjectW)

@overload
def download_revision(
    self,
    datasource_id: str,
    revision_number: Optional[str],
    filepath: T,
    include_extract: bool = True,
) -> T: ...
# ^ Writable file object in: same object returned after writing into it.

@overload
def download_revision(
    self,
    datasource_id: str,
    revision_number: Optional[str],
    filepath: Optional[FilePath] = None,
    include_extract: bool = True,
) -> str: ...
# ^ Path or None in: returns a str — presumably the path of the
#   downloaded file; confirm in the implementation body.
| 975 | + |
| 976 | + # Download 1 datasource revision by revision number |
| 977 | + @api(version="2.3") |
| 978 | + def download_revision( |
| 979 | + self, |
| 980 | + datasource_id, |
| 981 | + revision_number, |
| 982 | + filepath=None, |
| 983 | + include_extract=True, |
| 984 | + ): |
910 | 985 | """ |
911 | 986 | Downloads a specific version of a data source prior to the current one |
912 | 987 | in .tdsx format. To download the current version of a data source set |
|
0 commit comments