18 changes: 8 additions & 10 deletions stubs/transformers-stubs/models/auto/auto_factory.pyi
@@ -1,17 +1,15 @@
 import os
 from collections import OrderedDict
 from collections.abc import Iterator
-from typing import Any
+from typing import Any, TypeVar, Union
 from typing_extensions import TypeAlias
 
 from transformers.configuration_utils import PretrainedConfig
 from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
 
-_LazyAutoMappingValue: TypeAlias = tuple[
-    # Tokenizers will depend on packages installed, too much variance and there are no common base or Protocol
-    type[Any | None],
-    type[PreTrainedTokenizerFast | None],
-]
+_T = TypeVar("_T")
+# Tokenizers will depend on packages installed, too much variance and there are no common base or Protocol
+_LazyAutoMappingValue: TypeAlias = tuple[type[Any] | None, type[Any] | None]
 
 CLASS_DOCSTRING: str
 FROM_CONFIG_DOCSTRING: str
@@ -26,7 +24,7 @@ class _BaseAutoModelClass:
     @classmethod
     def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], *model_args, **kwargs): ...
     @classmethod
-    def register(cls, config_class, model_class) -> None: ...
+    def register(cls, config_class, model_class, exist_ok=False) -> None: ...
 
 def insert_head_doc(docstring, head_doc: str = ""): ...
 def auto_class_update(cls, checkpoint_for_example: str = "bert-base-cased", head_doc: str = ""): ...
@@ -38,10 +36,10 @@ class _LazyAutoMapping(OrderedDict[type[PretrainedConfig], _LazyAutoMappingValue]):
     def __len__(self) -> int: ...
     def __getitem__(self, key: type[PretrainedConfig]) -> _LazyAutoMappingValue: ...
     def keys(self) -> list[type[PretrainedConfig]]: ...
-    def get(self, key: type[PretrainedConfig], default: _LazyAutoMappingValue) -> _LazyAutoMappingValue: ...
+    def get(self, key: type[PretrainedConfig], default: _T) -> _LazyAutoMappingValue | _T: ...
     def __bool__(self) -> bool: ...
     def values(self) -> list[_LazyAutoMappingValue]: ...
     def items(self) -> list[tuple[type[PretrainedConfig], _LazyAutoMappingValue]]: ...
     def __iter__(self) -> Iterator[type[PretrainedConfig]]: ...
-    def __contains__(self, item: object) -> bool: ...
-    def register(self, key: type[PretrainedConfig], value: _LazyAutoMappingValue) -> None: ...
+    def __contains__(self, item: type) -> bool: ...
+    def register(self, key: type[PretrainedConfig], value: _LazyAutoMappingValue, exist_ok=False) -> None: ...
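Changing the default of get from _LazyAutoMappingValue to a _T TypeVar is what lets callers pass None (or any other sentinel) and get a correctly narrowed result. A minimal sketch of how the revised stub would be exercised, assuming an installed transformers release with these auto mappings; the config class is only an example:

# Illustrative sketch, not part of the PR.
from transformers import BertConfig
from transformers.models.auto.tokenization_auto import TOKENIZER_MAPPING

# With default: _T, a None default is accepted and the result is inferred as
# _LazyAutoMappingValue | None; the old signature required the default itself
# to be a _LazyAutoMappingValue.
pair = TOKENIZER_MAPPING.get(BertConfig, None)
if pair is not None:
    slow_cls, fast_cls = pair  # each element is type[Any] | None per the stub
    print(slow_cls, fast_cls)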
9 changes: 3 additions & 6 deletions stubs/transformers-stubs/models/auto/configuration_auto.pyi
@@ -1,14 +1,13 @@
 import os
 from collections import OrderedDict
 from collections.abc import Callable, Iterator, KeysView, ValuesView
-from typing import Any, TypeVar
+from typing import Any, NoReturn, TypeVar
 
 from transformers.configuration_utils import PretrainedConfig
 
 _F = TypeVar("_F", bound=Callable[..., Any])
 
 CONFIG_MAPPING_NAMES: OrderedDict[str, str]
-CONFIG_ARCHIVE_MAP_MAPPING_NAMES: OrderedDict[str, str]
 MODEL_NAMES_MAPPING: OrderedDict[str, str]
 SPECIAL_MODEL_TYPE_TO_MODULE_NAME: OrderedDict[str, str]
 
@@ -23,7 +22,7 @@ class _LazyConfigMapping(OrderedDict[str, type[PretrainedConfig]]):
     def items(self) -> list[tuple[str, type[PretrainedConfig]]]: ...
     def __iter__(self) -> Iterator[str]: ...
     def __contains__(self, item: object) -> bool: ...
-    def register(self, key: str, value: type[PretrainedConfig]) -> None: ...
+    def register(self, key: str, value: type[PretrainedConfig], exist_ok=False) -> None: ...
 
 CONFIG_MAPPING: _LazyConfigMapping
 
@@ -36,8 +35,6 @@ class _LazyLoadAllMappings(OrderedDict[str, str]):
     def __iter__(self) -> Iterator[str]: ...
     def __contains__(self, item: object) -> bool: ...
 
-ALL_PRETRAINED_CONFIG_ARCHIVE_MAP: _LazyLoadAllMappings
-
 def replace_list_option_in_docstrings(config_to_class=None, use_model_types: bool = True) -> Callable[[_F], _F]: ...
 
 class AutoConfig:
@@ -47,4 +44,4 @@ class AutoConfig:
     @classmethod
     def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], **kwargs): ...
     @staticmethod
-    def register(model_type, config) -> None: ...
+    def register(model_type, config, exist_ok=False) -> None: ...
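The new exist_ok parameter mirrors the runtime API, which lets a caller register a model type without tripping the duplicate-registration check. A hedged sketch of the call shape the stub now describes; the my-model type and MyConfig class are invented for illustration:

# Illustrative sketch, not part of the PR.
from transformers import AutoConfig, PretrainedConfig

class MyConfig(PretrainedConfig):
    model_type = "my-model"  # hypothetical model type, invented for this example

# exist_ok=True tells transformers not to error if "my-model" is already
# registered (the duplicate check the runtime applies in recent releases).
AutoConfig.register("my-model", MyConfig, exist_ok=True)

config = AutoConfig.for_model("my-model")  # resolves to the registered MyConfig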
16 changes: 7 additions & 9 deletions stubs/transformers-stubs/models/auto/tokenization_auto.pyi
@@ -1,24 +1,24 @@
 import os
 from collections import OrderedDict
+from typing import Any
 from typing_extensions import TypeAlias
 
 from transformers.configuration_utils import PretrainedConfig
 from transformers.models.auto.auto_factory import _LazyAutoMapping
-from transformers.tokenization_utils import PreTrainedTokenizer
-from transformers.tokenization_utils_fast import PreTrainedTokenizerFast
 
 TOKENIZER_MAPPING_NAMES: OrderedDict[str, tuple[str | None, str | None]]
 TOKENIZER_MAPPING: _LazyAutoMapping
-CONFIG_TO_TYPE: dict[type[PretrainedConfig], str]
+CONFIG_TO_TYPE: dict[str, str]
 
-def tokenizer_class_from_name(class_name: str) -> PreTrainedTokenizer | PreTrainedTokenizerFast: ...
+def tokenizer_class_from_name(class_name: str) -> type[Any] | None: ...
 def get_tokenizer_config(
     pretrained_model_name_or_path: str | os.PathLike[str],
     cache_dir: str | os.PathLike[str] | None = None,
     force_download: bool = False,
-    resume_download: bool = False,
+    resume_download: bool | None = None,
     proxies: dict[str, str] | None = None,
     use_auth_token: bool | str | None = None,
+    token: bool | str | None = None,
     revision: str | None = None,
     local_files_only: bool = False,
     subfolder: str = "",
@@ -28,7 +28,5 @@ def get_tokenizer_config(
 class AutoTokenizer:
     def __init__(self) -> None: ...
     @classmethod
-    def from_pretrained(
-        cls, pretrained_model_name_or_path: str | os.PathLike[str], *inputs, **kwargs
-    ) -> PreTrainedTokenizer | PreTrainedTokenizerFast: ...
-    def register(config_class, slow_tokenizer_class=None, fast_tokenizer_class=None) -> None: ...
+    def from_pretrained(cls, pretrained_model_name_or_path: str | os.PathLike[str], *inputs, **kwargs): ...
+    def register(config_class, slow_tokenizer_class=None, fast_tokenizer_class=None, exist_ok=False) -> None: ...
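Loosening tokenizer_class_from_name to type[Any] | None captures that the function returns None when the named class cannot be resolved, for example when the backing tokenizer package is not installed, so callers have to guard the result. A small sketch under that assumption; the class and checkpoint names are only examples:

# Illustrative sketch, not part of the PR.
from transformers.models.auto.tokenization_auto import tokenizer_class_from_name

cls = tokenizer_class_from_name("BertTokenizerFast")
if cls is None:  # the stub's type[Any] | None return now forces this check
    raise RuntimeError("BertTokenizerFast is not available in this environment")
tokenizer = cls.from_pretrained("bert-base-cased")  # uses cached or downloaded files
print(type(tokenizer).__name__)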