-
Notifications
You must be signed in to change notification settings - Fork 635
Expand file tree
/
Copy pathsetup.py
More file actions
101 lines (91 loc) · 3.33 KB
/
setup.py
File metadata and controls
101 lines (91 loc) · 3.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
import re
from setuptools import find_namespace_packages, setup

# Ensure we match the version set in src/optimum/version.py
filepath = "optimum/version.py"
try:
    with open(filepath, encoding="utf-8") as version_file:
        # Exactly one __version__ = "x.y.z" assignment is expected; the
        # 1-tuple unpack raises ValueError if zero or multiple matches occur.
        (__version__,) = re.findall('__version__ = "(.*)"', version_file.read())
except Exception as error:
    # Raise instead of `assert False`: asserts are stripped under `python -O`,
    # which would let a broken version file pass silently.
    raise RuntimeError(f"Error: Could not open '{filepath}' due to {error}\n") from error
# Core runtime dependencies installed with `pip install optimum`.
REQUIRED_PKGS = [
    "transformers>=4.29",
    "torch>=1.11",
    "packaging",
    "numpy",
    "huggingface_hub>=0.8.0",
]
# Extra dependencies needed only to run the test suite (extras: "tests"/"dev").
TESTS_REQUIRE = [
    "pytest",
    "accelerate",
    "requests",
    "parameterized",
    "pytest-xdist",
    "Pillow",
    "sacremoses",
    "torchvision",
    "torchaudio",
    "einops",
    "timm",
    "scikit-learn",
    "sentencepiece",
    "rjieba",
    "hf_xet",
]
# Pinned formatting/linting tools (extras: "quality"/"dev").
QUALITY_REQUIRE = ["black~=23.1", "ruff==0.1.5"]
# Dependencies for the benchmarking scripts (extra: "benchmark").
BENCHMARK_REQUIRE = ["optuna", "tqdm", "scikit-learn", "seqeval", "torchvision", "evaluate>=0.2.0"]
# Optional-feature extras: hardware/backend integrations map to the matching
# optimum-* subpackage on PyPI; the entries after the divider are developer
# conveniences built from the requirement lists above.
EXTRAS_REQUIRE = {
    "amd": "optimum-amd",
    "furiosa": "optimum-furiosa",
    "graphcore": "optimum-graphcore",
    "habana": "optimum-habana>=1.17.0",
    "intel": "optimum-intel>=1.23.0",
    "ipex": "optimum-intel[ipex]>=1.23.0",
    "nncf": "optimum-intel[nncf]>=1.23.0",
    "neural-compressor": "optimum-intel[neural-compressor]>=1.23.0",
    "onnx": "optimum-onnx",
    "onnxruntime": "optimum-onnx[onnxruntime]",
    "onnxruntime-gpu": "optimum-onnx[onnxruntime-gpu]",
    "openvino": "optimum-intel[openvino]>=1.23.0",
    "quanto": "optimum-quanto>=0.2.4",
    ###########################################################################
    "dev": TESTS_REQUIRE + QUALITY_REQUIRE,
    "tests": TESTS_REQUIRE,
    "quality": QUALITY_REQUIRE,
    "benchmark": BENCHMARK_REQUIRE,
    "doc-build": ["accelerate"],
}
# Read the long description up front with a context manager so the file handle
# is closed deterministically (the inline `open(...).read()` leaked it).
with open("README.md", "r", encoding="utf-8") as readme_file:
    _long_description = readme_file.read()

setup(
    name="optimum",
    version=__version__,
    description="Optimum Library is an extension of the Hugging Face Transformers library, providing a framework to "
    "integrate third-party libraries from Hardware Partners and interface with their specific "
    "functionality.",
    long_description=_long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="transformers, quantization, pruning, optimization, training, inference, onnx, onnx runtime, intel, "
    "habana, graphcore, neural compressor, ipu, hpu",
    url="https://github.com/huggingface/optimum",
    author="HuggingFace Inc. Special Ops Team",
    author_email="hardware@huggingface.co",
    license="Apache",
    # Namespace packages: pick up every `optimum.*` subpackage without
    # requiring __init__.py at the namespace root.
    packages=find_namespace_packages(include=["optimum*"]),
    install_requires=REQUIRED_PKGS,
    extras_require=EXTRAS_REQUIRE,
    python_requires=">=3.9.0",
    include_package_data=True,
    zip_safe=False,
    # Exposes the `optimum-cli` console command.
    entry_points={"console_scripts": ["optimum-cli=optimum.commands.optimum_cli:main"]},
)