# setup.py (forked from hpcaitech/ColossalAI)
import os
import sys
from typing import List

from setuptools import find_packages, setup

try:
    import torch  # noqa
    from torch.utils.cpp_extension import BuildExtension

    TORCH_AVAILABLE = True
except ImportError:
    TORCH_AVAILABLE = False

THIS_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_EXT = int(os.environ.get("BUILD_EXT", "0")) == 1

# we do not support Windows currently
if sys.platform == "win32":
    raise RuntimeError("Windows is not supported yet. Please try again within the Windows Subsystem for Linux (WSL).")


def fetch_requirements(path) -> List[str]:
    """
    This function reads the requirements file.

    Args:
        path (str): the path to the requirements file.

    Returns:
        The lines in the requirements file.
    """
    with open(path, "r") as fd:
        return [r.strip() for r in fd.readlines()]


def fetch_readme() -> str:
    """
    This function reads the README.md file in the current directory.

    Returns:
        The content of the README file.
    """
    with open("README.md", encoding="utf-8") as f:
        return f.read()


def get_version() -> str:
    """
    This function reads version.txt and generates the colossalai/version.py file.

    Returns:
        The library version stored in version.txt.
    """
    setup_file_path = os.path.abspath(__file__)
    project_path = os.path.dirname(setup_file_path)
    version_txt_path = os.path.join(project_path, "version.txt")
    version_py_path = os.path.join(project_path, "colossalai/version.py")

    with open(version_txt_path) as f:
        version = f.read().strip()

    # write version into version.py
    with open(version_py_path, "w") as f:
        f.write(f"__version__ = '{version}'\n")
    return version


if BUILD_EXT:
    if not TORCH_AVAILABLE:
        raise ModuleNotFoundError(
            "[extension] PyTorch is not found while BUILD_EXT=1. You need to install PyTorch first in order to build CUDA extensions."
        )

    from extensions import ALL_EXTENSIONS

    op_names = []
    ext_modules = []

    for ext_cls in ALL_EXTENSIONS:
        ext = ext_cls()
        if ext.support_aot and ext.is_available():
            ext.assert_compatible()
            op_names.append(ext.name)
            ext_modules.append(ext.build_aot())

    # show log
    if len(ext_modules) == 0:
        raise RuntimeError("[extension] Could not find any kernel compatible with the current environment.")
    else:
        op_name_list = ", ".join(op_names)
        print(f"[extension] Building extensions: {op_name_list}")
else:
    ext_modules = []

version = get_version()
package_name = "colossalai"

setup(
    name=package_name,
    version=version,
    packages=find_packages(
        exclude=(
            "extensions",
            "benchmark",
            "docker",
            "tests",
            "docs",
            "examples",
            "scripts",
            "requirements",
            "*.egg-info",
        ),
    ),
    description="An integrated large-scale model training system with efficient parallelization techniques",
    long_description=fetch_readme(),
    long_description_content_type="text/markdown",
    license="Apache Software License 2.0",
    url="https://www.colossalai.org",
    project_urls={
        "Forum": "https://github.com/hpcaitech/ColossalAI/discussions",
        "Bug Tracker": "https://github.com/hpcaitech/ColossalAI/issues",
        "Examples": "https://github.com/hpcaitech/ColossalAI-Examples",
        "Documentation": "http://colossalai.readthedocs.io",
        "Github": "https://github.com/hpcaitech/ColossalAI",
    },
    ext_modules=ext_modules,
    cmdclass={"build_ext": BuildExtension} if ext_modules else {},
    install_requires=fetch_requirements("requirements/requirements.txt"),
    entry_points="""
        [console_scripts]
        colossalai=colossalai.cli:cli
    """,
    python_requires=">=3.6",
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Environment :: GPU :: NVIDIA CUDA",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: System :: Distributed Computing",
    ],
    package_data={
        "colossalai": [
            "kernel/extensions/csrc/**/*",
            "kernel/extensions/pybind/**/*",
        ]
    },
)
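
# A minimal usage sketch (an illustration, not part of the original file): the
# C++/CUDA kernels listed in `extensions.ALL_EXTENSIONS` are compiled ahead of
# time only when the BUILD_EXT environment variable is set to 1, e.g.
#
#   BUILD_EXT=1 pip install .
#
# Without BUILD_EXT=1, the package installs with no prebuilt ext_modules.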