-
Notifications
You must be signed in to change notification settings - Fork 0
/
setup.py
90 lines (74 loc) · 2.59 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import os
import re
import sys
from setuptools import setup, find_packages
# from optimus.version import __version__
# Get version without importing, which avoids dependency issues
def get_version():
    """Return the package version string parsed from optimus/version.py.

    The file is read textually instead of imported so that running
    setup.py does not require optimus' runtime dependencies.

    Raises:
        RuntimeError: if no ``__version__`` assignment is found.
    """
    with open('optimus/version.py') as version_file:
        # \s* (not \s+) also matches `__version__='x'` written without
        # spaces around the equals sign.
        match = re.search(r"""__version__\s*=\s*(['"])(?P<version>.+?)\1""",
                          version_file.read())
        if match is None:
            raise RuntimeError('Unable to find __version__ in optimus/version.py')
        return match.group('version')
# Requirements
# Detect a Google Colab runtime: the google.colab package is only
# importable inside Colab, so a successful import is the signal.
try:
    import google.colab
    IN_COLAB = True
except ImportError:
    IN_COLAB = False
# Pick the requirements file that matches the runtime environment,
# then read it once: Databricks and Colab ship pinned dependency sets.
if "DATABRICKS_RUNTIME_VERSION" in os.environ:
    _requirements_file = 'requirements-databricks.txt'
elif IN_COLAB:
    _requirements_file = 'requirements-google-colab.txt'
else:
    _requirements_file = 'requirements.txt'
with open(_requirements_file) as f:
    required = f.read().splitlines()
# Fail fast on unsupported interpreters before setup() does any work.
if not sys.version_info >= (3, 6):
    raise RuntimeError('This version requires Python 3.6+')  # pragma: no cover
def readme():
    """Return the contents of README.md for use as the long description.

    The file is decoded as UTF-8 explicitly; relying on the platform
    default encoding breaks on Windows (cp1252) when the README contains
    non-ASCII characters.
    """
    with open('README.md', encoding='utf-8') as f:
        return f.read()
# Extra dependency groups exposed through extras_require below.
lint_requires = ['pep8', 'pyflakes']
tests_require = ['pytest', 'mock', 'nose']
dependency_links = []
setup_requires = ['pytest-runner']
# Pull in nose only when invoked as `python setup.py nosetests`.
if 'nosetests' in sys.argv[1:]:
    setup_requires.append('nose')
# Register the distribution with setuptools.  All metadata, dependency
# declarations and packaging options live in this single call.
setup(
    name='optimuspyspark',
    version=get_version(),  # parsed textually from optimus/version.py
    author='Favio Vazquez and Argenis Leon',
    author_email='[email protected]',
    url='https://github.com/ironmussa/Optimus/',
    # NOTE(review): archive tag 2.2.31 is hard-coded here while version=
    # is read from version.py — confirm they are kept in sync on release.
    download_url='https://github.com/ironmussa/Optimus/archive/2.2.31.tar.gz',
    description=('Optimus is the missing framework for cleaning and pre-processing data in a distributed fashion with '
                 'pyspark.'),
    long_description=readme(),  # README.md contents
    long_description_content_type='text/markdown',
    license='APACHE',
    packages=find_packages(),
    # `required` was selected above based on the runtime environment
    # (Databricks / Colab / default).
    install_requires=required,
    tests_require=tests_require,
    setup_requires=setup_requires,
    extras_require={
        'test': tests_require,
        'all': required + tests_require,
        'docs': ['sphinx'] + tests_require,
        'lint': lint_requires
    },
    dependency_links=dependency_links,
    test_suite='nose.collector',
    include_package_data=True,  # honor MANIFEST.in / package data files
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    keywords=['datacleaner', 'apachespark', 'spark', 'pyspark', 'data-wrangling', 'data-cleansing', 'data-profiling'],
)