setup.py
#!/usr/bin/env python
import os
import sys
import re

# require python 3.8 or newer
if sys.version_info < (3, 8):
    print("Error: dbt does not support this version of Python.")
    print("Please upgrade to Python 3.8 or higher.")
    sys.exit(1)

# require a version of setuptools that supports find_namespace_packages
from setuptools import setup

try:
    from setuptools import find_namespace_packages
except ImportError:
    # the user has a downlevel version of setuptools
    print("Error: dbt requires setuptools v40.1.0 or higher.")
    print('Please upgrade setuptools with "pip install --upgrade setuptools" and try again.')
    sys.exit(1)

# pull the long description from the README
this_directory = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_directory, "README.md"), "r", encoding="utf8") as f:
    long_description = f.read()

# get this package's version from dbt/adapters/<name>/__version__.py
def _get_plugin_version_dict():
    _version_path = os.path.join(this_directory, "dbt", "adapters", "spark", "__version__.py")
    _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
    _pre = r"""((?P<prekind>a|b|rc)(?P<pre>\d+))?"""
    _build = r"""(\+build[0-9]+)?"""
    _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}{_build}["']"""
    with open(_version_path) as f:
        match = re.search(_version_pattern, f.read().strip())
        if match is None:
            raise ValueError(f"invalid version at {_version_path}")
        return match.groupdict()
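
# Illustrative only (not part of the original file): for a __version__.py line like
#     version = "1.9.0a1"
# the pattern above yields groupdict() ==
#     {"major": "1", "minor": "9", "patch": "0", "prekind": "a", "pre": "1"}
# (for a plain release such as "1.9.0", "prekind" and "pre" come back as None).
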
package_name = "dbt-spark"
package_version = "1.9.0a1"
description = """The Apache Spark adapter plugin for dbt"""
odbc_extras = ["pyodbc~=5.1.0"]
pyhive_extras = [
    "PyHive[hive_pure_sasl]~=0.7.0",
    "thrift>=0.11.0,<0.17.0",
]
session_extras = ["pyspark>=3.0.0,<4.0.0"]
all_extras = odbc_extras + pyhive_extras + session_extras

setup(
    name=package_name,
    version=package_version,
    description=description,
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="dbt Labs",
    author_email="[email protected]",
    url="https://github.com/dbt-labs/dbt-spark",
    packages=find_namespace_packages(include=["dbt", "dbt.*"]),
    include_package_data=True,
    install_requires=[
        "sqlparams>=3.0.0",
        "dbt-common>=1.10,<2.0",
        "dbt-adapters>=1.7,<2.0",
        # add dbt-core to ensure backwards compatibility of installation; this is not a functional dependency
        "dbt-core>=1.8.0",
    ],
    extras_require={
        "ODBC": odbc_extras,
        "PyHive": pyhive_extras,
        "session": session_extras,
        "all": all_extras,
    },
    zip_safe=False,
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX :: Linux",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
    ],
    python_requires=">=3.8",
)
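
# Usage sketch (an assumption, not part of the original file): the extras_require
# keys above map onto pip's extras syntax, e.g.
#     pip install dbt-spark                 # base adapter only
#     pip install "dbt-spark[PyHive]"       # adds the PyHive/Thrift connection deps
#     pip install "dbt-spark[all]"          # ODBC + PyHive + session extras together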