# hatch.toml — Hatch build and environment configuration for dbt-spark.
# Single-source the package version from the module below (Hatch reads
# __version__ from this file at build time).
[version]
path = "dbt/adapters/spark/__version__.py"
# Both distribution formats ship only the `dbt` namespace package.
[build.targets.sdist]
packages = ["dbt"]

[build.targets.wheel]
packages = ["dbt"]
# Default development environment. The dbt packages are installed straight
# from their git mainlines (no ref pinned), so this adapter is developed and
# tested against the latest upstream code.
[envs.default]
dependencies = [
    "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git",
    "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git",
    "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter",
    "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core",
    "ddtrace==2.3.0",
    "ipdb~=0.13.13",
    "pre-commit==3.7.0",
    "freezegun",
    "pytest>=7.0,<8.0",
    "pytest-csv~=3.0",
    "pytest-dotenv",
    "pytest-logbook~=1.2",
    "pytest-mock",
    "pytest-xdist",
]
# Install the project with all of its optional feature groups enabled.
features = ["all"]
# Developer entry points for the default environment.
[envs.default.scripts]
# One-time setup: install the repo's pre-commit hooks.
setup = "pre-commit install"
code-quality = "pre-commit run --all-files"
# {args:...} is Hatch's placeholder for caller-supplied arguments, with the
# text after the colon used as the default when none are given.
unit-tests = "python -m pytest {args:tests/unit}"
# Integration tests run inside Dagger; default profile targets Apache Spark.
integration-tests = "python dagger/run_dbt_spark_tests.py {args:--profile apache_spark}"
docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."
# Environment for building and checking release artifacts.
# detached = true makes it independent of the project itself (the project is
# not installed into this env — per Hatch's detached-environment mode).
[envs.build]
detached = true
dependencies = [
"wheel",
"twine",
"check-wheel-contents",
]
[envs.build.scripts]
# Run both artifact checks. The leading "- " tells Hatch to ignore a failing
# exit code for that step so the second check still runs.
check-all = [
"- check-wheel",
"- check-sdist",
]
# Metadata-check the built artifacts with twine, then smoke-install the wheel
# and confirm dbt-spark is importable/registered via pip freeze.
check-wheel = [
"twine check dist/*",
"find ./dist/dbt_spark-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
"pip freeze | grep dbt-spark",
]
# Smoke-install the built sdist (*.gz) and confirm it registered.
# NOTE(review): the first step runs check-wheel-contents against *.whl even
# though this script is named check-sdist — presumably intentional upstream
# pairing, but confirm it was not meant to live under check-wheel.
check-sdist = [
"check-wheel-contents dist/*.whl --ignore W007,W008",
"find ./dist/dbt_spark-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/",
"pip freeze | grep dbt-spark",
]
docker-prod = "docker build -f docker/Dockerfile -t dbt-spark ."