-
Notifications
You must be signed in to change notification settings - Fork 26
/
Makefile
80 lines (63 loc) · 3.62 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
.PHONY: test-coverage
# Run the *_test.py suite under coverage, then print a terminal report and
# render the HTML report. PySpark is pinned to python3 for driver and workers.
test-coverage:
	export PYSPARK_PYTHON=python3 && export PYSPARK_DRIVER_PYTHON=python3 && \
	python3 -m coverage run -m pytest -o 'python_files=*_test.py'
	python3 -m coverage report -m
	mkdir -p build
	rm -rf build/coverage
	python3 -m coverage html
# Run the default pytest suite with python3 pinned for the PySpark driver
# and worker processes.
unit-test:
	PYSPARK_PYTHON=python3 PYSPARK_DRIVER_PYTHON=python3 python3 -m pytest
# Note: env vars named PG_URL and CLICKHOUSE_URL must be set to run the e2e test.
.PHONY: e2e-test
# Build the e2e test image; the connection URLs are quoted because they may
# contain characters the shell would otherwise interpret (&, ?, spaces).
e2e-test:
	cd test && docker build . --build-arg PG_URL="${PG_URL}" --build-arg CLICKHOUSE_URL="${CLICKHOUSE_URL}"
.PHONY: echo-var
# Print the e2e connection URLs (debug helper). Quoted so the shell does not
# word-split or glob-expand URL characters such as ? and &.
echo-var:
	echo "${PG_URL}" "${CLICKHOUSE_URL}"
# Run the sample Spark ETL through easy_sql's data_process entry point.
e2e-test-spark:
	python3 -m easy_sql.data_process -f test/sample_etl.spark.sql
# Run the sample Postgres ETL (presumably needs a reachable Postgres — see the
# PG_URL note on e2e-test; verify against the sample SQL's config).
e2e-test-postgres:
	python3 -m easy_sql.data_process -f test/sample_etl.postgres.sql
# Run the sample ClickHouse ETL.
e2e-test-clickhouse:
	python3 -m easy_sql.data_process -f test/sample_etl.clickhouse.sql
# Run the sample Flink ETL against Postgres.
e2e-test-flink-postgres:
	python3 -m easy_sql.data_process -f test/sample_etl.flink.postgres.sql
# Run the Flink streaming samples in sequence: Postgres CDC, CDC with multiple
# sinks, and the Hudi sink variant. A failure stops the remaining samples.
e2e-test-flink-streaming:
	python3 -m easy_sql.data_process -f test/sample_etl.flink.postgres-cdc.sql
	python3 -m easy_sql.data_process -f test/sample_etl.flink.postgres-cdc.multi-sink.sql
	python3 -m easy_sql.data_process -f test/sample_etl.flink.postgres-hudi.sql
# Run the Flink-on-Hive sample (see prepare-flink-hadoop and
# download-flink-jars for the required local setup).
e2e-test-flink-hive:
	python3 -m easy_sql.data_process -f test/sample_etl.flink.hive.sql
.PHONY: test-coverage-all
# Run the full suite (files matching *test.py, i.e. unit + integration) under
# coverage and emit terminal + XML reports. Requires PG_URL and CLICKHOUSE_URL
# to be set; they are quoted since URLs may contain shell metacharacters.
test-coverage-all:
	export PYSPARK_PYTHON=python3 && export PYSPARK_DRIVER_PYTHON=python3 && \
	PG_URL="${PG_URL}" CLICKHOUSE_URL="${CLICKHOUSE_URL}" python3 -m coverage run -m pytest -o 'python_files=*test.py'
	python3 -m coverage report -m
	python3 -m coverage xml
.PHONY: package-zip
# Build build/easysql.zip containing the easy_sql package, excluding
# __pycache__ directories. rm -f / mkdir -p make the target idempotent
# without relying on make's '-' error suppression.
package-zip:
	rm -f build/easysql.zip
	mkdir -p build
	zip -r --exclude='*__pycache__*' build/easysql.zip easy_sql
# Build the distributable artifacts (sdist + wheel) into ./dist via poetry.
package-pip:
	poetry build
# Rebuild and publish to TestPyPI (requires a 'testpypi' repository configured
# in poetry). ./dist is cleared first so only the fresh build is uploaded.
upload-test-pip:
	rm -rf ./dist
	poetry publish -r testpypi --build
.PHONY: install-test-pip
# Replace any locally installed copy with the latest TestPyPI release.
# -y makes the uninstall non-interactive (the bare command prompts for
# confirmation and hangs in CI); the '-' prefix keeps the target working when
# the package was never installed. Note: pip normalizes '_' and '-', so
# easy_sql-easy_sql and easy-sql-easy-sql refer to the same project.
install-test-pip:
	-pip3 uninstall -y easy_sql-easy_sql
	python3 -m pip install --index-url https://test.pypi.org/simple/ 'easy-sql-easy-sql[cli]'
# Rebuild and publish to PyPI. ./dist is cleared first so only the fresh
# build is uploaded.
upload-pip:
	rm -rf ./dist
	poetry publish --build
# Hadoop release fetched for the Flink/Hive e2e setup. Overridable:
#   make prepare-flink-hadoop HADOOP_VERSION=x.y.z
HADOOP_VERSION ?= 3.3.5

.PHONY: prepare-flink-hadoop
# Download and unpack Hadoop under test/flink/tools/hadoop. Idempotent: skipped
# entirely when the tarball is already present.
prepare-flink-hadoop:
	test -f test/flink/tools/hadoop/hadoop-$(HADOOP_VERSION).tar.gz || ( \
	mkdir -pv test/flink/tools/hadoop && \
	wget -P test/flink/tools/hadoop https://dlcdn.apache.org/hadoop/common/hadoop-$(HADOOP_VERSION)/hadoop-$(HADOOP_VERSION).tar.gz && \
	cd test/flink/tools/hadoop && \
	tar xf hadoop-$(HADOOP_VERSION).tar.gz )
# Destination directory for the connector jars needed by the Flink e2e tests.
FLINK_JARS := test/flink/jars
# Maven Central root. The original mixed repo1.maven.org and
# repo.maven.apache.org (mirrors of the same artifacts); use one consistently.
MAVEN2 := https://repo1.maven.org/maven2

.PHONY: download-flink-jars
# Fetch each connector jar once; `test -f … || wget` makes the target
# idempotent, so re-running only downloads what is missing.
download-flink-jars:
	test -f $(FLINK_JARS)/flink-connector-jdbc-1.15.1.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/apache/flink/flink-connector-jdbc/1.15.1/flink-connector-jdbc-1.15.1.jar
	test -f $(FLINK_JARS)/flink-sql-connector-hive-3.1.2_2.12-1.15.1.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/apache/flink/flink-sql-connector-hive-3.1.2_2.12/1.15.1/flink-sql-connector-hive-3.1.2_2.12-1.15.1.jar
	test -f $(FLINK_JARS)/postgresql-42.2.14.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/postgresql/postgresql/42.2.14/postgresql-42.2.14.jar
	test -f $(FLINK_JARS)/flink-sql-connector-postgres-cdc-2.3.0.jar || wget -P $(FLINK_JARS) $(MAVEN2)/com/ververica/flink-sql-connector-postgres-cdc/2.3.0/flink-sql-connector-postgres-cdc-2.3.0.jar
	test -f $(FLINK_JARS)/hudi-flink1.15-bundle-0.12.2.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/apache/hudi/hudi-flink1.15-bundle/0.12.2/hudi-flink1.15-bundle-0.12.2.jar
	test -f $(FLINK_JARS)/flink-sql-connector-kafka-1.15.2.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/apache/flink/flink-sql-connector-kafka/1.15.2/flink-sql-connector-kafka-1.15.2.jar
	test -f $(FLINK_JARS)/kafka-clients-3.3.2.jar || wget -P $(FLINK_JARS) $(MAVEN2)/org/apache/kafka/kafka-clients/3.3.2/kafka-clients-3.3.2.jar