[CI] pre-commit: auto add license headers to all YAML files #3326

Workflow file for this run

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
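# Python build workflow: builds the Sedona jars with Maven, then installs and
# tests the Python bindings against each Spark/Python combination in the matrix.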
name: Python build
on:
  push:
    branches:
      - master
    paths:
      - 'common/**'
      - 'spark/**'
      - 'spark-shaded/**'
      - 'pom.xml'
      - 'python/**'
      - '.github/workflows/python.yml'
  pull_request:
    branches:
      - '*'
    paths:
      - 'common/**'
      - 'spark/**'
      - 'spark-shaded/**'
      - 'pom.xml'
      - 'python/**'
      - '.github/workflows/python.yml'
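# The JAI_* variables pin the Java Advanced Imaging jars fetched from
# repo.osgeo.org in a later step; DO_NOT_TRACK opts out of telemetry for
# tools that honor that convention.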
env:
  MAVEN_OPTS: -Dmaven.wagon.httpconnectionManager.ttlSeconds=60
  JAI_CORE_VERSION: "1.1.3"
  JAI_CODEC_VERSION: "1.1.3"
  JAI_IMAGEIO_VERSION: "1.1"
  DO_NOT_TRACK: true
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        include:
          - spark: '3.5.0'
            scala: '2.12.8'
            python: '3.10'
            shapely: '1'
          - spark: '3.5.0'
            scala: '2.12.8'
            python: '3.10'
          - spark: '3.5.0'
            scala: '2.12.8'
            python: '3.9'
          - spark: '3.5.0'
            scala: '2.12.8'
            python: '3.8'
          - spark: '3.4.0'
            scala: '2.12.8'
            python: '3.10'
          - spark: '3.4.0'
            scala: '2.12.8'
            python: '3.9'
          - spark: '3.4.0'
            scala: '2.12.8'
            python: '3.8'
          - spark: '3.4.0'
            scala: '2.12.8'
            python: '3.7'
          - spark: '3.4.0'
            scala: '2.12.8'
            python: '3.7'
            shapely: '1'
          - spark: '3.3.0'
            scala: '2.12.8'
            python: '3.8'
    env:
      VENV_PATH: /home/runner/.local/share/virtualenvs/python-${{ matrix.python }}
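    # Later steps locate the pipenv-managed virtualenv through VENV_PATH; the
    # directory name matches the PIPENV_CUSTOM_VENV_NAME exported below.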
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: '8'
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
      - name: Cache Maven packages
        uses: actions/cache@v3
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2
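      # Build the Sedona jars (tests skipped). Bash substring expansion trims the
      # versions to the Maven profile names, e.g. ${SPARK_VERSION:0:3} turns
      # "3.5.0" into "3.5" and ${SCALA_VERSION:0:4} turns "2.12.8" into "2.12".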
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          SCALA_VERSION: ${{ matrix.scala }}
        run: |
          SPARK_COMPAT_VERSION=${SPARK_VERSION:0:3}
          mvn -q clean install -DskipTests -Dspark=${SPARK_COMPAT_VERSION} -Dscala=${SCALA_VERSION:0:4} -Dgeotools
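      # Install the Python packaging toolchain on the runner, then build the
      # package's native extensions in place so the tests can import them.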
      - run: sudo apt-get -y install python3-pip python-dev-is-python3
      - run: sudo pip3 install -U setuptools
      - run: sudo pip3 install -U wheel
      - run: sudo pip3 install -U virtualenvwrapper
      - run: python3 -m pip install pipenv
      - run: cd python; python3 setup.py build_ext --inplace
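      # Create the per-matrix virtualenv and install pyspark plus the dev
      # dependencies. Entries with shapely: '1' patch the Pipfile to pin
      # Shapely below 2.0 so both major versions stay covered.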
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          PYTHON_VERSION: ${{ matrix.python }}
          SHAPELY_VERSION: ${{ matrix.shapely }}
        run: |
          cd python
          if [ "${SHAPELY_VERSION}" == "1" ]; then
            echo "Patching Pipfile to use Shapely 1.x"
            sed -i 's/^shapely.*$/shapely="<2.0.0"/g' Pipfile
          fi
          export PIPENV_CUSTOM_VENV_NAME=python-${PYTHON_VERSION}
          pipenv --python ${PYTHON_VERSION}
          pipenv install pyspark==${SPARK_VERSION}
          pipenv install --dev
          pipenv graph
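      # Fetch the JAI jars (hosted on repo.osgeo.org rather than Maven Central;
      # presumably required by the GeoTools-backed raster functions) and place
      # them in pyspark's jars directory so they land on the classpath.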
      - env:
          PYTHON_VERSION: ${{ matrix.python }}
        run: |
          wget --retry-connrefused --waitretry=10 --read-timeout=20 --timeout=15 --tries=5 https://repo.osgeo.org/repository/release/javax/media/jai_core/${JAI_CORE_VERSION}/jai_core-${JAI_CORE_VERSION}.jar
          wget --retry-connrefused --waitretry=10 --read-timeout=20 --timeout=15 --tries=5 https://repo.osgeo.org/repository/release/javax/media/jai_codec/${JAI_CODEC_VERSION}/jai_codec-${JAI_CODEC_VERSION}.jar
          wget --retry-connrefused --waitretry=10 --read-timeout=20 --timeout=15 --tries=5 https://repo.osgeo.org/repository/release/javax/media/jai_imageio/${JAI_IMAGEIO_VERSION}/jai_imageio-${JAI_IMAGEIO_VERSION}.jar
          mv -v jai_core-${JAI_CORE_VERSION}.jar ${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark/jars
          mv -v jai_codec-${JAI_CODEC_VERSION}.jar ${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark/jars
          mv -v jai_imageio-${JAI_IMAGEIO_VERSION}.jar ${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark/jars
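      # Copy the sedona-spark-shaded jar built earlier into pyspark's jars
      # directory so the tests run against the freshly built code.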
      - env:
          PYTHON_VERSION: ${{ matrix.python }}
        run: find spark-shaded/target -name 'sedona-*.jar' -exec cp {} ${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark/jars/ \;
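      # Run the Python test suite inside the virtualenv, pointing SPARK_HOME at
      # the pip-installed pyspark distribution.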
      - env:
          PYTHON_VERSION: ${{ matrix.python }}
        run: |
          export SPARK_HOME=${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark
          cd python
          source ${VENV_PATH}/bin/activate
          pytest tests
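      # Spark Connect smoke test: skipped when the installed pyspark does not
      # ship sbin/start-connect-server.sh (older Spark releases do not).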
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          PYTHON_VERSION: ${{ matrix.python }}
        run: |
          if [ ! -f "${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark/sbin/start-connect-server.sh" ]
          then
            echo "Skipping connect tests for Spark ${SPARK_VERSION}"
            exit
          fi
          export SPARK_HOME=${VENV_PATH}/lib/python${PYTHON_VERSION}/site-packages/pyspark
          export SPARK_REMOTE=local
          cd python
          source ${VENV_PATH}/bin/activate
          pip install "pyspark[connect]==${SPARK_VERSION}"
          pytest tests/sql/test_dataframe_api.py