diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 7ae04a9bd808..ebc40e927913 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -580,7 +580,7 @@ jobs:
     - name: Install dependencies for Python code generation check
       run: |
         # See more in "Installation" https://docs.buf.build/installation#tarball
-        curl -LO https://github.com/bufbuild/buf/releases/download/v1.15.0/buf-Linux-x86_64.tar.gz
+        curl -LO https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz
         mkdir -p $HOME/buf
         tar -xvzf buf-Linux-x86_64.tar.gz -C $HOME/buf --strip-components 1
         python3.9 -m pip install 'protobuf==3.19.5' 'mypy-protobuf==3.3.0'
diff --git a/python/docs/source/development/contributing.rst b/python/docs/source/development/contributing.rst
index 3b12de725460..2d58c86b15e7 100644
--- a/python/docs/source/development/contributing.rst
+++ b/python/docs/source/development/contributing.rst
@@ -120,7 +120,7 @@ Prerequisite
 
 PySpark development requires to build Spark that needs a proper JDK installed, etc. See `Building Spark `_ for more details.
 
-Note that if you intend to contribute to Spark Connect in Python, ``buf`` version ``1.15.0`` is required, see `Buf Installation `_ for more details.
+Note that if you intend to contribute to Spark Connect in Python, ``buf`` version ``1.15.1`` is required, see `Buf Installation `_ for more details.
 
 Conda
 ~~~~~
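
For contributors applying the updated guide locally, here is a minimal sketch of the install-and-verify flow, mirroring the commands in the workflow step above; the $HOME/buf install location and the final version check are illustrative assumptions, not part of this change:

    # Download and unpack buf 1.15.1, exactly as the updated workflow step does
    curl -LO https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz
    mkdir -p $HOME/buf
    tar -xvzf buf-Linux-x86_64.tar.gz -C $HOME/buf --strip-components 1

    # Sanity check (assumes the tarball ships bin/buf, which the --strip-components
    # usage above implies): the reported version should match the version the
    # contributing guide now requires
    $HOME/buf/bin/buf --version   # expected: 1.15.1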