feat: add back ptype check for BitPackedArray (#872) #586
Workflow file for this run

name: Benchmarks

on:
  push:
    branches: [ develop ]
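
# github-action-benchmark (used in the last step) needs `contents: write` to
# push results to the gh-pages-bench branch; `deployments: write` is
# presumably for the associated GitHub Pages deployment.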
permissions:
  actions: read
  contents: write
  deployments: write

jobs:
  bench:
    strategy:
      matrix:
        version:
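          # Each entry pairs a Cargo bench target (the `id` passed to
          # `cargo bench --bench` below) with the display name used for its
          # chart when the results are published.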
          - id: tpch_benchmark
            # This was the original name, which we must preserve until we rename
            # all of the "Vortex benchmarks" records on the gh-pages-bench branch.
            name: Vortex benchmarks
          - id: compress_benchmark
            name: Vortex Compression
          - id: bytes_at
            name: Vortex bytes_at
          - id: datafusion_benchmark
            name: Vortex DataFusion
          - id: random_access
            name: Vortex random_access
    runs-on: ubuntu-latest-large
    steps:
      - uses: actions/checkout@v4
      - uses: ./.github/actions/cleanup
      - uses: ./.github/actions/setup-rust
      - uses: ./.github/actions/setup-python
      # The compression benchmarks rely on DuckDB being installed to convert CSV to Parquet.
      - name: Install DuckDB
        uses: opt-nc/setup-duckdb-action@v1
        with:
          version: v1.0.0
      - name: Run benchmark
        shell: bash
        run: cargo bench --bench ${{ matrix.version.id }} -- --output-format bencher | tee ${{ matrix.version.id }}.txt
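      # `--output-format bencher` asks the harness (presumably Criterion) to emit
      # libtest-style lines such as `test tpch ... bench: 123 ns/iter (+/- 4)`,
      # which the `cargo` tool of github-action-benchmark can parse; `tee` echoes
      # the output to the log while saving it to a file for the next step.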
      - name: Store benchmark result
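        # `!cancelled()` lets this step run even if the benchmark step failed,
        # skipping it only when the workflow run was cancelled.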
        if: '!cancelled()'
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: ${{ matrix.version.name }}
          tool: 'cargo'
          gh-pages-branch: gh-pages-bench
          github-token: ${{ secrets.GITHUB_TOKEN }}
          output-file-path: ${{ matrix.version.id }}.txt
          summary-always: true
          auto-push: true
          fail-on-alert: false
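      # With auto-push enabled, the action commits each new data point to
      # gh-pages-bench, updating the published charts; fail-on-alert: false
      # means a regression past the alert threshold does not fail the job.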
    env:
      # AWS credentials for the R2 storage tests
      AWS_BUCKET: vortex-test
      AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}