diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 49e836d9b20..ad205f3ec96 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1602,33 +1602,33 @@ jobs:
       run: |
         bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
-  ggml-ci-x64-amd-vulkan:
-    runs-on: [self-hosted, Linux, X64, AMD]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          vulkaninfo --summary
-          GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
-
-  ggml-ci-x64-amd-rocm:
-    runs-on: [self-hosted, Linux, X64, AMD]
-
-    steps:
-      - name: Clone
-        id: checkout
-        uses: actions/checkout@v4
-
-      - name: Test
-        id: ggml-ci
-        run: |
-          amd-smi static
-          GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+  # ggml-ci-x64-amd-vulkan:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         vulkaninfo --summary
+  #         GG_BUILD_VULKAN=1 bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
+
+  # ggml-ci-x64-amd-rocm:
+  #   runs-on: [self-hosted, Linux, X64, AMD]
+
+  #   steps:
+  #     - name: Clone
+  #       id: checkout
+  #       uses: actions/checkout@v4
+
+  #     - name: Test
+  #       id: ggml-ci
+  #       run: |
+  #         amd-smi static
+  #         GG_BUILD_ROCM=1 GG_BUILD_AMDGPU_TARGETS="gfx1101" bash ./ci/run.sh ~/results/llama.cpp /mnt/llama.cpp
 
   ggml-ci-mac-metal:
     runs-on: [self-hosted, macOS, ARM64]