diff --git a/.github/actions/locate-vcvarsall-and-setup-env/action.yml b/.github/actions/locate-vcvarsall-and-setup-env/action.yml index a5291e22a1dca..c4fdc48a7bd63 100644 --- a/.github/actions/locate-vcvarsall-and-setup-env/action.yml +++ b/.github/actions/locate-vcvarsall-and-setup-env/action.yml @@ -14,7 +14,7 @@ runs: steps: - name: Setup VCPKG - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' diff --git a/.github/workflows/android.yml b/.github/workflows/android.yml index 321c900e3fe21..b788bb792b23d 100644 --- a/.github/workflows/android.yml +++ b/.github/workflows/android.yml @@ -37,7 +37,7 @@ jobs: ndk-version: 28.0.13004108 - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -122,7 +122,7 @@ jobs: architecture: x64 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' diff --git a/.github/workflows/ios.yml b/.github/workflows/ios.yml index edba798ecbd49..0d2046b980783 100644 --- a/.github/workflows/ios.yml +++ b/.github/workflows/ios.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v5 with: submodules: false - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: 735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc diff --git a/.github/workflows/linux-wasm-ci-build-and-test-workflow.yml b/.github/workflows/linux-wasm-ci-build-and-test-workflow.yml index 0e24ee5e8fcb6..c30a8cb023f50 100644 --- a/.github/workflows/linux-wasm-ci-build-and-test-workflow.yml +++ b/.github/workflows/linux-wasm-ci-build-and-test-workflow.yml @@ -56,7 +56,7 @@ jobs: python-version: "3.12" architecture: ${{ env.buildArch }} - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' diff --git a/.github/workflows/linux_cuda_ci.yml b/.github/workflows/linux_cuda_ci.yml index fa7507968fc67..9a9dace777c83 100644 --- a/.github/workflows/linux_cuda_ci.yml +++ b/.github/workflows/linux_cuda_ci.yml @@ -50,7 +50,7 @@ jobs: - name: Checkout code uses: actions/checkout@v5 - - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda @@ -93,7 +93,7 @@ jobs: # So build.py --build_dir build/Release inside the container correctly finds the 
artifacts. - name: Test ONNX Runtime id: test_step - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} build_config: Release diff --git a/.github/workflows/linux_minimal_build.yml b/.github/workflows/linux_minimal_build.yml index b49f0b68e7d0f..92cdbb70e9858 100644 --- a/.github/workflows/linux_minimal_build.yml +++ b/.github/workflows/linux_minimal_build.yml @@ -37,7 +37,7 @@ jobs: with: node-version: 20 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' @@ -47,7 +47,7 @@ jobs: disable-terrapin: 'true' - name: Build Full ORT and Prepare Test Files - uses: microsoft/onnxruntime-github-actions/build-and-prep-ort-files@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-and-prep-ort-files@v0.0.9 - name: Upload Test Data Artifact uses: actions/upload-artifact@v4 @@ -74,7 +74,7 @@ jobs: node-version: 20 - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -85,7 +85,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Run Build 2 (Update) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -100,7 +100,7 @@ jobs: --enable_training_ops - name: Run Build 2 (Build) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -131,8 +131,8 @@ jobs: - uses: actions/setup-node@v4 with: node-version: 20 - - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' @@ -142,7 +142,7 @@ jobs: disable-terrapin: 'true' - name: Build Full ORT and Prepare Test Files - uses: microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.9 with: reduced-ops-config-file: required_ops.ort_models.config enable-custom-ops: 'true' @@ -166,7 +166,7 @@ jobs: with: node-version: 20 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' @@ -175,7 +175,7 @@ jobs: add-cmake-to-path: 'true' disable-terrapin: 'true' - name: Build Full ORT and Prepare Test Files - uses: 
microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.9 with: reduced-ops-config-file: required_ops_and_types.ort_models.config enable-type-reduction: 'true' @@ -198,7 +198,7 @@ jobs: with: node-version: 20 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: '735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc' @@ -208,7 +208,7 @@ jobs: disable-terrapin: 'true' - name: Build Full ORT and Prepare Test Files - uses: microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-minimal-ort-and-run-tests@v0.0.9 with: globally_allowed_types: 'bool,float,int8_t,uint8_t' enable-type-reduction: 'true' @@ -233,7 +233,7 @@ jobs: node-version: 20 - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -245,7 +245,7 @@ jobs: - name: Run Build 5 (Update) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -258,7 +258,7 @@ jobs: --minimal_build extended - name: Run Build 5 (Build) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -270,7 +270,7 @@ jobs: --use_binskim_compliant_compile_flags --minimal_build extended - name: Run Build 5 (Test) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -297,7 +297,7 @@ jobs: submodules: false - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -314,7 +314,7 @@ jobs: touch ${{ runner.temp }}/.test_data/include_no_operators.config - name: Run Build 6a (Update) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -330,7 +330,7 @@ jobs: --cmake_extra_defines onnxruntime_BUILD_UNIT_TESTS=OFF - name: Run Build 6a (Build) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -347,7 +347,7 @@ jobs: - name: Run Build 6a (Test) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: 
microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -383,7 +383,7 @@ jobs: touch ${{ runner.temp }}/.test_data/include_no_operators.config - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -394,7 +394,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Run Build 6b (Update) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -413,7 +413,7 @@ jobs: --cmake_extra_defines onnxruntime_BUILD_UNIT_TESTS=OFF - name: Run Build 6b (Build) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -452,7 +452,7 @@ jobs: touch ${{ runner.temp }}/.test_data/include_no_operators.config - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -469,7 +469,7 @@ jobs: touch ${{ runner.temp }}/.test_data/include_no_operators.config - name: Run Build 6c (Update) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -488,7 +488,7 @@ jobs: --cmake_extra_defines onnxruntime_BUILD_UNIT_TESTS=OFF - name: Run Build 6c (Build) - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} @@ -531,7 +531,7 @@ jobs: path: ${{ runner.temp }}/.test_data/ - name: Get Docker Image using Action - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile diff --git a/.github/workflows/linux_tensorrt_ci.yml b/.github/workflows/linux_tensorrt_ci.yml index 4574f8ee6ebd6..043eb0b218e2f 100644 --- a/.github/workflows/linux_tensorrt_ci.yml +++ b/.github/workflows/linux_tensorrt_ci.yml @@ -52,7 +52,7 @@ jobs: # --- Build the Docker image needed for testing --- - name: Build Docker Image for Testing - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda @@ -95,7 +95,7 @@ jobs: # So build.py --build_dir build/Release inside the container correctly finds the artifacts. 
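For reference, a minimal Python sketch of the directory convention this comment depends on: build.py resolves its per-config output directory as `<build_dir>/<config>`, so the container volume mounts in these jobs have to reproduce that layout. This is an illustrative aside, not part of the workflow:

```python
import os
import sys

def get_config_build_dir(build_dir: str, config: str) -> str:
    # build.py treats <build_dir>/<config> as the per-config output directory;
    # the CI mounts must recreate that layout so the test step finds the
    # previously built binaries.
    return os.path.join(build_dir, config)

if __name__ == "__main__":
    config_dir = get_config_build_dir("build/Release", "Release")
    if not os.path.isdir(config_dir):
        sys.exit(f"expected artifacts under {config_dir}, but the directory is missing")
    print(f"artifacts found in {config_dir}")
```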
- name: Test ONNX Runtime id: test_step - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} build_config: Release diff --git a/.github/workflows/linux_webgpu.yml b/.github/workflows/linux_webgpu.yml index 9207e92466a50..f7161754895c5 100644 --- a/.github/workflows/linux_webgpu.yml +++ b/.github/workflows/linux_webgpu.yml @@ -51,7 +51,7 @@ jobs: # - name: Checkout code # uses: actions/checkout@v4 - # - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + # - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 # id: build_docker_image_step # with: # dockerfile: ${{ github.workspace }}/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_webgpu @@ -91,7 +91,7 @@ jobs: # - name: Test ONNX Runtime # id: test_step - # uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + # uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 # with: # docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} # build_config: Release diff --git a/.github/workflows/mac.yml b/.github/workflows/mac.yml index 49e74306690de..af2b36c870201 100644 --- a/.github/workflows/mac.yml +++ b/.github/workflows/mac.yml @@ -65,7 +65,7 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v5 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: 735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc @@ -113,7 +113,7 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v5 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: 735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc diff --git a/.github/workflows/macos-ci-build-and-test-workflow.yml b/.github/workflows/macos-ci-build-and-test-workflow.yml index e7735fb0112a8..281538336b0c1 100644 --- a/.github/workflows/macos-ci-build-and-test-workflow.yml +++ b/.github/workflows/macos-ci-build-and-test-workflow.yml @@ -62,7 +62,7 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v5 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: 735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc diff --git a/.github/workflows/publish-objectivec-apidocs.yml b/.github/workflows/publish-objectivec-apidocs.yml index e213be990a435..7f1611fdff315 100644 --- a/.github/workflows/publish-objectivec-apidocs.yml +++ b/.github/workflows/publish-objectivec-apidocs.yml @@ -24,7 +24,7 @@ jobs: runs-on: macos-latest steps: - uses: actions/checkout@v5 - - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.8 + - uses: microsoft/onnxruntime-github-actions/setup-build-tools@v0.0.9 with: vcpkg-version: '2025.06.13' vcpkg-hash: 735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc diff --git 
a/.github/workflows/reusable_linux_build.yml b/.github/workflows/reusable_linux_build.yml index 504c6627f4ac1..1a9c0e0a72031 100644 --- a/.github/workflows/reusable_linux_build.yml +++ b/.github/workflows/reusable_linux_build.yml @@ -83,7 +83,7 @@ jobs: python-version: ${{ inputs.python_version }} - name: Build Docker Image (${{ inputs.architecture }} / ${{ inputs.build_config }}) - uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.8 + uses: microsoft/onnxruntime-github-actions/build-docker-image@v0.0.9 id: build_docker_image_step with: dockerfile: ${{ github.workspace }}/${{ inputs.dockerfile_path }} @@ -97,7 +97,7 @@ jobs: # ------------- Update Step (CMake Generation) ------------- - name: Generate Build Files (CMake) (${{ inputs.architecture }} / ${{ inputs.build_config }}) id: update_step - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} build_config: ${{ inputs.build_config }} @@ -109,7 +109,7 @@ jobs: # ------------- Build Step (Compilation) ------------- - name: Build ONNX Runtime (${{ inputs.architecture }} / ${{ inputs.build_config }}) id: build_step - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} build_config: ${{ inputs.build_config }} @@ -122,7 +122,7 @@ jobs: - name: Test ONNX Runtime (${{ inputs.architecture }} / ${{ inputs.build_config }}) id: test_step if: inputs.run_tests == true - uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.8 + uses: microsoft/onnxruntime-github-actions/run-build-script-in-docker@v0.0.9 with: docker_image: ${{ steps.build_docker_image_step.outputs.full-image-name }} build_config: ${{ inputs.build_config }} diff --git a/.github/workflows/windows_qnn_x64.yml b/.github/workflows/windows_qnn_x64.yml new file mode 100644 index 0000000000000..4c08d543cefd9 --- /dev/null +++ b/.github/workflows/windows_qnn_x64.yml @@ -0,0 +1,82 @@ +name: Windows x64 QNN CI Pipeline + +on: + push: + branches: + - main + - rel-* + pull_request: + branches: + - main + - rel-* + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.ref || github.sha }} + cancel-in-progress: true + +jobs: + build_test_qnn_ep: + name: Windows x64 QNN CI Pipeline (${{ matrix.QnnLibKind }}) + runs-on: ["self-hosted", "1ES.Pool=onnxruntime-github-vs2022-mms"] + timeout-minutes: 120 + strategy: + matrix: + QnnLibKind: [shared_lib, static_lib] + env: + AZCOPY_AUTO_LOGIN_TYPE: MSI + AZCOPY_MSI_CLIENT_ID: 63b63039-6328-442f-954b-5a64d124e5b4 + DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true + ALLOW_RELEASED_ONNX_OPSET_ONLY: '1' + + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + architecture: x64 + + - name: Locate vcvarsall and Setup Env + uses: ./.github/actions/locate-vcvarsall-and-setup-env + with: + architecture: x64 + + - name: Download QNN SDK + working-directory: ${{ runner.temp }} + run: | + azcopy.exe cp --recursive https://lotusscus.blob.core.windows.net/models/qnnsdk/qnn-v2.37.1.250807 . 
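As an aside on the new QNN workflow: the later test steps pass `backend_path|%QNN_SDK_ROOT%\lib\x86_64-windows-msvc\QnnCpu.dll` to onnx_test_runner, so the downloaded SDK must contain that DLL. A small illustrative Python sketch of that sanity check (the script is not part of the pipeline; paths mirror the steps above):

```python
import os
from pathlib import Path

QNN_SDK_DIRNAME = "qnn-v2.37.1.250807"  # matches the azcopy download above

def qnn_cpu_backend_path(runner_temp: str) -> Path:
    """Locate the QnnCpu.dll that the onnx_test_runner steps reference via backend_path."""
    sdk_root = Path(runner_temp) / QNN_SDK_DIRNAME
    backend = sdk_root / "lib" / "x86_64-windows-msvc" / "QnnCpu.dll"
    if not backend.is_file():
        raise FileNotFoundError(f"QNN CPU backend not found: {backend}")
    return backend

if __name__ == "__main__":
    backend = qnn_cpu_backend_path(os.environ.get("RUNNER_TEMP", r"C:\actions\_temp"))
    print(f"backend_path|{backend}")
```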
+ dir + shell: pwsh + + - name: Set QNN_SDK_ROOT environment variable + shell: pwsh + run: | + $qnn_sdk_path = Join-Path $env:RUNNER_TEMP "qnn-v2.37.1.250807" + echo "QNN_SDK_ROOT=$qnn_sdk_path" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + echo "QNN SDK Root: $qnn_sdk_path" + dir $qnn_sdk_path + + - name: Build and Test + shell: cmd + run: | + python ${{ github.workspace }}\tools\ci_build\build.py --config RelWithDebInfo --build_dir ${{ runner.temp }}\build --cmake_generator "Visual Studio 17 2022" --build_java --build_shared_lib --use_qnn ${{ matrix.QnnLibKind }} --qnn_home %QNN_SDK_ROOT% --use_binskim_compliant_compile_flags --update --build --test --enable_onnx_tests --parallel + + - name: Run ONNX Tests + shell: cmd + working-directory: ${{ runner.temp }}\build\RelWithDebInfo\RelWithDebInfo + run: | + .\onnx_test_runner -j 1 -e qnn -i "backend_path|%QNN_SDK_ROOT%\lib\x86_64-windows-msvc\QnnCpu.dll" ${{ github.workspace }}\cmake\external\onnx\onnx\backend\test\data\node + + - name: Run float32 model tests + shell: cmd + working-directory: ${{ runner.temp }}\build\RelWithDebInfo\RelWithDebInfo + run: | + rem This step assumes the model data exists at C:\data\float32_models on the runner + if exist C:\data\float32_models ( + .\onnx_test_runner -j 1 -e qnn -i "backend_path|%QNN_SDK_ROOT%\lib\x86_64-windows-msvc\QnnCpu.dll" C:\data\float32_models + ) else ( + echo "Skipping float32 model tests: C:\data\float32_models not found." + ) diff --git a/cmake/onnxruntime_mlas.cmake b/cmake/onnxruntime_mlas.cmake index 07e61fb210036..3530ab03c822a 100644 --- a/cmake/onnxruntime_mlas.cmake +++ b/cmake/onnxruntime_mlas.cmake @@ -790,12 +790,6 @@ if (WIN32) endif() endif() -if (PLATFORM_NAME STREQUAL "macabi") - # Needed for maccatalyst C compilation - # i.e. the flags below add "--target=x86_64-apple-ios14.0-macabi -ffunction-sections -fdata-sections" - target_compile_options(onnxruntime_mlas PRIVATE ${CMAKE_C_FLAGS}) -endif() - if (NOT onnxruntime_BUILD_SHARED_LIB) install(TARGETS onnxruntime_mlas EXPORT ${PROJECT_NAME}Targets ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} diff --git a/java/build.gradle b/java/build.gradle index 2d43d1ead13f0..64a31c89ad322 100644 --- a/java/build.gradle +++ b/java/build.gradle @@ -3,8 +3,7 @@ plugins { id 'maven-publish' id 'signing' id 'jacoco' - id "com.diffplug.spotless" version "6.25.0" - id "net.linguica.maven-settings" version "0.5" + id "com.diffplug.spotless" version "7.2.1" } allprojects { @@ -14,17 +13,9 @@ allprojects { } project.group = "com.microsoft.onnxruntime" -version = rootProject.file('../VERSION_NUMBER').text.trim() - // cmake runs will inform us of the build directory of the current run def cmakeBuildDir = System.properties['cmakeBuildDir'] def useCUDA = System.properties['USE_CUDA'] -def useROCM = System.properties['USE_ROCM'] - -def adoArtifact = project.findProperty('adoArtifact') -def adoAccessToken = project.findProperty('adoAccessToken') -// Only publish to ADO feed if all two properties are set -def publishToAdo = adoArtifact != null && adoAccessToken != null boolean enableTrainingApis = (System.properties['ENABLE_TRAINING_APIS'] ?: "0") == "1" def cmakeJavaDir = "${cmakeBuildDir}/java" @@ -33,21 +24,14 @@ def cmakeNativeJniDir = "${cmakeJavaDir}/native-jni" def cmakeNativeTestDir = "${cmakeJavaDir}/native-test" def cmakeBuildOutputDir = "${cmakeJavaDir}/build" -def mavenUser = System.properties['mavenUser'] -def mavenPwd = System.properties['mavenPwd'] - def tmpArtifactId = enableTrainingApis ? 
project.name + "-training" : project.name -def mavenArtifactId = (useCUDA == null && useROCM == null) ? tmpArtifactId : tmpArtifactId + "_gpu" +def mavenArtifactId = (useCUDA == null) ? tmpArtifactId : tmpArtifactId + "_gpu" def defaultDescription = 'ONNX Runtime is a performance-focused inference engine for ONNX (Open Neural Network Exchange) models.' def trainingDescription = 'ONNX Runtime Training is a training and inference package for ONNX ' + '(Open Neural Network Exchange) models. This package is targeted for Learning on The Edge aka On-Device Training ' + 'See https://github.com/microsoft/onnxruntime-training-examples/tree/master/on_device_training for more details.' -// We need to have a custom settings.xml so codeql can bypass the need for settings.security.xml -mavenSettings { - userSettingsFileName = "${projectDir}/settings.xml" -} java { sourceCompatibility = JavaVersion.VERSION_17 @@ -202,16 +186,27 @@ test { systemProperties System.getProperties().subMap([ 'ENABLE_TRAINING_APIS', 'JAVA_FULL_TEST', + 'USE_ACL', + 'USE_ARMNN', + 'USE_AZURE', + 'USE_CANN', 'USE_COREML', 'USE_CUDA', 'USE_DML', 'USE_DNNL', + 'USE_MIGRAPHX', + 'USE_NNAPI', + 'USE_NV', 'USE_OPENVINO', - 'USE_ROCM', - 'USE_TENSORRT', 'USE_QNN', - 'USE_XNNPACK', + 'USE_RKNPU', + 'USE_SNPE', + 'USE_TENSORRT', + 'USE_VITISAI', + 'USE_VSINPU', 'USE_WEBGPU', + 'USE_WEBNN', + 'USE_XNNPACK', ]) testLogging { events "passed", "skipped", "failed" @@ -233,13 +228,9 @@ publishing { publications { maven(MavenPublication) { groupId = project.group - if(publishToAdo) { - artifactId = 'onnxruntime_gpu' - artifact (adoArtifact) - } else { - artifactId = mavenArtifactId - from components.java - } + artifactId = mavenArtifactId + from components.java + version = project.version pom { name = enableTrainingApis ? 'onnxruntime-training' : 'onnx-runtime' @@ -270,29 +261,6 @@ publishing { } } } - repositories { - if (publishToAdo) { - maven { - url "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/${System.getenv('ADOFeedName')}/maven/v1" - name System.getenv('ADOFeedName') - authentication { - basic(BasicAuthentication) - } - credentials { - username 'aiinfra' - password "${project.findProperty('adoAccessToken')}" - } - } - } else { - maven { - url 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' - credentials { - username mavenUser - password mavenPwd - } - } - } - } } // Generates a task signMavenPublication that will // build all artifacts. 
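To make the artifact-naming change above easier to follow, here is a short Python sketch of the selection logic the Gradle script now implements: ROCm no longer participates, training builds get a `-training` suffix, and only a CUDA-enabled build switches to the `_gpu` artifact. The `project_name` parameter is a placeholder, not a Gradle property:

```python
def maven_artifact_id(project_name: str, enable_training_apis: bool, use_cuda: bool) -> str:
    # Mirrors build.gradle: training builds append "-training",
    # and only USE_CUDA flips the artifact to the "_gpu" variant.
    base = f"{project_name}-training" if enable_training_apis else project_name
    return f"{base}_gpu" if use_cuda else base

assert maven_artifact_id("onnxruntime", False, False) == "onnxruntime"
assert maven_artifact_id("onnxruntime", False, True) == "onnxruntime_gpu"
assert maven_artifact_id("onnxruntime", True, False) == "onnxruntime-training"
```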
@@ -300,12 +268,17 @@ signing { // Queries env vars: // ORG_GRADLE_PROJECT_signingKey // ORG_GRADLE_PROJECT_signingPassword but can be changed to properties - def signingKey = findProperty("signingKey") - def signingPassword = findProperty("signingPassword") - // Skip signing if no key is provided - if (signingKey != null && signingPassword != null) { - useInMemoryPgpKeys(signingKey, signingPassword) - sign publishing.publications.maven - sign publishing.publications.mavenAdo - } + def signingKey = findProperty("signingKey") + def signingPassword = findProperty("signingPassword") + // Skip signing if no key is provided + if (signingKey != null && signingPassword != null) { + useInMemoryPgpKeys(signingKey, signingPassword) + sign publishing.publications.maven + } +} + +tasks.named('generatePomFileForMavenPublication') { + doFirst { + println "AGENT_LOG: Generating POM for version: ${project.version}" + } } diff --git a/java/src/test/java/ai/onnxruntime/InferenceTest.java b/java/src/test/java/ai/onnxruntime/InferenceTest.java index c3f9d345078fe..c202b2a9f80e0 100644 --- a/java/src/test/java/ai/onnxruntime/InferenceTest.java +++ b/java/src/test/java/ai/onnxruntime/InferenceTest.java @@ -693,12 +693,6 @@ public void testCUDA() throws OrtException { runProvider(OrtProvider.CUDA); } - @Test - @EnabledIfSystemProperty(named = "USE_ROCM", matches = "1") - public void testROCM() throws OrtException { - runProvider(OrtProvider.ROCM); - } - @Test @EnabledIfSystemProperty(named = "USE_TENSORRT", matches = "1") public void testTensorRT() throws OrtException { @@ -725,6 +719,18 @@ public void testDNNL() throws OrtException { runProvider(OrtProvider.DNNL); } + @Test + @EnabledIfSystemProperty(named = "USE_MIGRAPHX", matches = "1") + public void testMIGRAPHX() throws OrtException { + runProvider(OrtProvider.MI_GRAPH_X); + } + + @Test + @EnabledIfSystemProperty(named = "USE_NNAPI", matches = "1") + public void testNNAPI() throws OrtException { + runProvider(OrtProvider.NNAPI); + } + @Test @EnabledIfSystemProperty(named = "USE_XNNPACK", matches = "1") public void testXNNPACK() throws OrtException { diff --git a/onnxruntime/test/platform/apple/apple_package_test/Podfile.template b/onnxruntime/test/platform/apple/apple_package_test/Podfile.template index 9abec2242502f..b6b8b8aa02a51 100644 --- a/onnxruntime/test/platform/apple/apple_package_test/Podfile.template +++ b/onnxruntime/test/platform/apple/apple_package_test/Podfile.template @@ -15,7 +15,7 @@ if ENV['SKIP_MACOS_TEST'] != 'true' # Comment the next line if you don't want to use dynamic frameworks use_frameworks! - platform :osx, '13.3' + platform :osx, '13.4' target 'macos_package_testUITests' do inherit! 
:search_paths diff --git a/tools/ci_build/build.py b/tools/ci_build/build.py index 4e7e03af84302..d22c8587a82b5 100644 --- a/tools/ci_build/build.py +++ b/tools/ci_build/build.py @@ -838,8 +838,6 @@ def generate_build_tree( if is_macOS() and not args.android: add_default_definition(cmake_extra_defines, "CMAKE_OSX_ARCHITECTURES", args.osx_arch) - if args.apple_deploy_target: - cmake_args += ["-DCMAKE_OSX_DEPLOYMENT_TARGET=" + args.apple_deploy_target] # Code sign the binaries, if the code signing development identity and/or team id are provided if args.xcode_code_signing_identity: cmake_args += ["-DCMAKE_XCODE_ATTRIBUTE_CODE_SIGN_IDENTITY=" + args.xcode_code_signing_identity] @@ -930,7 +928,6 @@ def generate_build_tree( cmake_args += [ "-Donnxruntime_BUILD_SHARED_LIB=ON", "-DCMAKE_OSX_SYSROOT=" + args.apple_sysroot, - "-DCMAKE_OSX_DEPLOYMENT_TARGET=" + args.apple_deploy_target, # we do not need protoc binary for ios cross build "-Dprotobuf_BUILD_PROTOC_BINARIES=OFF", "-DPLATFORM_NAME=" + platform_name, @@ -946,16 +943,15 @@ def generate_build_tree( if args.macos == "Catalyst": macabi_target = f"{args.osx_arch}-apple-ios{args.apple_deploy_target}-macabi" cmake_args += [ - "-DCMAKE_CXX_COMPILER_TARGET=" + macabi_target, - "-DCMAKE_C_COMPILER_TARGET=" + macabi_target, - "-DCMAKE_CC_COMPILER_TARGET=" + macabi_target, f"-DCMAKE_CXX_FLAGS=--target={macabi_target}", - f"-DCMAKE_CXX_FLAGS_RELEASE=-O3 -DNDEBUG --target={macabi_target}", f"-DCMAKE_C_FLAGS=--target={macabi_target}", - f"-DCMAKE_C_FLAGS_RELEASE=-O3 -DNDEBUG --target={macabi_target}", - f"-DCMAKE_CC_FLAGS=--target={macabi_target}", - f"-DCMAKE_CC_FLAGS_RELEASE=-O3 -DNDEBUG --target={macabi_target}", + f"-DCMAKE_ASM_FLAGS=--target={macabi_target}", ] + else: + cmake_args += [ + "-DCMAKE_OSX_DEPLOYMENT_TARGET=" + args.apple_deploy_target, + ] + if args.visionos: cmake_args += [ "-DCMAKE_SYSTEM_NAME=visionOS", diff --git a/tools/ci_build/github/azure-pipelines/build-perf-test-binaries-pipeline.yml b/tools/ci_build/github/azure-pipelines/build-perf-test-binaries-pipeline.yml index 5cf5cd8c936fa..53b62762319ba 100644 --- a/tools/ci_build/github/azure-pipelines/build-perf-test-binaries-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/build-perf-test-binaries-pipeline.yml @@ -7,7 +7,6 @@ parameters: default: true stages: - # build binaries for Android - ${{ if parameters.BuildAndroidBinaries }}: - stage: BuildAndroidBinaries diff --git a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml index 40f24b1d2c886..e5319b068a1fc 100644 --- a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml +++ b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml @@ -122,12 +122,12 @@ extends: PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - - template: stages/download-java-tools-stage.yml - - template: templates/c-api-cpu.yml parameters: RunOnnxRuntimeTests: ${{ parameters.RunOnnxRuntimeTests }} IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} ${{ if eq(parameters.NugetPackageSuffix, 'NONE') }}: OrtNugetPackageId: 'Microsoft.ML.OnnxRuntime' ${{ else }}: @@ -135,16 +135,10 @@ extends: AdditionalBuildFlags: '' AdditionalWinBuildFlags: 
'--enable_onnx_tests ${{parameters.AdditionalBuildFlag}}' BuildVariant: 'default' - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} QnnSDKVersion: ${{ parameters.QnnSdk }} is1ES: true - template: stages/java-cuda-packaging-stage.yml - parameters: - CudaVersion: 12.2 - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - template: stages/nuget-combine-cuda-stage.yml parameters: @@ -159,6 +153,8 @@ extends: buildNodejs: true SpecificArtifact: ${{ parameters.SpecificArtifact }} BuildId: ${{ parameters.BuildId }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - template: stages/nodejs-win-packaging-stage.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-test-pipelines.yml b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-test-pipelines.yml index 12cf8349a5575..b846cc8bb9e80 100644 --- a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-test-pipelines.yml +++ b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-test-pipelines.yml @@ -175,6 +175,10 @@ stages: artifact: 'Windows_Packaging_cuda_build_artifacts' displayName: 'Download Windows GPU Packages Build' + - template: templates/setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + - task: CmdLine@2 inputs: script: | @@ -188,17 +192,6 @@ stages: jdkArchitectureOption: x64 jdkSourceOption: 'PreInstalled' - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: x64 - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - - task: PythonScript@0 displayName: 'Update CTest Path References' inputs: @@ -207,10 +200,6 @@ stages: "$(Build.BinariesDirectory)/RelWithDebInfo/CTestTestfile.cmake" "$(Build.BinariesDirectory)/RelWithDebInfo" - - task: NodeTool@0 - inputs: - versionSpec: '22.x' - - template: templates/jobs/download_win_gpu_library.yml parameters: CudaVersion: 12.2 @@ -223,12 +212,6 @@ stages: scriptPath: '$(Build.SourcesDirectory)\tools\ci_build\build.py' arguments: '--config RelWithDebInfo --use_binskim_compliant_compile_flags --enable_lto --disable_rtti --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --test --enable_onnx_tests' workingDirectory: '$(Build.BinariesDirectory)' - # Previous stage only assembles the java binaries, testing will be done in this stage with GPU machine - - template: templates/make_java_win_binaries.yml - parameters: - msbuildPlatform: x64 - java_artifact_id: onnxruntime_gpu - buildOnly: false - stage: Windows_Packaging_Tensorrt_Testing dependsOn: Setup @@ -242,12 +225,15 @@ stages: - checkout: self clean: true submodules: none - - + - download: build artifact: 'Windows_Packaging_tensorrt_build_artifacts' displayName: 'Download Windows GPU Packages Build' + - template: templates/setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + - task: CmdLine@2 inputs: script: | @@ -260,18 +246,7 @@ stages: versionSpec: "17" jdkArchitectureOption: x64 jdkSourceOption: 'PreInstalled' - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: x64 - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - + - task: PythonScript@0 displayName: 'Update CTest Path References' inputs: @@ -280,10 +255,6 @@ stages: "$(Build.BinariesDirectory)/RelWithDebInfo/CTestTestfile.cmake" 
"$(Build.BinariesDirectory)/RelWithDebInfo" - - task: NodeTool@0 - inputs: - versionSpec: '22.x' - - template: templates/jobs/download_win_gpu_library.yml parameters: CudaVersion: 12.2 @@ -295,10 +266,4 @@ stages: inputs: scriptPath: '$(Build.SourcesDirectory)\tools\ci_build\build.py' arguments: '--config RelWithDebInfo --use_binskim_compliant_compile_flags --enable_lto --disable_rtti --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --test --enable_onnx_tests' - workingDirectory: '$(Build.BinariesDirectory)' - # Previous stage only assembles the java binaries, testing will be done in this stage with GPU machine - - template: templates/make_java_win_binaries.yml - parameters: - msbuildPlatform: x64 - java_artifact_id: onnxruntime_gpu - buildOnly: false + workingDirectory: '$(Build.BinariesDirectory)' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml index 46695403fd854..95f55f52f9a68 100644 --- a/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml @@ -123,11 +123,9 @@ extends: buildNodejs: false SpecificArtifact: ${{ parameters.SpecificArtifact }} BuildId: ${{ parameters.BuildId }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - template: stages/download-java-tools-stage.yml - template: stages/java-cuda-packaging-stage.yml - parameters: - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} diff --git a/tools/ci_build/github/azure-pipelines/custom-nuget-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/custom-nuget-packaging-pipeline.yml index 3acf0788ab5c3..257f554dd200e 100644 --- a/tools/ci_build/github/azure-pipelines/custom-nuget-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/custom-nuget-packaging-pipeline.yml @@ -92,6 +92,8 @@ extends: - template: templates/win-ci.yml parameters: + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} ort_build_pool_name: 'onnxruntime-Win2022-GPU-A10' DoCompliance: false DoEsrp: true @@ -124,7 +126,6 @@ extends: - template: templates/mac-cpu-packaging-pipeline.yml parameters: AllowReleasedOpsetOnly: 1 - BuildForAllArchs: true AdditionalBuildFlags: '--use_webgpu --skip_tests' DoEsrp: true diff --git a/tools/ci_build/github/azure-pipelines/jar_package_testing.yml b/tools/ci_build/github/azure-pipelines/jar_package_testing.yml index 24e17de06e6bd..19b40cb7c549a 100644 --- a/tools/ci_build/github/azure-pipelines/jar_package_testing.yml +++ b/tools/ci_build/github/azure-pipelines/jar_package_testing.yml @@ -44,7 +44,18 @@ stages: DownloadCUDA: true DownloadTRT: true - - template: templates/download_maven_for_tests.yml + - template: templates/setup-maven.yml + + - task: Maven@4 + displayName: 'Download Java Dependencies' + inputs: + mavenPomFile: '$(Build.SourcesDirectory)/tools/ci_build/java/pom.xml' + goals: 'dependency:copy-dependencies' + options: '-DoutputDirectory=$(Pipeline.Workspace)/build/onnxruntime-java' + publishJUnitTestResults: false + javaHomeOption: 'JDKVersion' + jdkVersionOption: '1.17' + mavenVersionOption: 'Default' - download: build artifact: 'onnxruntime-java-gpu' 
displayName: 'Download Final Jar' @@ -64,6 +75,8 @@ stages: del *.sha256 del *.sha512 del *.pom + del *.sha1 + del *.pom cd .. mkdir tests cd tests diff --git a/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml b/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml index 757b8ac6e9a16..574302bb11fe3 100644 --- a/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml +++ b/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml @@ -49,21 +49,10 @@ stages: clean: true submodules: none - - template: ../../templates/telemetry-steps.yml - - - task: NodeTool@0 - inputs: - versionSpec: '22.x' - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: x64 - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' + + - template: ../../templates/setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' # need to set PROCESSOR_ARCHITECTURE so the x86 SDK is installed correctly - task: UseDotNet@2 diff --git a/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml b/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml deleted file mode 100644 index 949d29d27da9d..0000000000000 --- a/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml +++ /dev/null @@ -1,26 +0,0 @@ -stages: -- stage: Download_Java_Tools - dependsOn: [] - jobs: - - job: Download_Java_Tools - pool: - name: 'onnxruntime-Ubuntu2404-AMD-CPU' - os: linux - steps: - - checkout: none - - task: CmdLine@2 - displayName: Download Java Tools - inputs: - script: | - mkdir -p java-tools - pushd java-tools - wget --tries=3 https://oss.sonatype.org/service/local/repositories/releases/content/org/junit/platform/junit-platform-console-standalone/1.6.2/junit-platform-console-standalone-1.6.2.jar -P ./ - wget --tries=3 https://oss.sonatype.org/service/local/repositories/releases/content/com/google/protobuf/protobuf-java/3.25.5/protobuf-java-3.25.5.jar -P ./ - popd - workingDirectory: '$(Agent.TempDirectory)' - - - task: 1ES.PublishPipelineArtifact@1 - displayName: 'Publish Pipeline Java Tools Artifact' - inputs: - targetPath: '$(Agent.TempDirectory)/java-tools' - artifact: 'onnxruntime-java-tools' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml index 63aaf328e1426..a58d74bf80a86 100644 --- a/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml @@ -1,80 +1,31 @@ -parameters: -- name: CudaVersion - type: string -- name: SpecificArtifact - type: string -- name: BuildId - type: string - stages: - stage: Jar_Packaging_GPU dependsOn: - Linux_C_API_Packaging_GPU - Windows_Packaging_CUDA - Windows_Packaging_TensorRT - - Download_Java_Tools jobs: - job: Jar_Packaging_GPU workspace: clean: all + templateContext: + inputs: + - input: pipelineArtifact + artifactName: drop-onnxruntime-java-win-x64-tensorrt + targetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' + + - input: pipelineArtifact + artifactName: drop-onnxruntime-java-linux-x64-tensorrt + targetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-linux-x64' + + outputs: + - output: pipelineArtifact + targetPath: $(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64 + artifactName: onnxruntime-java-gpu pool: 
'onnxruntime-Win-CPU-2022' dependsOn: [] condition: succeeded() steps: - - checkout: self - submodules: false - - template: ../templates/set-version-number-variables-step.yml - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - Win x64' - ArtifactName: 'drop-onnxruntime-java-win-x64-tensorrt' - TargetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - stepName: 'Download Pipeline Artifact - Linux x64' - artifactName: 'drop-onnxruntime-java-linux-x64-cuda' - targetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-linux-x64' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: ../templates/flex-downloadPipelineArtifact.yml + - template: ../templates/jar-packaging.yml parameters: - StepName: 'Download Pipeline Artifact - Linux x64' - ArtifactName: 'drop-onnxruntime-java-linux-x64-tensorrt' - targetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-linux-x64-tensorrt' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - task: PowerShell@2 - displayName: 'PowerShell Script' - inputs: - targetType: filePath - filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_gpu_packaging.ps1 - failOnStderr: true - showWarnings: true - workingDirectory: '$(Build.BinariesDirectory)\java-artifact' - - - template: ../templates/jar-esrp-dll.yml - parameters: - JarFileDirectory: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - JarFileName: 'onnxruntime_gpu-$(OnnxRuntimeVersion).jar' - - - template: ../templates/jar-maven-signing-win.yml - parameters: - JarFileDirectory: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - - - task: CopyFiles@2 - displayName: 'Copy Java Files to Artifact Staging Directory' - inputs: - SourceFolder: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - TargetFolder: '$(Build.ArtifactStagingDirectory)' - - - task: 1ES.PublishPipelineArtifact@1 - displayName: 'Publish Pipeline Artifact' - inputs: - path: '$(Build.ArtifactStagingDirectory)' - artifact: 'onnxruntime-java-gpu' + package_type: gpu \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/stages/nodejs-win-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nodejs-win-packaging-stage.yml index 76eb5f150ad44..3187a7fb759c2 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nodejs-win-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nodejs-win-packaging-stage.yml @@ -71,23 +71,9 @@ stages: clean: true submodules: none - - template: ../templates/telemetry-steps.yml - - - task: NodeTool@0 - inputs: - versionSpec: '22.x' - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: ${{ parameters.BuildArch }} - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - + - template: ../templates/setup-build-tools.yml + parameters: + host_cpu_arch: ${{ parameters.BuildArch }} # need to set PROCESSOR_ARCHITECTURE so the x86 SDK is installed correctly - task: UseDotNet@2 diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml index 
e33d3dbf9e107..168432283fa51 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml @@ -32,6 +32,19 @@ parameters: - name: BuildId type: string +- name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + +- name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number + stages: - template: nuget-linux-cuda-packaging-stage.yml parameters: @@ -52,6 +65,8 @@ stages: win_trt_home: ${{ parameters.win_trt_home }} win_cuda_home: ${{ parameters.win_cuda_home }} buildJava: ${{ parameters.buildJava }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - template: nuget-cuda-packaging-stage.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml index 4175a339535e4..121e80fca1021 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml @@ -28,6 +28,10 @@ stages: value: ${{ parameters.CudaVersion }} steps: - template: ../templates/set-version-number-variables-step.yml + - task: UsePythonVersion@0 + displayName: Use Python 3.12 + inputs: + versionSpec: 3.12 - template: ../templates/get-docker-image-steps.yml parameters: Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }}/Dockerfile @@ -45,10 +49,8 @@ stages: arch: 'linux-x64' buildConfig: 'Release' artifactName: 'onnxruntime-java-linux-x64-cuda' - version: '$(OnnxRuntimeVersion)' libraryName: 'libonnxruntime.so' nativeLibraryName: 'libonnxruntime4j_jni.so' - is1ES: true - template: ../templates/c-api-artifacts-package-and-publish-steps-posix.yml parameters: @@ -85,6 +87,10 @@ stages: - checkout: self clean: true submodules: recursive + - task: UsePythonVersion@0 + displayName: Use Python 3.12 + inputs: + versionSpec: 3.12 - template: ../templates/get-docker-image-steps.yml parameters: Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }}/Dockerfile @@ -106,10 +112,8 @@ stages: arch: 'linux-x64' buildConfig: 'Release' artifactName: 'onnxruntime-java-linux-x64-tensorrt' - version: '$(OnnxRuntimeVersion)' libraryName: 'libonnxruntime.so' nativeLibraryName: 'libonnxruntime4j_jni.so' - is1ES: true - template: ../templates/c-api-artifacts-package-and-publish-steps-posix.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml index ed6c4c799c26d..30c3b1e48a89a 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml @@ -34,10 +34,25 @@ parameters: - name: buildJava type: boolean +- name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. 
Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + +- name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number + stages: # Windows CUDA without TensorRT Packaging - template: ../templates/win-ci.yml parameters: + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} ort_build_pool_name: 'onnxruntime-Win2022-GPU-A10' DoEsrp: ${{ parameters.DoEsrp }} stage_name_suffix: CUDA @@ -52,9 +67,12 @@ stages: UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} SpecificArtifact: ${{ parameters.SpecificArtifact }} BuildId: ${{ parameters.BuildId }} + # Windows CUDA with TensorRT Packaging - template: ../templates/win-ci.yml parameters: + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} ort_build_pool_name: 'onnxruntime-Win2022-GPU-A10' DoEsrp: ${{ parameters.DoEsrp }} stage_name_suffix: TensorRT diff --git a/tools/ci_build/github/azure-pipelines/stages/py-cpu-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/py-cpu-packaging-stage.yml index 7567525258753..999ace8d3e345 100644 --- a/tools/ci_build/github/azure-pipelines/stages/py-cpu-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/py-cpu-packaging-stage.yml @@ -90,20 +90,12 @@ stages: matrix: Python310_x64: PythonVersion: '3.10' - MsbuildPlatform: x64 - buildArch: x64 Python311_x64: PythonVersion: '3.11' - MsbuildPlatform: x64 - buildArch: x64 Python312_x64: PythonVersion: '3.12' - MsbuildPlatform: x64 - buildArch: x64 Python313_x64: PythonVersion: '3.13' - MsbuildPlatform: x64 - buildArch: x64 variables: OnnxRuntimeBuildDirectory: '$(Build.BinariesDirectory)' ExtraParam: ${{ parameters.build_py_parameters }} @@ -116,17 +108,10 @@ stages: clean: true submodules: recursive - - template: ../templates/telemetry-steps.yml - - - task: UsePythonVersion@0 - inputs: - versionSpec: $(PythonVersion) - addToPath: true - architecture: $(buildArch) - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' + - template: ../templates/setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + python_version: $(PythonVersion) - template: ../templates/set-nightly-build-option-variable-step.yml @@ -208,80 +193,61 @@ stages: - stage: Python_Packaging_MacOS dependsOn: [] jobs: - - job: MacOS_py_Wheels - timeoutInMinutes: 360 - workspace: - clean: all - pool: - name: "Azure Pipelines" - image: "macOS-14" - os: macOS - templateContext: - outputs: - - output: pipelineArtifact - targetPath: $(Build.SourcesDirectory)/build/Release/dist/fixed_wheels - artifactName: onnxruntime-macos-$(PythonVersion) - variables: - MACOSX_DEPLOYMENT_TARGET: '13.4' - strategy: - matrix: - Python310: - PythonVersion: '3.10' - Python311: - PythonVersion: '3.11' - Python312: - PythonVersion: '3.12' - Python313: - PythonVersion: '3.13' - steps: - - checkout: self - clean: true - submodules: recursive - - - task: UsePythonVersion@0 - displayName: 'Use Python' - inputs: - versionSpec: $(PythonVersion) - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - - - template: ../templates/use-xcode-version.yml + - template: 
../templates/py-macos.yml + parameters: + arch: 'arm64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.10' + + - template: ../templates/py-macos.yml + parameters: + arch: 'arm64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.11' - - script: | - set -e -x - export _PYTHON_HOST_PLATFORM=macosx-${{variables.MACOSX_DEPLOYMENT_TARGET}}-universal2 - python3 -m pip install -r '$(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/scripts/requirements.txt' - # Note: There is a build error when we set CMAKE_OSX_ARCHITECTURES="arm64;x86_64" and KleidiAI is enabled. - # Disable KleidiAI as a workaround with --no_kleidiai. - # TODO Re-enable KleidiAI once https://github.com/microsoft/onnxruntime/issues/24152 is fixed. - python3 $(Build.SourcesDirectory)/tools/ci_build/build.py \ - --build_dir $(Build.SourcesDirectory)/build \ - --use_vcpkg --use_vcpkg_ms_internal_asset_cache \ - --use_binskim_compliant_compile_flags \ - --config Release \ - --build_wheel \ - --use_coreml \ - --no_kleidiai \ - ${{ parameters.build_py_parameters }} \ - --cmake_extra_defines CMAKE_OSX_ARCHITECTURES="arm64;x86_64" \ - --update --skip_submodule_sync --build --parallel - displayName: 'Command Line Script' + - template: ../templates/py-macos.yml + parameters: + arch: 'arm64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.12' + + - template: ../templates/py-macos.yml + parameters: + arch: 'arm64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.13' - - script: | - set -ex - python -m pip install --upgrade delocate - cd '$(Build.SourcesDirectory)/build/Release/dist' - ls - for file in *.whl - do - delocate-listdeps "$file" - delocate-wheel --require-archs=x86_64,arm64 -w fixed_wheels -v "$file" - done - displayName: 'delocate wheel' + - template: ../templates/py-macos.yml + parameters: + arch: 'x86_64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.10' + + - template: ../templates/py-macos.yml + parameters: + arch: 'x86_64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.11' + - template: ../templates/py-macos.yml + parameters: + arch: 'x86_64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.12' + + - template: ../templates/py-macos.yml + parameters: + arch: 'x86_64' + extra_build_arg: ${{ parameters.build_py_parameters }} + cmake_build_type: ${{ parameters.cmake_build_type }} + python_version: '3.13' - ${{ if eq(parameters.enable_linux_arm, true) }}: - stage: Python_Packaging_Linux_ARM diff --git a/tools/ci_build/github/azure-pipelines/stages/py-win-gpu-stage.yml b/tools/ci_build/github/azure-pipelines/stages/py-win-gpu-stage.yml index 9c063f561eefc..e2683c04f21f2 100644 --- a/tools/ci_build/github/azure-pipelines/stages/py-win-gpu-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/py-win-gpu-stage.yml @@ -3,6 +3,7 @@ parameters: type: string default: 'onnxruntime-Win2022-GPU-A10' +# Package name suffix - name: EP_NAME type: string @@ -92,18 +93,10 @@ stages: clean: true submodules: none - - template: 
../templates/telemetry-steps.yml - - - task: UsePythonVersion@0 - inputs: - versionSpec: ${{ parameters.PYTHON_VERSION }} - addToPath: true - architecture: 'x64' - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' + - template: ../templates/setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + python_version: ${{ parameters.PYTHON_VERSION }} - template: ../templates/jobs/download_win_gpu_library.yml parameters: @@ -220,9 +213,10 @@ stages: TMPDIR: "$(Agent.TempDirectory)" - powershell: | - - python -m pip uninstall -y onnxruntime onnxruntime-gpu -qq - Get-ChildItem -Path $(Build.ArtifactStagingDirectory)/*cp${{ replace(parameters.PYTHON_VERSION,'.','') }}*.whl | foreach {pip --disable-pip-version-check install --upgrade $_.fullname tabulate} + $ErrorActionPreference = "Stop" + python -m pip uninstall -y onnxruntime onnxruntime-${{ parameters.EP_NAME }} -qq + dir $(Build.ArtifactStagingDirectory) + python -m pip --disable-pip-version-check install --no-index --find-links $(Build.ArtifactStagingDirectory) onnxruntime-${{ parameters.EP_NAME }} mkdir -p $(Agent.TempDirectory)\ort_test_data Copy-Item -Path $(Build.sourcesDirectory)/onnxruntime/test/python/onnx_backend_test_series.py -Destination $(Agent.TempDirectory)\ort_test_data Copy-Item -Recurse -Path $(Build.sourcesDirectory)/onnxruntime/test/testdata -Destination $(Agent.TempDirectory)\ort_test_data diff --git a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml index c1720a2cac257..1f402160dc4d5 100644 --- a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml +++ b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml @@ -74,10 +74,11 @@ jobs: ${{ if contains(parameters.pool_name, 'mac')}}: os: macOS - - variables: - artifacts_directory: $(Build.BinariesDirectory)/.artifacts - + templateContext: + outputs: + - output: pipelineArtifact + targetPath: $(Build.BinariesDirectory)/.artifacts + artifactName: ${{parameters.artifactName}} steps: - checkout: self clean: true @@ -88,7 +89,7 @@ jobs: inputs: script: | # Create a folder for artifacts - mkdir -p $(artifacts_directory) + mkdir -p $(Build.BinariesDirectory)/.artifacts workingDirectory: $(Build.BinariesDirectory) - template: get-docker-image-steps.yml @@ -131,7 +132,7 @@ jobs: --volume $(Build.BinariesDirectory):/build \ --volume $ANDROID_HOME:/android_home \ --volume $NDK_HOME:/ndk_home \ - --volume $(artifacts_directory):/home/onnxruntimedev/.artifacts \ + --volume $(Build.BinariesDirectory)/.artifacts:/home/onnxruntimedev/.artifacts \ --volume $(Build.BinariesDirectory)/.build_settings:/home/onnxruntimedev/.build_settings \ $QNN_VOLUME \ -e NIGHTLY_BUILD \ @@ -145,18 +146,6 @@ jobs: /bin/bash /onnxruntime_src/tools/ci_build/github/android/build_aar_and_copy_artifacts.sh $USE_QNN workingDirectory: $(Build.SourcesDirectory) - - - ${{ if eq(parameters['enable_code_sign'], 'true') }}: - - template: jar-maven-signing-linux.yml - parameters: - JarFileDirectory: '$(artifacts_directory)' - - ${{ if eq(parameters.is1ES, false) }}: - - task: PublishPipelineArtifact@1 - inputs: - targetPath: '$(artifacts_directory)' - artifactName: '${{parameters.artifactName}}' - - ${{ if eq(parameters.is1ES, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: '$(artifacts_directory)' - artifactName: '${{parameters.artifactName}}' + - template: jar-maven-signing-linux.yml + parameters: + 
JarFileDirectory: $(Build.BinariesDirectory)/.artifacts \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml b/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml index 40511ee871163..9509a40cda4e9 100644 --- a/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml @@ -9,6 +9,19 @@ parameters: type: boolean default: false +- name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + +- name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number + - name: AdditionalBuildFlags displayName: Additional build flags for build.py type: string @@ -28,16 +41,6 @@ parameters: type: string default: 'default' -- name: SpecificArtifact - displayName: Use Specific Artifact - type: boolean - default: false - -- name: BuildId - displayName: Specific Artifact's BuildId - type: string - default: '0' - # Do not update this to a version that does not exist for the qnn-runtime Maven package: # https://mvnrepository.com/artifact/com.qualcomm.qti/qnn-runtime - name: QnnSDKVersion @@ -58,9 +61,6 @@ stages: - template: mac-cpu-packaging-pipeline.yml parameters: AllowReleasedOpsetOnly: 1 - BuildForAllArchs: true - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} DoEsrp: true - stage: Android_Java_API_AAR_Packaging_Full @@ -108,14 +108,26 @@ stages: clean: all pool: name: 'Azure Pipelines' - image: 'macOS-14' + image: 'macOS-15' os: 'macOS' timeoutInMinutes: 300 steps: - template: set-version-number-variables-step.yml + - task: JavaToolInstaller@0 + inputs: + versionSpec: "17" + jdkArchitectureOption: "x64" + jdkSourceOption: 'PreInstalled' + - template: use-xcode-version.yml + parameters: + xcodeVersion: 16.4 + + - template: setup-build-tools.yml + parameters: + host_cpu_arch: arm64 - script: | set -e -x @@ -155,6 +167,8 @@ stages: runTests: false buildJava: false buildNodejs: false + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - template: win-ci.yml parameters: @@ -167,13 +181,14 @@ stages: runTests: ${{ parameters.RunOnnxRuntimeTests }} buildJava: true buildNodejs: false + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - stage: Jar_Packaging dependsOn: - Linux_C_API_Packaging_CPU - - MacOS_C_API_Package_Publish + - MacOS_C_API_Packaging_CPU - Windows_Packaging_CPU_x64_${{ parameters.BuildVariant }} - - Download_Java_Tools condition: succeeded() jobs: - job: Jar_Packaging @@ -204,38 +219,13 @@ stages: targetPath: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-osx-arm64' outputs: - output: pipelineArtifact - targetPath: $(Build.ArtifactStagingDirectory) + targetPath: $(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64 artifactName: onnxruntime-java steps: - - checkout: self - submodules: false - - template: set-version-number-variables-step.yml - - - task: PowerShell@2 - displayName: 'PowerShell Script' - inputs: - targetType: filePath - filePath: 
$(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_packaging.ps1 - failOnStderr: true - showWarnings: true - workingDirectory: '$(Build.BinariesDirectory)\java-artifact' - - - template: jar-esrp-dll.yml + - template: jar-packaging.yml parameters: - JarFileDirectory: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - JarFileName: 'onnxruntime-$(OnnxRuntimeVersion).jar' - - - template: jar-maven-signing-win.yml - parameters: - JarFileDirectory: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - - - task: CopyFiles@2 - displayName: 'Copy Java Files to Artifact Staging Directory' - inputs: - SourceFolder: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' - TargetFolder: '$(Build.ArtifactStagingDirectory)' - + package_type: cpu - stage: NuGet_Packaging_CPU dependsOn: @@ -262,6 +252,28 @@ stages: binskim: enabled: true scanOutputDirectoryOnly: true + inputs: + - input: pipelineArtifact + artifactName: onnxruntime-win-x64 + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-win-arm64 + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-osx + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-linux-x64 + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-linux-aarch64 + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-ios-full-xcframework + targetPath: $(Build.BinariesDirectory)/nuget-artifact + - input: pipelineArtifact + artifactName: onnxruntime-android-full-aar + targetPath: $(Build.BinariesDirectory)/nuget-artifact outputs: - output: pipelineArtifact targetPath: $(Build.ArtifactStagingDirectory) @@ -277,62 +289,6 @@ stages: - checkout: self submodules: true - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - Win x64' - ArtifactName: 'onnxruntime-win-x64' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download win-arm64 Pipeline Artifact' - ArtifactName: 'onnxruntime-win-arm64' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download osx-x64 Pipeline Artifact' - ArtifactName: 'onnxruntime-osx' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download linux-x64 Pipeline Artifact' - ArtifactName: 'onnxruntime-linux-x64' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download linux-aarch64 Pipeline Artifact' - ArtifactName: 'onnxruntime-linux-aarch64' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download iOS 
Pipeline Artifact' - ArtifactName: 'onnxruntime-ios-full-xcframework' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Android-full-aar Pipeline Artifact' - ArtifactName: 'onnxruntime-android-full-aar' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - script: | dir workingDirectory: '$(Build.BinariesDirectory)/nuget-artifact' @@ -443,7 +399,7 @@ stages: - Windows_Nodejs_Packaging_arm64 - Linux_Nodejs_Packaging_x64 - Linux_C_API_Packaging_CPU - - MacOS_C_API_Package_Publish + - MacOS_C_API_Packaging_CPU condition: succeeded() jobs: - job: Nodejs_Packaging diff --git a/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml b/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml index aa1e38f8b0159..f1599b6843fb5 100644 --- a/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml @@ -45,6 +45,14 @@ jobs: - checkout: self clean: true submodules: none + + - task: UsePythonVersion@0 + displayName: Use Python 3.12 + inputs: + versionSpec: 3.12 + ${{ if eq(parameters.OnnxruntimeArch, 'aarch64') }}: + architecture: arm64 + - template: set-version-number-variables-step.yml - ${{ if eq(parameters.OnnxruntimeArch, 'x64') }}: - template: get-docker-image-steps.yml @@ -82,10 +90,8 @@ jobs: arch: 'linux-${{parameters.OnnxruntimeArch}}' buildConfig: 'Release' artifactName: 'onnxruntime-java-linux-${{parameters.OnnxruntimeArch}}' - version: '$(OnnxRuntimeVersion)' libraryName: 'libonnxruntime.so' nativeLibraryName: 'libonnxruntime4j_jni.so' - is1ES: true - template: c-api-artifacts-package-and-publish-steps-posix.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/templates/download_maven_for_tests.yml b/tools/ci_build/github/azure-pipelines/templates/download_maven_for_tests.yml index e53544458d494..7d4cc9550ce54 100644 --- a/tools/ci_build/github/azure-pipelines/templates/download_maven_for_tests.yml +++ b/tools/ci_build/github/azure-pipelines/templates/download_maven_for_tests.yml @@ -16,13 +16,3 @@ steps: echo "Maven is now on the PATH." mvn --version -- task: Maven@4 - displayName: 'Download Java Dependencies' - inputs: - mavenPomFile: '$(Build.SourcesDirectory)/tools/ci_build/java/pom.xml' - goals: 'dependency:copy-dependencies' - options: '-DoutputDirectory=$(Pipeline.Workspace)/build/onnxruntime-java' - publishJUnitTestResults: false - javaHomeOption: 'JDKVersion' - jdkVersionOption: '1.17' - mavenVersionOption: 'Default' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-linux.yml b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-linux.yml index 9d1cb58aee8bb..5a25232a90c39 100644 --- a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-linux.yml +++ b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-linux.yml @@ -73,6 +73,8 @@ stages: rm -f *.asc rm -f *.sha256 rm -f *.sha512 + rm -f *.sha1 + rm -f *.md5 rm -f *.pom ls cd .. 
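
Note on the checksum cleanup above (and the matching Windows change just below): the final-jar-testing templates now also delete *.sha1 and *.md5 sidecar files before running the tests, because the reworked signing flow later in this diff (jar-packaging.yml and jar-maven-signing-linux.yml, which call tools/ci_build/github/windows/sign_java_artifacts.py) emits those checksums next to each artifact in addition to the GnuPG .asc signatures. The signing script itself is not part of this diff; what follows is only a minimal sketch of the checksum half, assuming it writes Maven-style <artifact>.sha1 and <artifact>.md5 sidecars containing the hex digest.

# Hypothetical sketch of the checksum step in sign_java_artifacts.py.
# The real script is not shown in this diff; names and layout are assumptions.
import hashlib
import sys
from pathlib import Path

def write_checksums(artifact_dir: str) -> None:
    """Write <name>.sha1 and <name>.md5 sidecars for every artifact file."""
    for path in sorted(Path(artifact_dir).iterdir()):
        # Skip directories and existing signature/checksum sidecars.
        if not path.is_file() or path.suffix in {".asc", ".sha1", ".md5", ".sha256"}:
            continue
        data = path.read_bytes()
        for algo in ("sha1", "md5"):
            digest = hashlib.new(algo, data).hexdigest()
            # Maven-style sidecar: just the hex digest, next to the artifact.
            path.with_name(path.name + "." + algo).write_text(digest + "\n")

if __name__ == "__main__":
    write_checksums(sys.argv[1])
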
diff --git a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-win.yml b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-win.yml index af7fa176d2ac0..de07e9e89dc81 100644 --- a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-win.yml +++ b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing-win.yml @@ -62,6 +62,8 @@ stages: del *.asc del *.sha256 del *.sha512 + del *.md5 + del *.sha1 del *.pom cd .. mkdir tests diff --git a/tools/ci_build/github/azure-pipelines/templates/jar-esrp-dll.yml b/tools/ci_build/github/azure-pipelines/templates/jar-esrp-dll.yml index b59ba551c222f..dd0e0898ecc3b 100644 --- a/tools/ci_build/github/azure-pipelines/templates/jar-esrp-dll.yml +++ b/tools/ci_build/github/azure-pipelines/templates/jar-esrp-dll.yml @@ -3,28 +3,25 @@ parameters: type: string default: '' -- name: JarFileName - type: string - default: '' - steps: - - task: PowerShell@2 - displayName: 'ESRP Jar - Extract Jar File' - inputs: - targetType: filePath - filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_esrp_dll.ps1 - arguments: extract '${{ parameters.JarFileDirectory }}' '${{ parameters.JarFileName }}' - workingDirectory: '$(Build.BinariesDirectory)' +- task: PowerShell@2 + displayName: 'ESRP Jar - Extract Jar File' + inputs: + targetType: filePath + filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_esrp_dll.ps1 + arguments: extract '${{ parameters.JarFileDirectory }}' + workingDirectory: '$(Build.BinariesDirectory)' - - template: win-esrp-dll.yml - parameters: - FolderPath: '${{ parameters.JarFileDirectory }}\jar_extracted_full_files' - DisplayName: 'ESRP Jar - Sign Dlls' +- template: win-esrp-dll.yml + parameters: + FolderPath: '${{ parameters.JarFileDirectory }}\jar_extracted_full_files' + DisplayName: 'ESRP Jar - Sign Dlls' + DoEsrp: true # Assuming ESRP should always run when this template is called - - task: PowerShell@2 - displayName: 'ESRP Jar - Repack Jar File' - inputs: - targetType: filePath - filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_esrp_dll.ps1 - arguments: repack '${{ parameters.JarFileDirectory }}' '${{ parameters.JarFileName }}' - workingDirectory: '$(Build.BinariesDirectory)' +- task: PowerShell@2 + displayName: 'ESRP Jar - Repack Jar File' + inputs: + targetType: filePath + filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_esrp_dll.ps1 + arguments: repack '${{ parameters.JarFileDirectory }}' + workingDirectory: '$(Build.BinariesDirectory)' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-linux.yml b/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-linux.yml index df2aff0634819..98a52b08f32f2 100644 --- a/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-linux.yml +++ b/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-linux.yml @@ -4,54 +4,25 @@ parameters: steps: - task: AzureKeyVault@2 - displayName: 'Get GnuPG signing keys' + displayName: "Get GnuPG signing keys" inputs: #The value below is the name of an ADO service connection. 
- azureSubscription: 'AIInfraBuildOnnxRuntimeOSS' - KeyVaultName: 'ort-release' - SecretsFilter: 'java-pgp-pwd,java-pgp-key' + azureSubscription: "AIInfraBuildOnnxRuntimeOSS" + KeyVaultName: "ort-release" + SecretsFilter: "java-pgp-pwd,java-pgp-key" RunAsPreJob: false - - task: CmdLine@2 - displayName: 'Sign jar files: GnuPG and sha256' + - task: UsePythonVersion@0 + displayName: "Use Python 3.12" inputs: - workingDirectory: '$(Build.SourcesDirectory)' - script: | - #!/bin/bash - set -e + versionSpec: "3.12" - jar_file_directory='${{ parameters.JarFileDirectory }}' - working_directory='$(Build.SourcesDirectory)' - original_private_key='$(java-pgp-key)' - original_passphrase='$(java-pgp-pwd)' - - private_key_file=$working_directory/private_key.txt - passphrase_file=$working_directory/passphrase.txt - - echo "Generating GnuPG key files." - printf "%s" "$original_private_key" >$private_key_file - printf "%s" "$original_passphrase" >$passphrase_file - echo "Generated GnuPG key files." - - echo "Importing GnuPG private key file." - gpg --batch --import $private_key_file - echo "Imported GnuPG private key file." - - for file in $(find $jar_file_directory -type f); do - echo "GnuPG signing to file: $file" - gpg --pinentry-mode loopback --passphrase-file $passphrase_file -ab $file - echo "GnuPG signed to file: $file" - done - - for file in $(find $jar_file_directory -type f); do - echo "Adding checksum of sha256 to file: $file" - sha256_value=$(sha256sum $file | awk '{print $1}') - echo $sha256_value" *"$(basename "$file") >$file.sha256 - echo "Added checksum of sha256 to file: $file" - done - - echo "GnuPG and sha256 signing to files completed." - echo "Deleting GnuPG key files." - rm -f $private_key_file - rm -f $passphrase_file - echo "Deleted GnuPG key files." + - task: PythonScript@0 + displayName: "Sign files: GnuPG, sha1, and md5" + env: + JAVA_PGP_PWD: $(java-pgp-pwd) + JAVA_PGP_KEY: $(java-pgp-key) + inputs: + scriptPath: "$(Build.SourcesDirectory)/tools/ci_build/github/windows/sign_java_artifacts.py" + arguments: "${{ parameters.JarFileDirectory }}" + workingDirectory: "$(Build.SourcesDirectory)" \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-win.yml b/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-win.yml deleted file mode 100644 index ef845dc3bf243..0000000000000 --- a/tools/ci_build/github/azure-pipelines/templates/jar-maven-signing-win.yml +++ /dev/null @@ -1,78 +0,0 @@ -parameters: - - name: JarFileDirectory - type: string - -steps: - - task: AzureKeyVault@2 - displayName: 'Get GnuPG signing keys' - inputs: - azureSubscription: 'AIInfraBuildOnnxRuntimeOSS' - KeyVaultName: 'ort-release' - SecretsFilter: 'java-pgp-pwd,java-pgp-key' - RunAsPreJob: false - - - task: PowerShell@2 - displayName: 'Sign jar files: GnuPG and sha256' - inputs: - targetType: 'inline' - pwsh: true - workingDirectory: '$(Build.SourcesDirectory)' - script: | - $jar_file_directory = '${{ parameters.JarFileDirectory }}' - $working_directory = '$(Build.SourcesDirectory)' - - $original_passphrase='$(java-pgp-pwd)' - $original_private_key='$(java-pgp-key)' - - $gpg_exe_path = "C:\Program Files (x86)\gnupg\bin\gpg.exe" - - $passphrase_file = Join-Path -Path $working_directory -ChildPath "passphrase.txt" - $private_key_file = Join-Path -Path $working_directory -ChildPath "private_key.txt" - - Write-Host "Generating GnuPG key files." 
- Out-File -FilePath $passphrase_file -InputObject $original_passphrase -NoNewline -Encoding ascii - Out-File -FilePath $private_key_file -InputObject $original_private_key -NoNewline -Encoding ascii - Write-Host "Generated GnuPG key files." - - Write-Host "Importing GnuPG private key file." - & $gpg_exe_path --batch --import $private_key_file - if ($lastExitCode -ne 0) { - Write-Host -Object "GnuPG importing private key command failed. Exitcode: $exitCode" - exit $lastExitCode - } - Write-Host "Imported GnuPG private key file." - - $targeting_original_files = Get-ChildItem $jar_file_directory -Recurse -Force -File -Name - foreach ($file in $targeting_original_files) { - $file_path = Join-Path $jar_file_directory -ChildPath $file - Write-Host "GnuPG signing to file: "$file_path - & $gpg_exe_path --pinentry-mode loopback --passphrase-file $passphrase_file -ab $file_path - if ($lastExitCode -ne 0) { - Write-Host -Object "GnuPG signing file command failed. Exitcode: $exitCode" - exit $lastExitCode - } - Write-Host "GnuPG signed to file: "$file_path - } - - $PSDefaultParameterValues['Out-File:Encoding'] = 'utf8NoBOM' - $sha256sum_exe_path = "C:\Program Files\Git\usr\bin\sha256sum.exe" - $targeting_asc_files = Get-ChildItem $jar_file_directory -Recurse -Force -File -Name - $original_location = Get-Location - Set-Location $jar_file_directory - foreach ($file in $targeting_asc_files) { - Write-Host "Adding checksum of sha256 to file: "$file - $file_path_sha256 = $file + ".sha256" - & $sha256sum_exe_path $file 1>$file_path_sha256 - if ($lastExitCode -ne 0) { - Write-Host -Object "sha256sum command failed. Exitcode: $exitCode" - exit $lastExitCode - } - Write-Host "Added checksum of sha256 to file: "$file - } - Set-Location $original_location - - Write-Host "GnuPG and sha256 signing to files completed." - Write-Host "Deleting GnuPG key files." - Remove-Item -Path $passphrase_file - Remove-Item -Path $private_key_file - Write-Host "Deleted GnuPG key files." diff --git a/tools/ci_build/github/azure-pipelines/templates/jar-packaging.yml b/tools/ci_build/github/azure-pipelines/templates/jar-packaging.yml new file mode 100644 index 0000000000000..098d7e3162d1f --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/templates/jar-packaging.yml @@ -0,0 +1,61 @@ +# This template packages the Java artifacts for either CPU or GPU. +# It calls the PowerShell script with the correct package type and ensures +# that the correct final JAR file is signed and published. +# Currently this file only runs on Windows x64. 
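+#
+# Hypothetical usage sketch (for illustration only; the CPU invocation mirrors the
+# call added in c-api-cpu.yml earlier in this change, a GPU stage would pass 'gpu'):
+#
+#   - template: jar-packaging.yml
+#     parameters:
+#       package_type: cpu
+#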
+ +parameters: + - name: package_type + type: string + default: 'cpu' + values: + - 'cpu' + - 'gpu' + +steps: +- checkout: self + submodules: false + +- task: UsePythonVersion@0 + inputs: + versionSpec: '3.13' + addToPath: true + +- task: PipAuthenticate@1 + displayName: 'Pip Authenticate' + inputs: + artifactFeeds: 'Lotus' + +- template: set-version-number-variables-step.yml + +- script: python -m pip install -r $(Build.SourcesDirectory)\tools\ci_build\github\windows\python\requirements.txt + +- task: PythonScript@0 + displayName: 'Package Java Artifacts' + inputs: + scriptPath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\jar_packaging.py + arguments: '--package_type ${{ parameters.package_type }} --build_dir $(Build.BinariesDirectory)' + workingDirectory: '$(Build.BinariesDirectory)\java-artifact' + +- script: dir $(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64 + +- template: jar-esrp-dll.yml + parameters: + JarFileDirectory: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' + +- task: AzureKeyVault@2 + displayName: 'Get GnuPG signing keys' + inputs: + azureSubscription: 'AIInfraBuildOnnxRuntimeOSS' + KeyVaultName: 'ort-release' + SecretsFilter: 'java-pgp-pwd,java-pgp-key' + RunAsPreJob: false + +- task: PythonScript@0 + displayName: 'Sign files: GnuPG, sha1, and md5' + env: + JAVA_PGP_PWD: $(java-pgp-pwd) + JAVA_PGP_KEY: $(java-pgp-key) + inputs: + scriptPath: '$(Build.SourcesDirectory)/tools/ci_build/github/windows/sign_java_artifacts.py' + arguments: '$(Build.BinariesDirectory)\java-artifact\onnxruntime-java-win-x64' + workingDirectory: '$(Build.SourcesDirectory)' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/java-api-artifacts-package-and-publish-steps-posix.yml b/tools/ci_build/github/azure-pipelines/templates/java-api-artifacts-package-and-publish-steps-posix.yml index 1c4b0ae5f4137..166b03f6b55e1 100644 --- a/tools/ci_build/github/azure-pipelines/templates/java-api-artifacts-package-and-publish-steps-posix.yml +++ b/tools/ci_build/github/azure-pipelines/templates/java-api-artifacts-package-and-publish-steps-posix.yml @@ -1,28 +1,50 @@ # sets up common build tools for the windows build machines before build parameters: - arch: 'linux-x64' - buildConfig: 'RelWithDebInfo' - artifactName: 'onnxruntime-java-linux-x64' - libraryName: 'libonnxruntime.so' - nativeLibraryName: 'libonnxruntime4j_jni.so' - version: '' - is1ES: false +- name: buildConfig + displayName: Build Configuration + type: string + values: + - 'Release' + - 'Debug' + - 'RelWithDebInfo' + +- name: artifactName + displayName: Artifact Name + type: string + #default: 'onnxruntime-java' + +- name: libraryName + displayName: Main Library Name + type: string + #default: 'libonnxruntime.so' + +- name: nativeLibraryName + displayName: JNI Library Name + type: string + #default: 'libonnxruntime4j_jni.so' + +- name: arch + displayName: Architecture + type: string + #default: 'linux-x64' + steps: -- task: ShellScript@2 - displayName: 'Copy build artifacts for zipping' +- task: PythonScript@0 + inputs: + scriptSource: 'filePath' + scriptPath: 'tools/ci_build/linux_java_copy_strip_binary.py' + arguments: >- + --binary-dir $(Build.BinariesDirectory) + --build-config ${{parameters.buildConfig}} + --artifact-name ${{parameters.artifactName}} + --lib-name ${{parameters.libraryName}} + --native-lib-name ${{parameters.nativeLibraryName}} + --arch ${{parameters.arch}} + displayName: 'Package ONNX Runtime Java Native Libs' + +- task: 
1ES.PublishPipelineArtifact@1 inputs: - scriptPath: 'tools/ci_build/github/linux/java_copy_strip_binary.sh' - args: '-r $(Build.BinariesDirectory) -c ${{parameters.buildConfig}} -a ${{parameters.artifactName}} -l ${{parameters.libraryName}} -n ${{parameters.nativeLibraryName}} -v ${{parameters.version}} -h ${{parameters.arch}}' - workingDirectory: '$(Build.BinariesDirectory)/${{parameters.buildConfig}}' + targetPath: '$(Build.BinariesDirectory)/${{parameters.artifactName}}' + artifactName: 'drop-${{parameters.artifactName}}' -- ${{ if eq(parameters.is1ES, true) }}: - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: '$(Build.BinariesDirectory)/${{parameters.artifactName}}' - artifactName: 'drop-${{parameters.artifactName}}' -- ${{ if eq(parameters.is1ES, false) }}: - - task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.BinariesDirectory)/${{parameters.artifactName}}' - artifactName: 'drop-${{parameters.artifactName}}' diff --git a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-pipeline.yml index 7547b841c7480..56cc84a90dc68 100644 --- a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-pipeline.yml @@ -1,3 +1,5 @@ +# This stage fetch built macOS binaries from other stages, sign the binaries, then repack them + parameters: - name: AdditionalBuildFlags displayName: Additional build flags for build.py @@ -13,31 +15,11 @@ parameters: - 1 - 0 -- name: BuildForAllArchs - displayName: Build for all CPU ARCHs - type: boolean - -- name: WithCache - displayName: Build with Cache - type: boolean - default: false - - name: DoESRP displayName: Do ESRP type: boolean default: false -# these 2 parameters are used for debugging. 
-- name: SpecificArtifact - displayName: Use Specific Artifact (Debugging only) - type: boolean - default: false - -- name: BuildId - displayName: Pipeline BuildId, you could find it in the URL - type: string - default: '0' - stages: - stage: MacOS_C_API_Packaging_CPU dependsOn: [] @@ -47,21 +29,12 @@ stages: MacosArch: 'x86_64' AllowReleasedOpsetOnly: ${{ parameters.AllowReleasedOpsetOnly }} AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} - WithCache: ${{ parameters.WithCache }} - - ${{ if eq(parameters.BuildForAllArchs, true) }}: - - template: mac-cpu-packing-jobs.yml - parameters: - MacosArch: 'arm64' - AllowReleasedOpsetOnly: ${{ parameters.AllowReleasedOpsetOnly }} - AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} - WithCache: ${{ parameters.WithCache }} - - template: mac-cpu-packing-jobs.yml - parameters: - MacosArch: 'universal2' - AllowReleasedOpsetOnly: ${{ parameters.AllowReleasedOpsetOnly }} - AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} - WithCache: ${{ parameters.WithCache }} + - template: mac-cpu-packing-jobs.yml + parameters: + MacosArch: 'arm64' + AllowReleasedOpsetOnly: ${{ parameters.AllowReleasedOpsetOnly }} + AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} - stage: MacOS_C_API_Package_Publish dependsOn: MacOS_C_API_Packaging_CPU @@ -71,68 +44,56 @@ stages: name: 'Azure Pipelines' image: 'macOS-14' os: 'macOS' + templateContext: + inputs: + - input: pipelineArtifact + artifactName: onnxruntime-osx-x86_64 # The files in this artifact are not signed + targetPath: $(Build.ArtifactStagingDirectory) + - input: pipelineArtifact + artifactName: onnxruntime-osx-arm64 # The files in this artifact are not signed + targetPath: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.ArtifactStagingDirectory) + artifactName: 'onnxruntime-osx' # The files in this artifact are signed steps: - - checkout: none - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline onnxruntime-osx-x86_64' - ArtifactName: 'onnxruntime-osx-x86_64' - TargetPath: '$(Build.ArtifactStagingDirectory)' - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} + - checkout: self - - ${{ if eq(parameters.BuildForAllArchs, true) }}: - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline onnxruntime-osx-arm64' - ArtifactName: 'onnxruntime-osx-arm64' - TargetPath: '$(Build.ArtifactStagingDirectory)' - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - - template: flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline onnxruntime-osx-universal2' - ArtifactName: 'onnxruntime-osx-universal2' - TargetPath: '$(Build.ArtifactStagingDirectory)' - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.13' + addToPath: true - - ${{ if eq(parameters.DoESRP, true)}}: - - script: | - pushd '$(Build.ArtifactStagingDirectory)' - find . '*.tgz' -exec tar -zxvf {} \; - rm -f *.tgz; - find . -type d -name 'onnxruntime-osx-*' -exec zip -FSr --symlinks {}.zip {} \; - find . 
-type d -name 'onnxruntime-osx-*' -exec rm -rf {} \; - ls -l - popd - displayName: tgz to zip + - task: PythonScript@0 + displayName: 'Prepare, Create Universal Binary, and Zip with Python' + inputs: + scriptSource: 'filePath' + scriptPath: 'tools/ci_build/prepare_macos_package.py' + arguments: '--staging_dir $(Build.ArtifactStagingDirectory)' - - template: mac-esrp-dylib.yml - parameters: - FolderPath: '$(Build.ArtifactStagingDirectory)' - Pattern: '*.zip' + - template: mac-esrp-dylib.yml + parameters: + FolderPath: '$(Build.ArtifactStagingDirectory)' + Pattern: '*.zip' - - script: | - pushd '$(Build.ArtifactStagingDirectory)' - find . '*.zip' -exec unzip {} \; - rm -f *.zip; - find . -type d -name 'onnxruntime-osx-*' -exec tar -czf {}.tgz {} \; - find . -type d -name 'onnxruntime-osx-*' -exec rm -rf {} \; - ls -l - popd - displayName: zip to tgz - - bash: | - set -ex - mkdir -p $(Agent.TempDirectory)/macpackage - find $(Build.ArtifactStagingDirectory) -name "*.tgz" -exec tar -zxvf {} -C $(Agent.TempDirectory)/macpackage \; - find $(Agent.TempDirectory)/macpackage -name "*.dylib" -exec codesign -dvvv {} \; - find $(Agent.TempDirectory)/macpackage -name "*.dylib" -exec ls -l {} \; - rm -rf $(Agent.TempDirectory)/macpackage - displayName: 'Verify code signing' + - script: | + set -ex + mkdir temp + cd temp + find $(Build.ArtifactStagingDirectory) -name '*.zip' -exec unzip {} \; + rm -rf $(Build.ArtifactStagingDirectory)/*; + find . -type d -name 'onnxruntime-osx-*' -exec tar -czf {}.tgz {} \; + ls -l + mv *.tgz $(Build.ArtifactStagingDirectory) + displayName: 'Unzip Signed Files and Repackage to TGZ' + workingDirectory: $(Agent.TempDirectory) - - task: 1ES.PublishPipelineArtifact@1 - inputs: - targetPath: '$(Build.ArtifactStagingDirectory)' - artifactName: 'onnxruntime-osx' - condition: 'succeededOrFailed()' + - bash: | + set -ex + mkdir -p macpackage + find $(Build.ArtifactStagingDirectory) -name "*.tgz" -exec tar -zxvf {} -C macpackage \; + find macpackage -name "*.dylib" -exec codesign -dvvv {} \; + find macpackage -name "*.dylib" -exec ls -l {} \; + rm -rf macpackage + displayName: 'Verify Code Signing' + workingDirectory: $(Agent.TempDirectory) diff --git a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-steps.yml b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-steps.yml index 9a8264a288582..c43bfe2886f22 100644 --- a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packaging-steps.yml @@ -4,56 +4,22 @@ parameters: values: - 'x86_64' - 'arm64' - - 'universal2' - default: 'x86_64' - name: AdditionalBuildFlags displayName: Additional build flags for build.py type: string default: '' -- name: BuildJava - displayName: Build with Java - type: boolean - default: true -- name: BuildNodejs - displayName: Build with Nodejs - type: boolean - default: false - -- name: WithCache - displayName: Build with Cache - type: boolean - default: false - -- name: CacheDir - displayName: Cache Directory - type: string - default: '' - -- name: Today - type: string - default: "" steps: -- template: mac-build-step-with-cache.yml - parameters: - WithCache: ${{ parameters.WithCache }} - Today: ${{ parameters.Today }} - AdditionalKey: onnxruntime_${{ parameters.MacosArch }} - CacheDir: ${{ parameters.CacheDir }} - ChangeEveryCommit: true - BuildStep: - - script: | - set -e -x - rm -rf $(Build.BinariesDirectory)/Release - python3 $(Build.SourcesDirectory)/tools/ci_build/build.py --update 
--build ${{ parameters.AdditionalBuildFlags }} --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --parallel --use_vcpkg --use_vcpkg_ms_internal_asset_cache --use_binskim_compliant_compile_flags --build_shared_lib --config Release --use_vcpkg --use_vcpkg_ms_internal_asset_cache - cd $(Build.BinariesDirectory)/Release - make install DESTDIR=$(Build.BinariesDirectory)/installed - displayName: 'Build ${{ parameters.MacosArch }}' - env: - CCACHE_DIR: ${{ parameters.CacheDir }} +- script: | + set -e -x + rm -rf $(Build.BinariesDirectory)/Release + python3 $(Build.SourcesDirectory)/tools/ci_build/build.py --update --build ${{ parameters.AdditionalBuildFlags }} --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --parallel 3 --use_vcpkg --use_vcpkg_ms_internal_asset_cache --use_binskim_compliant_compile_flags --build_shared_lib --config Release --use_vcpkg --use_vcpkg_ms_internal_asset_cache + cd $(Build.BinariesDirectory)/Release + make install DESTDIR=$(Build.BinariesDirectory)/installed + displayName: 'Build ${{ parameters.MacosArch }}' - ${{ if eq(parameters.MacosArch, 'x86_64') }}: - script: | @@ -77,9 +43,9 @@ steps: replaceExistingArchive: true - script: | - set -e -x - mkdir -p $(Build.ArtifactStagingDirectory)/testdata - cp $(Build.BinariesDirectory)/Release/libcustom_op_library.dylib $(Build.ArtifactStagingDirectory)/testdata + set -e -x + mkdir -p $(Build.ArtifactStagingDirectory)/testdata + cp $(Build.BinariesDirectory)/Release/libcustom_op_library.dylib $(Build.ArtifactStagingDirectory)/testdata displayName: 'Copy libcustom_op_library.dylib to ArtifactStagingDirectory' condition: and(succeeded(), eq('${{ parameters.MacosArch }}', 'x86_64')) @@ -88,23 +54,19 @@ steps: targetPath: '$(Build.ArtifactStagingDirectory)' artifactName: 'onnxruntime-osx-${{ parameters.MacosArch }}' -- ${{ if eq(parameters.BuildJava, true) }}: - - template: java-api-artifacts-package-and-publish-steps-posix.yml - parameters: - arch: 'osx-${{ parameters.MacosArch }}' - buildConfig: 'Release' - artifactName: 'onnxruntime-java-osx-${{ parameters.MacosArch }}' - version: '$(OnnxRuntimeVersion)' - libraryName: 'libonnxruntime.dylib' - nativeLibraryName: 'libonnxruntime4j_jni.dylib' - is1ES: true +- template: java-api-artifacts-package-and-publish-steps-posix.yml + parameters: + arch: 'osx-${{ parameters.MacosArch }}' + buildConfig: 'Release' + artifactName: 'onnxruntime-java-osx-${{ parameters.MacosArch }}' + libraryName: 'libonnxruntime.dylib' + nativeLibraryName: 'libonnxruntime4j_jni.dylib' -- ${{ if eq(parameters.BuildNodejs, true) }}: - - template: nodejs-artifacts-package-and-publish-steps-posix.yml - parameters: - ${{ if eq(parameters.MacosArch, 'x86_64') }}: - arch: x64 - ${{ if eq(parameters.MacosArch, 'arm64') }}: - arch: arm64 - os: 'darwin' - artifactName: 'drop-onnxruntime-nodejs-osx-${{ parameters.MacosArch }}' +- template: nodejs-artifacts-package-and-publish-steps-posix.yml + parameters: + ${{ if eq(parameters.MacosArch, 'x86_64') }}: + arch: x64 + ${{ if eq(parameters.MacosArch, 'arm64') }}: + arch: arm64 + os: 'darwin' + artifactName: 'drop-onnxruntime-nodejs-osx-${{ parameters.MacosArch }}' diff --git a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml index 6d908dec05ee2..c63c74fb997fe 100644 --- a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml +++ b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml @@ -4,13 +4,6 @@ parameters: 
values: - 'x86_64' - 'arm64' - - 'universal2' - default: 'x86_64' - -- name: WithCache - displayName: Build with Cache - type: boolean - default: false - name: AdditionalBuildFlags displayName: Additional build flags for build.py @@ -33,9 +26,6 @@ jobs: variables: MACOSX_DEPLOYMENT_TARGET: '13.4' ALLOW_RELEASED_ONNX_OPSET_ONLY: ${{ parameters.AllowReleasedOpsetOnly }} - TODAY: $[format('{0:dd}{0:MM}{0:yyyy}', pipeline.startTime)] - PROTO_CACHE_DIR: $(Pipeline.Workspace)/ccache_proto - ORT_CACHE_DIR: $(Pipeline.Workspace)/ccache_ort pool: name: "Azure Pipelines" image: 'macOS-14' @@ -46,65 +36,34 @@ jobs: clean: true submodules: none - - task: UsePythonVersion@0 - displayName: Use Python 3.10 - inputs: - versionSpec: 3.10 - - - task: NodeTool@0 - inputs: - versionSpec: '22.x' - - task: JavaToolInstaller@0 inputs: versionSpec: "17" jdkArchitectureOption: "x64" jdkSourceOption: 'PreInstalled' - - template: set-version-number-variables-step.yml - - template: use-xcode-version.yml + - template: setup-build-tools.yml + parameters: + host_cpu_arch: ${{ parameters.MacosArch }} + + - template: set-version-number-variables-step.yml + - script: | set -e -x - export PATH=$(Build.BinariesDirectory)/installed/bin:$PATH export ONNX_ML=1 export CMAKE_ARGS="-DONNX_GEN_PB_TYPE_STUBS=ON -DONNX_WERROR=OFF" - python3 -m pip install -r '$(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/scripts/requirements.txt' - - - - ${{ if eq(parameters.MacosArch, 'universal2') }}: - - template: mac-cpu-packaging-steps.yml - parameters: - MacosArch: ${{ parameters.MacosArch }} - AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} --use_coreml --use_webgpu --no_kleidiai --cmake_extra_defines CMAKE_OSX_ARCHITECTURES="arm64;x86_64" - BuildJava: false - BuildNodejs: false - WithCache: ${{ parameters.WithCache }} - ${{ if eq(parameters.WithCache, true) }}: - Today: $(TODAY) - CacheDir: $(ORT_CACHE_DIR) + python3 -m pip install -r '$(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/scripts/requirements.txt' - ${{ if eq(parameters.MacosArch, 'arm64') }}: - template: mac-cpu-packaging-steps.yml parameters: MacosArch: ${{ parameters.MacosArch }} AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} --build_nodejs --build_java --use_coreml --use_webgpu --cmake_extra_defines CMAKE_OSX_ARCHITECTURES=arm64 - BuildJava: true - BuildNodejs: true - WithCache: ${{ parameters.WithCache }} - ${{ if eq(parameters.WithCache, true) }}: - Today: $(TODAY) - CacheDir: $(ORT_CACHE_DIR) - ${{ if eq(parameters.MacosArch, 'x86_64') }}: - template: mac-cpu-packaging-steps.yml parameters: MacosArch: ${{ parameters.MacosArch }} - AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} --build_nodejs --build_java --use_coreml --use_webgpu - BuildJava: true - BuildNodejs: true - WithCache: ${{ parameters.WithCache }} - ${{ if eq(parameters.WithCache, true) }}: - Today: $(TODAY) - CacheDir: $(ORT_CACHE_DIR) + AdditionalBuildFlags: ${{ parameters.AdditionalBuildFlags }} --build_nodejs --build_java --use_coreml --use_webgpu --cmake_extra_defines CMAKE_OSX_ARCHITECTURES=x86_64 diff --git a/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml b/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml index 0d62ed7907a67..d1ea61ada90c3 100644 --- a/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml +++ b/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml @@ -1,59 +1,50 @@ parameters: - - name: msbuildPlatform - type: string - - name: 
java_artifact_id - type: string - - name: buildOnly - type: boolean +- name: msbuildPlatform + type: string +- name: java_artifact_id + type: string +- name: buildOnly + type: boolean + default: false +- name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + +- name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number steps: - - task: CmdLine@2 - displayName: 'Gradle cmakeCheck' - inputs: - ${{ if eq(parameters.buildOnly, true) }}: - script: | - call gradlew.bat testClasses -DcmakeBuildDir=$(Build.BinariesDirectory)\RelWithDebInfo - call gradlew.bat cmakeCheck -x test -DcmakeBuildDir=$(Build.BinariesDirectory)\RelWithDebInfo --warning-mode all - workingDirectory: $(Build.SourcesDirectory)\java - ${{ else }}: - script: | - call gradlew.bat cmakeCheck -DcmakeBuildDir=$(Build.BinariesDirectory)\RelWithDebInfo --warning-mode all - workingDirectory: $(Build.SourcesDirectory)\java +- task: PowerShell@2 + displayName: 'Build and Package Java Artifacts' + inputs: + targetType: 'inline' + script: | + # Define arguments for the Python script + $scriptArgs = @( + "--sources-dir", "$(Build.SourcesDirectory)", + "--binaries-dir", "$(Build.BinariesDirectory)", + "--platform", "${{ parameters.msbuildPlatform }}", + "--build-config", "RelWithDebInfo", + "--java-artifact-id", "${{ parameters.java_artifact_id }}", + "--pre-release-version-suffix-string", "${{ parameters.PreReleaseVersionSuffixString }}", + "--pre-release-version-suffix-number", "${{ parameters.PreReleaseVersionSuffixNumber }}", + "--commit-hash", "$(OnnxRuntimeGitCommitHash)" + ) + + # Conditionally add the --build-only flag if the parameter is true + if ('${{ parameters.buildOnly }}' -eq 'True') { + $scriptArgs += "--build-only" + } + + # Define the path to the python script within your repository + $scriptPath = "$(Build.SourcesDirectory)/tools/ci_build/manage_java_artifacts.py" - - task: CmdLine@2 - displayName: 'Add symbols and notices to Java' - inputs: - script: | - @echo on - cd $(Build.BinariesDirectory)\RelWithDebInfo - set NATIVE_FOLDER=$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage\ai\onnxruntime\native\win-x64 - mkdir %NATIVE_FOLDER% - echo "Directories created" - copy .\java\build\libs\*.jar $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }} - pushd $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }} - set artifact_id=${{ parameters.java_artifact_id }} - jar xf onnxruntime-$(OnnxRuntimeVersion).jar META-INF\maven\com.microsoft.onnxruntime\%artifact_id%\pom.xml - move META-INF\maven\com.microsoft.onnxruntime\%artifact_id%\pom.xml onnxruntime-$(OnnxRuntimeVersion).pom - rd /s /q META-INF - popd - copy .\RelWithDebInfo\onnxruntime.pdb %NATIVE_FOLDER% - copy .\RelWithDebInfo\onnxruntime4j_jni.pdb %NATIVE_FOLDER% - copy $(Build.SourcesDirectory)\docs\Privacy.md $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage\Privacy.md - copy $(Build.SourcesDirectory)\ThirdPartyNotices.txt $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage\ThirdPartyNotices.txt - @echo $(OnnxRuntimeGitCommitHash) > $(Build.BinariesDirectory)\onnxruntime-java-win-${{ 
parameters.msbuildPlatform }}\stage\GIT_COMMIT_ID - pushd $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage - jar uf $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\onnxruntime-$(OnnxRuntimeVersion).jar ai\onnxruntime\native\win-x64\onnxruntime.pdb - jar uf $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\onnxruntime-$(OnnxRuntimeVersion).jar ai\onnxruntime\native\win-x64\onnxruntime4j_jni.pdb - jar uf $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\onnxruntime-$(OnnxRuntimeVersion).jar Privacy.md ThirdPartyNotices.txt GIT_COMMIT_ID - popd - pushd $(Build.SourcesDirectory)\java\build\classes\java\test - if %errorlevel% neq 0 exit /b %errorlevel% - jar cvf $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\testing.jar . - if %errorlevel% neq 0 exit /b %errorlevel% - popd - pushd $(Build.SourcesDirectory)\java\build\resources\test - rd /s /q ai\onnxruntime\native - jar uvf $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\testing.jar . - popd - rd /s /q $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage - dir /s /b $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }} + # Execute the Python script, passing all arguments + Write-Host "Executing Python script: $scriptPath with arguments: $($scriptArgs -join ' ')" + python $scriptPath $scriptArgs \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/py-macos.yml b/tools/ci_build/github/azure-pipelines/templates/py-macos.yml new file mode 100644 index 0000000000000..c8a26481d6205 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/templates/py-macos.yml @@ -0,0 +1,75 @@ +parameters: +- name: arch + type: string + +- name: python_version + type: string + +- name: cmake_build_type + type: string + default: 'Release' + values: + - Debug + - Release + - RelWithDebInfo + - MinSizeRel + +- name: extra_build_arg + type: string + default: '' + +jobs: +- job: Mac_${{ parameters.arch }}_${{ replace(parameters.python_version,'.','_') }} + timeoutInMinutes: 240 + workspace: + clean: all + pool: + name: "Azure Pipelines" + image: "macOS-15" + os: macOS + templateContext: + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory)/build/Release/dist/fixed_wheels + artifactName: onnxruntime-macos-${{ parameters.arch }}_${{ replace(parameters.python_version,'.','_') }} + + variables: + - name: MACOSX_DEPLOYMENT_TARGET + value: '13.4' + + steps: + - checkout: self + clean: true + submodules: none + + - template: use-xcode-version.yml + parameters: + xcodeVersion: '16.4.0' + + + - template: setup-build-tools.yml + parameters: + host_cpu_arch: ${{ parameters.arch }} + python_version: ${{ parameters.python_version }} + + - script: | + set -e -x + export _PYTHON_HOST_PLATFORM=macosx-${{variables.MACOSX_DEPLOYMENT_TARGET}}-${{ parameters.arch }} + python3 -m pip install -r '$(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/scripts/requirements.txt' + python3 $(Build.SourcesDirectory)/tools/ci_build/build.py \ + --build_dir $(Build.SourcesDirectory)/build \ + --use_vcpkg --use_vcpkg_ms_internal_asset_cache \ + --use_binskim_compliant_compile_flags \ + --config Release \ + --build_wheel \ + --use_coreml ${{ parameters.extra_build_arg }} \ + --cmake_extra_defines CMAKE_OSX_ARCHITECTURES=${{ parameters.arch }} \ + --update 
--skip_submodule_sync --build --parallel
+      python -m pip install --upgrade delocate
+      cd '$(Build.SourcesDirectory)/build/Release/dist'
+      ls
+      for file in *.whl
+      do
+        delocate-listdeps "$file"
+        delocate-wheel --require-archs=${{ parameters.arch }} -w fixed_wheels -v "$file"
+      done
\ No newline at end of file
diff --git a/tools/ci_build/github/azure-pipelines/templates/py-package-smoking-test.yml b/tools/ci_build/github/azure-pipelines/templates/py-package-smoking-test.yml
index be9707e8f3f65..fe8c898fa193b 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-package-smoking-test.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-package-smoking-test.yml
@@ -54,7 +54,8 @@ jobs:
         FILE_NAME="${files[0]}"
         FILE_NAME=$(basename $FILE_NAME)
         PYTHON_PACKAGE_NAME=$(echo "$FILE_NAME" | cut -f 1 -d '-')
-        python3 -m pip install --find-links "$(Pipeline.Workspace)/build/onnxruntime-${{ parameters.arch }}-${{ parameters.ep }}" $PYTHON_PACKAGE_NAME
+        python3 -m pip install coloredlogs flatbuffers numpy packaging protobuf sympy
+        python3 -m pip install --no-index --find-links "$(Pipeline.Workspace)/build/onnxruntime-${{ parameters.arch }}-${{ parameters.ep }}" $PYTHON_PACKAGE_NAME
         python3 -m pip show $PYTHON_PACKAGE_NAME
         python3 -c "import onnxruntime as ort; print(ort.__version__)"
       workingDirectory: $(Pipeline.Workspace)/build/onnxruntime-${{ parameters.arch }}-${{ parameters.ep }}
diff --git a/tools/ci_build/github/azure-pipelines/templates/py-win-arm64-qnn.yml b/tools/ci_build/github/azure-pipelines/templates/py-win-arm64-qnn.yml
index 09133499bc23f..9ad59ba90402d 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-win-arm64-qnn.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-win-arm64-qnn.yml
@@ -57,18 +57,10 @@ jobs:
       clean: true
       submodules: none
 
-    - template: telemetry-steps.yml
-
-    - task: UsePythonVersion@0
-      inputs:
-        versionSpec: ${{ parameters.PYTHON_VERSION }}
-        addToPath: true
-        architecture: 'arm64'
-
-    - task: PipAuthenticate@1
-      displayName: 'Pip Authenticate'
-      inputs:
-        artifactFeeds: 'Lotus'
+    - template: setup-build-tools.yml
+      parameters:
+        host_cpu_arch: 'arm64'
+        python_version: ${{ parameters.PYTHON_VERSION }}
 
     - task: PythonScript@0
       inputs:
diff --git a/tools/ci_build/github/azure-pipelines/templates/py-win-arm64ec-qnn.yml b/tools/ci_build/github/azure-pipelines/templates/py-win-arm64ec-qnn.yml
index cd6a43a18991e..aad24661b868c 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-win-arm64ec-qnn.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-win-arm64ec-qnn.yml
@@ -45,18 +45,10 @@ jobs:
       clean: true
       submodules: recursive
 
-    - template: telemetry-steps.yml
-
-    - task: UsePythonVersion@0
-      inputs:
-        versionSpec: $(PythonVersion)
-        addToPath: true
-        architecture: 'x64'
-
-    - task: PipAuthenticate@1
-      displayName: 'Pip Authenticate'
-      inputs:
-        artifactFeeds: 'Lotus'
+    - template: setup-build-tools.yml
+      parameters:
+        host_cpu_arch: 'x64'
+        python_version: $(PythonVersion)
 
     - script: python -m pip install -r $(Build.SourcesDirectory)\tools\ci_build\github\windows\python\requirements.txt
 
diff --git a/tools/ci_build/github/azure-pipelines/templates/py-win-x64-qnn.yml b/tools/ci_build/github/azure-pipelines/templates/py-win-x64-qnn.yml
index dd202270768af..49f6fc662aa75 100644
--- a/tools/ci_build/github/azure-pipelines/templates/py-win-x64-qnn.yml
+++ b/tools/ci_build/github/azure-pipelines/templates/py-win-x64-qnn.yml
@@ -49,19 +49,11 @@ jobs:
       clean: true
       submodules: recursive
 
-    - template: telemetry-steps.yml
-
-    - task: UsePythonVersion@0
-      inputs:
-        versionSpec: 
$(PythonVersion) - addToPath: true - architecture: 'x64' - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - + - template: setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + python_version: $(PythonVersion) + - script: python -m pip install -r $(Build.SourcesDirectory)\tools\ci_build\github\windows\python\requirements.txt - template: set-nightly-build-option-variable-step.yml diff --git a/tools/ci_build/github/azure-pipelines/templates/qnn-ep-win.yml b/tools/ci_build/github/azure-pipelines/templates/qnn-ep-win.yml index b6388c22fae98..3836db5ee7ba0 100644 --- a/tools/ci_build/github/azure-pipelines/templates/qnn-ep-win.yml +++ b/tools/ci_build/github/azure-pipelines/templates/qnn-ep-win.yml @@ -52,10 +52,9 @@ stages: steps: - template: set-version-number-variables-step.yml - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true + - template: setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' - template: jobs/download_win_qnn_sdk.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/templates/setup-build-tools.yml b/tools/ci_build/github/azure-pipelines/templates/setup-build-tools.yml new file mode 100644 index 0000000000000..df7fea537ce6f --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/templates/setup-build-tools.yml @@ -0,0 +1,65 @@ +# Setup python/nodejs/cmake/vcpkg tools. Also, setup telemetry header file if the current OS is Windows. + +parameters: +# for selecting python binary +- name: host_cpu_arch + type: string + +- name: python_version + type: string + default: '3.12' + +- name: action_version + type: string + default: 'v0.0.9' + +steps: +- template: telemetry-steps.yml + +# Currently all ADO macOS machines are x64 machines +- task: UsePythonVersion@0 + displayName: 'Use Python ${{ parameters.host_cpu_arch }} (macOS)' + condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin')) + inputs: + versionSpec: ${{ parameters.python_version }} + architecture: 'x64' + +- task: UsePythonVersion@0 + displayName: 'Use Python ${{ parameters.host_cpu_arch }} (non-macOS)' + condition: and(succeeded(), ne(variables['Agent.OS'], 'Darwin')) + inputs: + versionSpec: ${{ parameters.python_version }} + architecture: ${{ parameters.host_cpu_arch }} + +- task: PipAuthenticate@1 + displayName: 'Pip Authenticate' + inputs: + artifactFeeds: 'Lotus' + +# The following task does not support different arches. 
+- task: UseNode@1 + condition: and(succeeded(), ne(variables['Agent.OS'], 'Windows_NT')) + inputs: + version: '22.x' + +- task: PowerShell@2 + displayName: 'Setup Latest Node.js v20 (Win)' + condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) + inputs: + filePath: '$(System.DefaultWorkingDirectory)\tools\ci_build\github\windows\setup_nodejs.ps1' + arguments: '-MajorVersion 22' + +- script: | + node -v + npm -v + + condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) + displayName: 'Verify Node.js Version' + +- script: python3 -m pip install requests + +- task: PythonScript@0 + displayName: 'Run GitHub Action via Python Wrapper' + inputs: + scriptPath: 'tools/ci_build/run_gh_action.py' + arguments: '${{ parameters.action_version }}' diff --git a/tools/ci_build/github/azure-pipelines/templates/setup-maven.yml b/tools/ci_build/github/azure-pipelines/templates/setup-maven.yml new file mode 100644 index 0000000000000..7ad755c50e541 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/templates/setup-maven.yml @@ -0,0 +1,47 @@ +steps: +- task: AzureCLI@2 + displayName: 'Download and Extract Maven using Azure CLI' + inputs: + azureSubscription: 'AIInfraBuildOnnxRuntimeOSS' + scriptType: 'pscore' # Use PowerShell Core + scriptLocation: 'inlineScript' + inlineScript: | + # Define the scope for the access token + $authScope = "https://mspmecloud.onmicrosoft.com/RebuildManager.Web/.default" + + Write-Host "Requesting access token for scope: $authScope" + $tokenInfo = az account get-access-token --scope $authScope | ConvertFrom-Json + + # Set the token as an environment variable for the next tool to use + $env:TRT_UPLOAD_AUTH_TOKEN = $tokenInfo.accessToken + Write-Host "Successfully configured TRT_UPLOAD_AUTH_TOKEN environment variable." + + # Execute the Terrapin Retrieval Tool to download Maven + Write-Host "Downloading Maven..." + & C:\local\Terrapin\TerrapinRetrievalTool.exe -b https://vcpkg.storage.devpackages.microsoft.io/artifacts/ -a true -u Environment -p https://dlcdn.apache.org/maven/maven-3/3.9.11/binaries/apache-maven-3.9.11-bin.zip -s 03e2d65d4483a3396980629f260e25cac0d8b6f7f2791e4dc20bc83f9514db8d0f05b0479e699a5f34679250c49c8e52e961262ded468a20de0be254d8207076 -d $(Agent.TempDirectory)\maven.zip + + # Check if the download was successful + if ($LASTEXITCODE -ne 0) { + throw "Error downloading maven. Exit code: $LASTEXITCODE" + } + Write-Host "Maven downloaded successfully." + + # Extract the downloaded maven zip file + $arguments = "x", "$(Agent.TempDirectory)\maven.zip", "-y", "-o$(Agent.TempDirectory)" + Write-Output "Executing: 7z.exe $arguments" + & 7z.exe $arguments + + # Check if the extraction was successful + if ($LASTEXITCODE -ne 0) { + throw "Error extracting maven.zip. Exit code: $LASTEXITCODE" + } + Write-Host "Maven extracted successfully." + + # Prepend the Maven bin directory to the PATH for subsequent steps in the job + Write-Host "Adding Maven to the pipeline PATH." + Write-Host "##vso[task.prependpath]$(Agent.TempDirectory)\apache-maven-3.9.11\bin" + +- script: | + echo "Verifying Maven installation..." 
+ mvn --version + displayName: 'Verify Maven Version' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/stages/mac-ios-packaging-build-stage.yml b/tools/ci_build/github/azure-pipelines/templates/stages/mac-ios-packaging-build-stage.yml index 5eef1ae8e8e93..f377ad863cbe0 100644 --- a/tools/ci_build/github/azure-pipelines/templates/stages/mac-ios-packaging-build-stage.yml +++ b/tools/ci_build/github/azure-pipelines/templates/stages/mac-ios-packaging-build-stage.yml @@ -87,11 +87,9 @@ stages: removeProfile: true displayName: 'Install ORT Mobile Test Provisioning Profile' - - task: UsePythonVersion@0 - inputs: - versionSpec: "3.12" - addToPath: true - architecture: "x64" + - template: ../setup-build-tools.yml + parameters: + host_cpu_arch: arm64 - template: ../use-xcode-version.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/templates/telemetry-steps.yml b/tools/ci_build/github/azure-pipelines/templates/telemetry-steps.yml index a8bc789e1cffe..8db4a8f8c8658 100644 --- a/tools/ci_build/github/azure-pipelines/templates/telemetry-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/telemetry-steps.yml @@ -5,6 +5,7 @@ steps: # TELEMETRYGUID is a runtime variable that is stored on the pipeline in an old-fashioned way. So it cannot be used in # template expressions. We access it through env variables. - task: PowerShell@2 + condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT')) displayName: 'Set TelemetryOption variable and optionally create TraceLoggingConfigPrivate.h for WinML Telemetry' inputs: targetType: filePath diff --git a/tools/ci_build/github/azure-pipelines/templates/win-ci.yml b/tools/ci_build/github/azure-pipelines/templates/win-ci.yml index eec0f273581a2..c54b13b8dec6a 100644 --- a/tools/ci_build/github/azure-pipelines/templates/win-ci.yml +++ b/tools/ci_build/github/azure-pipelines/templates/win-ci.yml @@ -40,6 +40,19 @@ parameters: type: string default: '' +- name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + +- name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number + # for inference packages '', for training packages '-training' # used for drop-extra and c api artifacts (onnxruntime-win-* or onnxrutime-training-win-*) - name: artifact_name_suffix @@ -110,6 +123,11 @@ stages: - output: pipelineArtifact targetPath: $(Build.ArtifactStagingDirectory) artifactName: 'onnxruntime${{ parameters.artifact_name_suffix }}-win-${{ parameters.packageName }}' + + - ${{ if eq(parameters.buildJava, 'true') }}: + - output: pipelineArtifact + targetPath: $(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }} + artifactName: 'drop-onnxruntime-java-win-${{ parameters.packageName }}${{parameters.artifact_name_suffix}}' # GPU build has two jobs. This is the first one. 
- ${{ if contains(parameters.ort_build_pool_name, 'GPU') }}: - output: pipelineArtifact @@ -134,18 +152,9 @@ stages: clean: true submodules: none - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: ${{ parameters.buildArch }} - - - template: telemetry-steps.yml - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' + - template: setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' - ${{ if eq(parameters['buildJava'], 'true') }}: - task: JavaToolInstaller@0 @@ -154,12 +163,6 @@ stages: jdkArchitectureOption: ${{ parameters.buildArch }} jdkSourceOption: 'PreInstalled' - - - task: NodeTool@0 - condition: and(succeeded(), eq('${{ parameters.buildNodejs}}', true)) - inputs: - versionSpec: '22.x' - - ${{ if ne(parameters.CudaVersion, '') }}: - template: jobs/download_win_gpu_library.yml parameters: @@ -183,21 +186,15 @@ stages: # For CPU job, tests are run in the same machine as building - ${{ if eq(parameters.buildJava, 'true') }}: + - template: setup-maven.yml - template: make_java_win_binaries.yml parameters: msbuildPlatform: ${{ parameters.msbuildPlatform }} java_artifact_id: ${{ parameters.java_artifact_id }} - ${{ if or(contains(parameters.buildparameter, 'use_cuda'), contains(parameters.buildparameter, 'use_tensorrt')) }}: - # When it is a GPU build, we only assemble the java binaries, testing will be done in the later stage with GPU machine - buildOnly: true - ${{ else }}: - buildOnly: false - - - task: 1ES.PublishPipelineArtifact@1 - displayName: 'Publish Java temp binaries' - inputs: - targetPath: '$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}' - artifactName: 'drop-onnxruntime-java-win-${{ parameters.packageName }}${{parameters.artifact_name_suffix}}' + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} + buildOnly: true + # All GPU builds will be tested in the next stage with GPU machine - ${{ if contains(parameters.ort_build_pool_name, 'CPU') }}: - task: PythonScript@0 diff --git a/tools/ci_build/github/azure-pipelines/templates/win-web-ci.yml b/tools/ci_build/github/azure-pipelines/templates/win-web-ci.yml index 01f73a63075e3..8b2504d61def1 100644 --- a/tools/ci_build/github/azure-pipelines/templates/win-web-ci.yml +++ b/tools/ci_build/github/azure-pipelines/templates/win-web-ci.yml @@ -76,9 +76,11 @@ jobs: git checkout -- .gitattributes workingDirectory: '$(Build.SourcesDirectory)' displayName: 'Testing: force EOL to lf on windows for /js/**' - - task: NodeTool@0 - inputs: - versionSpec: '22.x' + + - template: setup-build-tools.yml + parameters: + host_cpu_arch: 'x64' + - task: DownloadPipelineArtifact@2 inputs: patterns: '${{ parameters.BuildConfig }}_wasm/**/*' diff --git a/tools/ci_build/github/azure-pipelines/templates/windowsai-steps.yml b/tools/ci_build/github/azure-pipelines/templates/windowsai-steps.yml index a084d28e84c1e..915ff517742fd 100644 --- a/tools/ci_build/github/azure-pipelines/templates/windowsai-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/windowsai-steps.yml @@ -23,19 +23,9 @@ jobs: inputs: version: '6.x' - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - ${{ if eq(parameters.BuildArch, 'x86') }}: - architecture: 'x86' - - - task: PipAuthenticate@1 - displayName: 'Pip Authenticate' - inputs: - artifactFeeds: 'Lotus' - - - template: telemetry-steps.yml + - template: 
setup-build-tools.yml + parameters: + host_cpu_arch: ${{ parameters.BuildArch }} - task: NuGetCommand@2 displayName: 'NuGet restore' diff --git a/tools/ci_build/github/azure-pipelines/win-qnn-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-qnn-ci-pipeline.yml deleted file mode 100644 index c350ba2ce402c..0000000000000 --- a/tools/ci_build/github/azure-pipelines/win-qnn-ci-pipeline.yml +++ /dev/null @@ -1,112 +0,0 @@ -##### start trigger Don't edit it manually, Please do edit set-trigger-rules.py #### -### please do rerun set-trigger-rules.py ### -trigger: - branches: - include: - - main - - rel-* - paths: - exclude: - - docs/** - - README.md - - CONTRIBUTING.md - - BUILD.md - - 'js/web' - - 'onnxruntime/core/providers/js' -pr: - branches: - include: - - main - - rel-* - paths: - exclude: - - docs/** - - README.md - - CONTRIBUTING.md - - BUILD.md - - 'js/web' - - 'onnxruntime/core/providers/js' -#### end trigger #### - -parameters: - -- name: QnnSdk - displayName: QNN SDK version - type: string - default: 2.37.1.250807 - -jobs: -- job: 'BUILD_QNN_EP' - pool: 'Onnxruntime-QNNEP-Windows-2022-CPU' - variables: - MsbuildArguments: '-detailedsummary -maxcpucount -consoleloggerparameters:PerformanceSummary' - OnnxRuntimeBuildDirectory: '$(Build.BinariesDirectory)' - DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true - buildArch: x64 - setVcvars: true - BuildConfig: 'RelWithDebInfo' - ALLOW_RELEASED_ONNX_OPSET_ONLY: '1' - TODAY: $[format('{0:dd}{0:MM}{0:yyyy}', pipeline.startTime)] - timeoutInMinutes: 120 - workspace: - clean: all - strategy: - matrix: - SHARED_LIB: - QnnLibKind: 'shared_lib' - STATIC_LIB: - QnnLibKind: 'static_lib' - steps: - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.12' - addToPath: true - architecture: $(buildArch) - - - template: templates/jobs/download_win_qnn_sdk.yml - parameters: - QnnSDKVersion: ${{ parameters.QnnSdk }} - - - template: templates/jobs/win-ci-build-steps.yml - parameters: - WithCache: True - Today: $(TODAY) - AdditionalKey: "win-qnn | $(BuildConfig)" - BuildPyArguments: >- - --config $(BuildConfig) - --build_dir $(Build.BinariesDirectory) - --cmake_generator "Visual Studio 17 2022" - --build_java - --build_shared_lib - --use_qnn $(QnnLibKind) - --qnn_home $(QnnSDKRootDir) - --use_binskim_compliant_compile_flags - --update --parallel - MsbuildArguments: $(MsbuildArguments) - BuildArch: $(buildArch) - Platform: 'x64' - BuildConfig: $(BuildConfig) - - - script: | - python $(Build.SourcesDirectory)\tools\ci_build\build.py ^ - --config $(BuildConfig) ^ - --build_dir $(Build.BinariesDirectory) ^ - --cmake_generator "Visual Studio 17 2022" ^ - --build_java ^ - --build_shared_lib ^ - --use_qnn $(QnnLibKind) ^ - --qnn_home $(QnnSDKRootDir) ^ - --use_binskim_compliant_compile_flags ^ - --test --enable_onnx_tests - displayName: 'Run unit tests' - - - script: | - .\$(BuildConfig)\onnx_test_runner -j 1 -e qnn -i "backend_path|$(QnnSDKRootDir)\lib\x86_64-windows-msvc\QnnCpu.dll" $(Build.SourcesDirectory)\cmake\external\onnx\onnx\backend\test\data\node - workingDirectory: '$(Build.BinariesDirectory)\$(BuildConfig)' - displayName: 'Run ONNX Tests' - - - script: | - .\$(BuildConfig)\onnx_test_runner -j 1 -e qnn -i "backend_path|$(QnnSDKRootDir)\lib\x86_64-windows-msvc\QnnCpu.dll" C:\data\float32_models - workingDirectory: '$(Build.BinariesDirectory)\$(BuildConfig)' - displayName: 'Run float32 model tests' diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu index 
177df14d6eaee..2a65e7c26b20b 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu @@ -1,4 +1,5 @@ -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +FROM $BASEIMAGE ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-17 diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_rocm b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_rocm index 957eef8046eaf..3337af3be6074 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_rocm +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_rocm @@ -1,4 +1,5 @@ -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +FROM $BASEIMAGE ARG ROCM_VERSION=6.2.3 #Add our own dependencies diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_webgpu b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_webgpu index 56d67599f0bce..0007a4e06f7c0 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_webgpu +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_webgpu @@ -1,4 +1,5 @@ -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +FROM $BASEIMAGE ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-17 diff --git a/tools/ci_build/github/linux/docker/inference/aarch64/default/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/aarch64/default/cpu/Dockerfile index c8e164282a2f0..8b2083c2ccfc1 100644 --- a/tools/ci_build/github/linux/docker/inference/aarch64/default/cpu/Dockerfile +++ b/tools/ci_build/github/linux/docker/inference/aarch64/default/cpu/Dockerfile @@ -2,7 +2,8 @@ # Licensed under the MIT License. 
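+# The base image below can now be overridden at build time via the BASEIMAGE build
+# argument, e.g. (illustrative): docker build --build-arg BASEIMAGE=<registry>/<image>:<tag> -f Dockerfile .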
# This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_aarch64_almalinux8_gcc14_dotnet:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_aarch64_almalinux8_gcc14_dotnet:20250724.1 +FROM $BASEIMAGE ENV LANG=en_US.UTF-8 ENV LC_ALL=en_US.UTF-8 diff --git a/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile index 31bd41226263f..f5143d5ac9ab9 100644 --- a/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile +++ b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile @@ -1,4 +1,5 @@ -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_aarch64_almalinux8_gcc14:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_aarch64_almalinux8_gcc14:20250724.1 +FROM $BASEIMAGE ADD scripts /tmp/scripts RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile index 461464093688a..cfc2ce7079148 100644 --- a/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile @@ -2,7 +2,8 @@ # Licensed under the MIT License. # This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14_dotnet:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14_dotnet:20250724.1 +FROM $BASEIMAGE ENV LANG=en_US.UTF-8 ENV LC_ALL=en_US.UTF-8 diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile index 043291065736d..8401393a661b1 100644 --- a/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile @@ -2,7 +2,8 @@ # Licensed under the MIT License. 
# This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_almalinux8_gcc12_dotnet:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_almalinux8_gcc12_dotnet:20250724.1 +FROM $BASEIMAGE ARG TRT_VERSION #Install TensorRT only if TRT_VERSION is not empty diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile index 43da13df2fe8b..b923febc1227f 100644 --- a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile @@ -1,4 +1,5 @@ -FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +ARG BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_almalinux8_gcc14:20250724.1 +FROM $BASEIMAGE ADD scripts /tmp/scripts RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && rm -rf /tmp/scripts diff --git a/tools/ci_build/github/linux/java_copy_strip_binary.sh b/tools/ci_build/github/linux/java_copy_strip_binary.sh deleted file mode 100755 index 329c1b0ab9b9e..0000000000000 --- a/tools/ci_build/github/linux/java_copy_strip_binary.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash -set -e -o -x - -while getopts r:a:l:n:c:h:v: parameter_Option -do case "${parameter_Option}" -in -r) BINARY_DIR=${OPTARG};; -a) ARTIFACT_NAME=${OPTARG};; -c) BUILD_CONFIG=${OPTARG};; -l) LIB_NAME=${OPTARG};; -n) NATIVE_LIB_NAME=${OPTARG};; -h) ARCH=${OPTARG};; #must match the JAVA_OS_ARCH variable in onnxruntime_java.cmake -v) VERSION_NUMBER=${OPTARG};; -esac -done - -EXIT_CODE=1 - -uname -a - -echo "Version: $VERSION_NUMBER" -if [[ $LIB_NAME == *.dylib ]] && [[ $ARCH == 'osx-x86_64' ]]; then - ARCH='osx-x64' -elif [[ $LIB_NAME == *.dylib ]] && [[ $ARCH == 'osx-arm64' ]]; then - ARCH='osx-aarch64' -fi -NATIVE_FOLDER=ai/onnxruntime/native/$ARCH - -mkdir -p $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER - -echo "Directories created" - -echo "Copy debug symbols in a separate file and strip the original binary." - -if [[ $LIB_NAME == *.dylib ]] -then - # ORT LIB - dsymutil $BINARY_DIR/$BUILD_CONFIG/$LIB_NAME -o $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/$LIB_NAME.dSYM - cp $BINARY_DIR/$BUILD_CONFIG/$LIB_NAME $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime.dylib - strip -S $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime.dylib - # JNI Lib - dsymutil $BINARY_DIR/$BUILD_CONFIG/$NATIVE_LIB_NAME -o $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/$NATIVE_LIB_NAME.dSYM - cp $BINARY_DIR/$BUILD_CONFIG/$NATIVE_LIB_NAME $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime4j_jni.dylib - strip -S $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime4j_jni.dylib - # Add custom lib for testing. 
This should be added to testing.jar - cp $BINARY_DIR/$BUILD_CONFIG/libcustom_op_library.dylib $BINARY_DIR/$ARTIFACT_NAME -elif [[ $LIB_NAME == *.so ]] -then - cp $BINARY_DIR/$BUILD_CONFIG/$LIB_NAME $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime.so - cp $BINARY_DIR/$BUILD_CONFIG/$NATIVE_LIB_NAME $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime4j_jni.so - # Add custom lib - cp $BINARY_DIR/$BUILD_CONFIG/libcustom_op_library.so $BINARY_DIR/$ARTIFACT_NAME - # Add cuda provider if it exists - if [[ -f "$BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_cuda.so" ]]; then - cp $BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_shared.so $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime_providers_shared.so - cp $BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_cuda.so $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime_providers_cuda.so - fi - # Add tensorrt provider if it exists - if [[ -f "$BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_tensorrt.so" ]]; then - cp $BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_shared.so $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime_providers_shared.so - cp $BINARY_DIR/$BUILD_CONFIG/libonnxruntime_providers_tensorrt.so $BINARY_DIR/$ARTIFACT_NAME/$NATIVE_FOLDER/libonnxruntime_providers_tensorrt.so - fi -fi - -find $BINARY_DIR/$ARTIFACT_NAME -ls -rm -fr $BINARY_DIR/$ARTIFACT_NAME/jar - -EXIT_CODE=$? - -set -e -exit $EXIT_CODE diff --git a/tools/ci_build/github/linux/java_linux_final_test.sh b/tools/ci_build/github/linux/java_linux_final_test.sh index 71eb24dc7a1e2..cdbfd2bad10a8 100755 --- a/tools/ci_build/github/linux/java_linux_final_test.sh +++ b/tools/ci_build/github/linux/java_linux_final_test.sh @@ -23,6 +23,8 @@ uname -a cd "$BINARY_DIR/onnxruntime-java" rm -f *.asc rm -f *.sha256 +rm -f *.sha1 +rm -f *.md5 rm -f *.sha512 rm -f *.pom ls diff --git a/tools/ci_build/github/windows/jar_esrp_dll.ps1 b/tools/ci_build/github/windows/jar_esrp_dll.ps1 index 8492d7591271b..2a53374d845a0 100644 --- a/tools/ci_build/github/windows/jar_esrp_dll.ps1 +++ b/tools/ci_build/github/windows/jar_esrp_dll.ps1 @@ -1,41 +1,70 @@ -$instruction = $args[0] # extract or repack -$original_jar_file_directory = $args[1] # The directory where the original jar file is located -$original_jar_file_name = $args[2] # The name of the original jar file +param( + [string]$instruction, # Should be 'extract' or 'repack' + [string]$jar_file_directory # The directory where the original jar file is located +) -$original_jar_file_full_path = "$original_jar_file_directory\$original_jar_file_name" -$extracted_file_directory = "$original_jar_file_directory\jar_extracted_full_files" +$extracted_file_directory = Join-Path $jar_file_directory "jar_extracted_full_files" +$state_file = Join-Path $jar_file_directory "repack_list.txt" if ($instruction -eq "extract") { - Write-Host "Extracting the jar file $original_jar_file_full_path..." - & 7z x $original_jar_file_full_path -o"$extracted_file_directory" - if ($lastExitCode -ne 0) { - Write-Host -Object "7z extracting the jar file command failed. Exitcode: $exitCode" - exit $lastExitCode + # Find the main jar file(s) by looking for names that start with 'onnxruntime' + # and excluding common suffixes for sources and javadocs. 
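+    # For example (file names purely illustrative), a directory containing
+    #   onnxruntime_gpu-1.23.0.jar, onnxruntime_gpu-1.23.0-sources.jar and onnxruntime_gpu-1.23.0-javadoc.jar
+    # yields only onnxruntime_gpu-1.23.0.jar for extraction.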
+ $main_jar_files = Get-ChildItem -Path $jar_file_directory -Filter onnxruntime*.jar | Where-Object { $_.Name -notlike '*-sources.jar' -and $_.Name -notlike '*-javadoc.jar' } + + if ($main_jar_files.Count -eq 0) { + Write-Error "No main ONNX Runtime JAR file found in directory: $jar_file_directory" + exit 1 } - Write-Host "Extracted files directory: $extracted_file_directory" - Write-Host "Removing the original jar file..." - Remove-Item -Path "$original_jar_file_full_path" -Force - Write-Host "Removed the original jar file." -} -elseif ($instruction -eq "repack") { + # Clear any previous state file + if (Test-Path $state_file) { + Remove-Item $state_file + } + + foreach ($jar_file in $main_jar_files) { + Write-Host "Extracting the jar file $($jar_file.FullName)..." + & 7z x $jar_file.FullName -o"$extracted_file_directory" + if ($LASTEXITCODE -ne 0) { + Write-Error "7z failed to extract the jar file. Exitcode: $LASTEXITCODE" + exit $LASTEXITCODE + } + + # Save the original name for repacking, then remove the file + $jar_file.Name | Out-File -FilePath $state_file -Append + Write-Host "Removing the original jar file: $($jar_file.FullName)" + Remove-Item -Path $jar_file.FullName -Force + } + Write-Host "Extracted files to directory: $extracted_file_directory" + +} elseif ($instruction -eq "repack") { + if (-not (Test-Path $state_file)) { + Write-Error "State file '$state_file' not found. Cannot repack." + exit 1 + } + Write-Host "Removing ESRP's CodeSignSummary file..." - # It is the summary generated by ESRP tool. It is not needed in the jar file. - Remove-Item -Path "$extracted_file_directory/CodeSignSummary*.*" -Force + Remove-Item -Path "$extracted_file_directory/CodeSignSummary*.*" -Force -ErrorAction SilentlyContinue Write-Host "Removed ESRP's CodeSignSummary file." - Write-Host "Repacking the jar file from directory $extracted_file_directory..." - & 7z a "$original_jar_file_full_path" "$extracted_file_directory\*" - if ($lastExitCode -ne 0) { - Write-Host -Object "7z repacking the jar file command failed. Exitcode: $exitCode" - exit $lastExitCode + $jar_files_to_repack = Get-Content $state_file + + foreach ($jar_file_name in $jar_files_to_repack) { + $repacked_jar_file_path = Join-Path $jar_file_directory $jar_file_name + Write-Host "Repacking to $repacked_jar_file_path from directory $extracted_file_directory..." + & 7z a "$repacked_jar_file_path" "$extracted_file_directory\*" + if ($LASTEXITCODE -ne 0) { + Write-Error "7z failed to repack the jar file. Exitcode: $LASTEXITCODE" + exit $LASTEXITCODE + } + Write-Host "Repacked the jar file $repacked_jar_file_path." } - Write-Host "Repacked the jar file $original_jar_file_full_path." - Write-Host "Removing the extracted files..." + Write-Host "Removing the extracted files and state file..." Remove-Item -Path "$extracted_file_directory" -Recurse -Force - Write-Host "Removed the extracted files." -} -else { - Write-Host "Invalid instruction: $instruction" + Remove-Item -Path $state_file -Force + Write-Host "Cleaned up temporary files." + +} else { + Write-Error "Invalid instruction: '$instruction'. Must be 'extract' or 'repack'." + exit 1 } diff --git a/tools/ci_build/github/windows/jar_gpu_packaging.ps1 b/tools/ci_build/github/windows/jar_gpu_packaging.ps1 deleted file mode 100644 index 1c94f4678f988..0000000000000 --- a/tools/ci_build/github/windows/jar_gpu_packaging.ps1 +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -$ErrorActionPreference = "Stop" -Write-Output "Start" -dir -Copy-Item -Path $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-linux-x64\ai\onnxruntime\native\linux-x64\libonnxruntime_providers_cuda.so -Destination $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-linux-x64-tensorrt\ai\onnxruntime\native\linux-x64 -pushd onnxruntime-java-linux-x64-tensorrt -Write-Output "Run 7z" -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\testing.jar libcustom_op_library.so -Remove-Item -Path libcustom_op_library.so -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\onnxruntime-$Env:ONNXRUNTIMEVERSION.jar . -popd -pushd onnxruntime-java-win-x64 -ren onnxruntime-$Env:ONNXRUNTIMEVERSION.jar onnxruntime_gpu-$Env:ONNXRUNTIMEVERSION.jar -ren onnxruntime-$Env:ONNXRUNTIMEVERSION-javadoc.jar onnxruntime_gpu-$Env:ONNXRUNTIMEVERSION-javadoc.jar -ren onnxruntime-$Env:ONNXRUNTIMEVERSION-sources.jar onnxruntime_gpu-$Env:ONNXRUNTIMEVERSION-sources.jar -ren onnxruntime-$Env:ONNXRUNTIMEVERSION.pom onnxruntime_gpu-$Env:ONNXRUNTIMEVERSION.pom -popd diff --git a/tools/ci_build/github/windows/jar_packaging.ps1 b/tools/ci_build/github/windows/jar_packaging.ps1 deleted file mode 100644 index a132ba6b26e2a..0000000000000 --- a/tools/ci_build/github/windows/jar_packaging.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. - -$ErrorActionPreference = "Stop" -Write-Output "Start" -dir -pushd onnxruntime-java-linux-x64 -Write-Output "Run 7z" -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\testing.jar libcustom_op_library.so -Remove-Item -Path libcustom_op_library.so -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\onnxruntime-$Env:ONNXRUNTIMEVERSION.jar . -popd -pushd onnxruntime-java-osx-x86_64 -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\testing.jar libcustom_op_library.dylib -Remove-Item -Path libcustom_op_library.dylib -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\onnxruntime-$Env:ONNXRUNTIMEVERSION.jar . -popd -pushd onnxruntime-java-linux-aarch64 -Remove-Item -Path libcustom_op_library.so -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\onnxruntime-$Env:ONNXRUNTIMEVERSION.jar . -popd -pushd onnxruntime-java-osx-arm64 -Remove-Item -Path libcustom_op_library.dylib -7z a $Env:BUILD_BINARIESDIRECTORY\java-artifact\onnxruntime-java-win-x64\onnxruntime-$Env:ONNXRUNTIMEVERSION.jar . -popd diff --git a/tools/ci_build/github/windows/jar_packaging.py b/tools/ci_build/github/windows/jar_packaging.py new file mode 100644 index 0000000000000..2354363610251 --- /dev/null +++ b/tools/ci_build/github/windows/jar_packaging.py @@ -0,0 +1,312 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +""" +Packages ONNX Runtime Java artifacts by combining native libraries from +various platform builds into final Java archive (JAR) files using 7z. +""" + +import argparse +import glob +import os +import re +import shutil +import subprocess +import sys +from pathlib import Path +from typing import Any + +# Add semver as a dependency +try: + import semver +except ImportError: + print("Error: The 'semver' package is not installed. 
Please add it to your requirements.txt.", file=sys.stderr) + sys.exit(1) + +# --- Helper Functions for Archiving --- + + +def find_7z_executable(): + """Finds the 7z executable, checking the system PATH and default installation locations.""" + # 1. Check if '7z' is in the PATH + seven_zip_exe = shutil.which("7z") + if seven_zip_exe: + return seven_zip_exe + + # 2. Check the default installation directory under Program Files + program_files = os.environ.get("ProgramFiles") # noqa: SIM112 + if program_files: + default_path = Path(program_files) / "7-Zip" / "7z.exe" + if default_path.is_file(): + return str(default_path) + + return None + + +SEVEN_ZIP_EXE = find_7z_executable() + + +def add_file_to_archive(archive_path: Path, file_to_add: Path, description: str): + """Appends a single file to a zip archive (JAR file) using 7z.""" + print(f" -> {description}...") + try: + if not SEVEN_ZIP_EXE: + raise FileNotFoundError + # Run 7z from the file's parent directory to ensure a clean archive path. + subprocess.run( + [SEVEN_ZIP_EXE, "a", str(archive_path), file_to_add.name], + check=True, + cwd=file_to_add.parent, + capture_output=True, + text=True, + ) + except FileNotFoundError: + print( + "Error: '7z' command not found. Please ensure 7-Zip is installed and in your PATH, or in the default location 'C:\\Program Files\\7-Zip'.", + file=sys.stderr, + ) + raise + except subprocess.CalledProcessError as e: + print(f"Error: 7z failed to archive '{file_to_add.name}' to '{archive_path.name}'.", file=sys.stderr) + print(f"Reason: {e.stderr}", file=sys.stderr) + raise + + +def archive_directory_contents(archive_path: Path, source_dir: Path, description: str): + """Archives a directory into a zip file (JAR file) using 7z, preserving its top-level name.""" + print(f" -> {description}...") + try: + if not SEVEN_ZIP_EXE: + raise FileNotFoundError + # Run 7z from the parent of the source directory to ensure the source directory + # itself is added to the archive, preserving the path structure (e.g., 'ai/...'). + subprocess.run( + [SEVEN_ZIP_EXE, "a", str(archive_path), source_dir.name], + check=True, + cwd=source_dir.parent, + capture_output=True, + text=True, + ) + except FileNotFoundError: + print( + "Error: '7z' command not found. Please ensure 7-Zip is installed and in your PATH, or in the default location 'C:\\Program Files\\7-Zip'.", + file=sys.stderr, + ) + raise + except subprocess.CalledProcessError as e: + print(f"Error: 7z failed to archive directory '{source_dir.name}' to '{archive_path.name}'.", file=sys.stderr) + print(f"Reason: {e.stderr}", file=sys.stderr) + raise + + +# --- Validation Helpers --- + + +def validate_version(version_string: str): + """Validates if the version string conforms to the project's format.""" + print(f"Validating version string: {version_string}...") + try: + version_info = semver.Version.parse(version_string) + if version_info.prerelease: + prerelease_tag = version_info.prerelease + allowed_tags_pattern = r"^(alpha|beta|rc)\d+$" + if not re.match(allowed_tags_pattern, str(prerelease_tag)): + raise ValueError(f"Pre-release tag '{prerelease_tag}' is not an allowed type.") + except ValueError as e: + print(f"Error: Version '{version_string}' is not valid. 
Reason: {e}", file=sys.stderr) + print("Expected format is 'X.Y.Z' or 'X.Y.Z-(alpha|beta|rc)N'.", file=sys.stderr) + sys.exit(1) + print("Version format is valid.") + + +def validate_companion_jars(base_jar_path: Path): + """Ensures that -sources.jar and -javadoc.jar files exist.""" + print("Validating presence of companion -sources.jar and -javadoc.jar...") + base_stem = base_jar_path.stem + directory = base_jar_path.parent + sources_jar_path = directory / f"{base_stem}-sources.jar" + + if not sources_jar_path.is_file(): + print(f"Error: Missing companion sources JAR. Expected: {sources_jar_path.name}", file=sys.stderr) + sys.exit(1) + + if not list(directory.glob(f"{base_stem}-javadoc*.jar")): + print(f"Error: Missing companion javadoc JAR. Expected file like: {base_stem}-javadoc.jar", file=sys.stderr) + sys.exit(1) + print("Companion JARs are present.") + + +# --- Core Logic Function --- + + +def process_platform_archive( + platform_path: Path, + main_archive_file: Path, + test_archive_file: Path, + custom_lib_file: str, + archive_custom_lib: bool, +): + """Processes a single platform directory, adding only the 'ai' subdirectory to the main JAR.""" + print(f"Processing platform: {platform_path}...") + + # 1. Handle the custom op library. + custom_lib_full_path = platform_path / custom_lib_file + if custom_lib_file and custom_lib_full_path.is_file(): + if archive_custom_lib: + add_file_to_archive(test_archive_file, custom_lib_full_path, f"Archiving '{custom_lib_file}' to test JAR") + # Always remove the lib after processing to prevent it from being in the main JAR. + print(f" -> Removing '{custom_lib_file}' from source directory...") + custom_lib_full_path.unlink() + elif archive_custom_lib: + # If we expected to archive the file but it wasn't there, it's a fatal error. + print(f"Error: Expected custom op library '{custom_lib_file}' not found in {platform_path}", file=sys.stderr) + sys.exit(1) + + # 2. Archive only the native library directory ('ai/...') to the main JAR. + # This explicitly excludes other files or folders like '_manifest'. + native_lib_root = platform_path / "ai" + if native_lib_root.is_dir(): + archive_directory_contents( + main_archive_file, native_lib_root, f"Archiving native libs from '{native_lib_root.name}' to main JAR" + ) + else: + print(f"Warning: Native library path 'ai/' not found in {platform_path}. Skipping main archive step.") + + print(f"Finished platform: {platform_path}") + print("--------------------------------") + + +def run_packaging(package_type: str, build_dir: str): + """The main logic for the packaging process, refactored to be callable.""" + artifacts_base_dir = Path(build_dir) / "java-artifact" + primary_package_dir = artifacts_base_dir / "onnxruntime-java-win-x64" + if not primary_package_dir.is_dir(): + print(f"Error: Primary package directory not found at '{primary_package_dir}'", file=sys.stderr) + sys.exit(1) + + # --- Version Discovery --- + print(f"Discovering version from JAR files in '{primary_package_dir}'...") + jar_pattern = str(primary_package_dir / "onnxruntime*-*.jar") + jar_files = [Path(f) for f in glob.glob(jar_pattern) if "-sources" not in f and "-javadoc" not in f] + if not jar_files: + print( + f"Error: Could not find a main JAR file in '{primary_package_dir}' to determine the version.", + file=sys.stderr, + ) + sys.exit(1) + + main_jar_file = jar_files[0] + validate_companion_jars(main_jar_file) + + version = "" + stem = main_jar_file.stem + try: + # Per user feedback, the version is everything after the first dash. 
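+        # e.g. a stem of "onnxruntime_gpu-1.23.0-rc1" (illustrative) splits into
+        # ("onnxruntime_gpu", "1.23.0-rc1"), keeping any pre-release suffix intact.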
+ _, version = stem.split("-", 1) + except ValueError: + # This will happen if there is no dash in the filename, which is unexpected. + print( + f"Error: Could not parse version from JAR file '{main_jar_file.name}'. Expected format -.jar", + file=sys.stderr, + ) + sys.exit(1) + + if not version: + print( + f"Error: Could not parse version from JAR file '{main_jar_file.name}'. Version part is empty.", + file=sys.stderr, + ) + sys.exit(1) + + print(f"Version discovered: {version}") + validate_version(version) + + # --- Package Definitions --- + package_definitions: dict[str, dict[str, Any]] = { + "cpu": { + "platforms": [ + {"path": "onnxruntime-java-linux-x64", "lib": "libcustom_op_library.so", "archive_lib": True}, + {"path": "onnxruntime-java-osx-x86_64", "lib": "libcustom_op_library.dylib", "archive_lib": True}, + {"path": "onnxruntime-java-linux-aarch64", "lib": "libcustom_op_library.so", "archive_lib": False}, + {"path": "onnxruntime-java-osx-arm64", "lib": "libcustom_op_library.dylib", "archive_lib": False}, + ] + }, + "gpu": { + "platforms": [ + {"path": "onnxruntime-java-linux-x64", "lib": "libcustom_op_library.so", "archive_lib": False} + ] + }, + } + + # --- Processing Loop --- + print(f"\n## Configuring for {package_type.upper()} package build...") + + final_main_archive = main_jar_file + final_test_archive = primary_package_dir / "testing.jar" + + print(f"Using '{final_main_archive.name}' as the base for in-place packaging.") + + if not final_test_archive.is_file(): + print(f"Error: Base 'testing.jar' not found at '{final_test_archive}'.", file=sys.stderr) + sys.exit(1) + + platforms_to_process = package_definitions[package_type]["platforms"] + + for platform in platforms_to_process: + platform_full_path = artifacts_base_dir / platform["path"] + if not platform_full_path.is_dir(): + print(f"Error: Required platform artifact directory not found: {platform_full_path}", file=sys.stderr) + sys.exit(1) + + process_platform_archive( + platform_path=platform_full_path, + main_archive_file=final_main_archive, + test_archive_file=final_test_archive, + custom_lib_file=platform["lib"], + archive_custom_lib=platform["archive_lib"], + ) + + print("\nScript completed successfully.") + + +def main(): + """Main script entry point for command-line execution.""" + if sys.platform != "win32": + print("Error: This script is intended to be run on Windows.", file=sys.stderr) + sys.exit(1) + + parser = argparse.ArgumentParser(description="Package ONNX Runtime Java artifacts.") + parser.add_argument( + "--package_type", + type=str, + choices=["cpu", "gpu"], + default="cpu", + help="The type of package to build ('cpu' or 'gpu').", + ) + parser.add_argument( + "--build_dir", + type=str, + help="The build directory containing the java-artifact folder.", + ) + args = parser.parse_args() + + build_dir = args.build_dir + if not build_dir: + try: + build_dir = os.environ["BUILD_BINARIESDIRECTORY"] + except KeyError: + print( + "Error: Environment variable BUILD_BINARIESDIRECTORY is not set and --build_dir is not provided.", + file=sys.stderr, + ) + sys.exit(1) + + run_packaging(args.package_type, build_dir) + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"\nAn unhandled error occurred: {e}", file=sys.stderr) + sys.exit(1) diff --git a/tools/ci_build/github/windows/jar_packaging_test.py b/tools/ci_build/github/windows/jar_packaging_test.py new file mode 100644 index 0000000000000..91b68728dad15 --- /dev/null +++ b/tools/ci_build/github/windows/jar_packaging_test.py @@ -0,0 
+1,157 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import zipfile +from pathlib import Path + +import jar_packaging # The refactored script +import pytest + + +# Helper to create an empty file +def create_empty_file(path): + Path(path).touch() + + +# Helper to create a dummy JAR file +def create_dummy_jar(path): + with zipfile.ZipFile(path, "w") as zf: + zf.writestr("META-INF/MANIFEST.MF", "Manifest-Version: 1.0\n") + + +@pytest.fixture +def directory_setup_factory(tmp_path): + """ + A factory fixture that returns a function to set up a test directory + for a given package type and version. + """ + + def _setup_test_directory(package_type: str, version_string: str): + """Sets up a temporary directory structure mimicking the build artifacts.""" + java_artifact_dir = tmp_path / "java-artifact" + win_dir = java_artifact_dir / "onnxruntime-java-win-x64" + linux_dir = java_artifact_dir / "onnxruntime-java-linux-x64" + osx_dir = java_artifact_dir / "onnxruntime-java-osx-x86_64" + + # --- Main artifact directory (Windows) --- + win_dir.mkdir(parents=True, exist_ok=True) + artifact_name = f"onnxruntime_{package_type}" if package_type == "gpu" else "onnxruntime" + create_dummy_jar(win_dir / f"{artifact_name}-{version_string}.jar") + create_dummy_jar(win_dir / f"{artifact_name}-{version_string}-sources.jar") + create_dummy_jar(win_dir / f"{artifact_name}-{version_string}-javadoc.jar") + create_empty_file(win_dir / f"{artifact_name}-{version_string}.pom") + create_dummy_jar(win_dir / "testing.jar") + (win_dir / "_manifest" / "spdx_2.2").mkdir(parents=True, exist_ok=True) + + # --- Linux platform --- + linux_native_dir = linux_dir / "ai" / "onnxruntime" / "native" / "linux-x64" + linux_native_dir.mkdir(parents=True, exist_ok=True) + create_empty_file(linux_dir / "libcustom_op_library.so") + create_empty_file(linux_native_dir / "libonnxruntime.so") + create_empty_file(linux_native_dir / "libonnxruntime4j_jni.so") + if package_type == "gpu": + create_empty_file(linux_native_dir / "libonnxruntime_providers_cuda.so") + (linux_dir / "_manifest" / "spdx_2.2").mkdir(parents=True, exist_ok=True) + + # --- macOS and other platforms (for CPU test) --- + if package_type == "cpu": + osx_native_dir = osx_dir / "ai" / "onnxruntime" / "native" / "osx-x86_64" + osx_native_dir.mkdir(parents=True, exist_ok=True) + create_empty_file(osx_dir / "libcustom_op_library.dylib") + create_empty_file(osx_native_dir / "libonnxruntime.dylib") + create_empty_file(osx_native_dir / "libonnxruntime4j_jni.dylib") + (osx_dir / "_manifest" / "spdx_2.2").mkdir(parents=True, exist_ok=True) + + # Add linux-aarch64 and osx-arm64 for CPU test + linux_aarch64_dir = java_artifact_dir / "onnxruntime-java-linux-aarch64" + linux_aarch64_native_dir = linux_aarch64_dir / "ai" / "onnxruntime" / "native" / "linux-aarch64" + linux_aarch64_native_dir.mkdir(parents=True, exist_ok=True) + create_empty_file(linux_aarch64_dir / "libcustom_op_library.so") + + osx_arm64_dir = java_artifact_dir / "onnxruntime-java-osx-arm64" + osx_arm64_native_dir = osx_arm64_dir / "ai" / "onnxruntime" / "native" / "osx-arm64" + osx_arm64_native_dir.mkdir(parents=True, exist_ok=True) + create_empty_file(osx_arm64_dir / "libcustom_op_library.dylib") + + return tmp_path + + return _setup_test_directory + + +@pytest.mark.parametrize("version_string", ["1.23.0", "1.23.0-rc1"]) +def test_gpu_packaging(directory_setup_factory, version_string): + """ + Tests the GPU packaging logic for both release and pre-release versions + 
to ensure correct files are added to the JARs. + """ + temp_build_dir = directory_setup_factory("gpu", version_string) + + # Run the packaging script logic + jar_packaging.run_packaging("gpu", str(temp_build_dir)) + + # --- Verification --- + win_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-win-x64" + main_jar_path = win_dir / f"onnxruntime_gpu-{version_string}.jar" + testing_jar_path = win_dir / "testing.jar" + + # 1. Verify the main JAR contains the Linux native libraries + with zipfile.ZipFile(main_jar_path, "r") as zf: + jar_contents = zf.namelist() + assert "ai/onnxruntime/native/linux-x64/libonnxruntime.so" in jar_contents + assert "ai/onnxruntime/native/linux-x64/libonnxruntime4j_jni.so" in jar_contents + assert "ai/onnxruntime/native/linux-x64/libonnxruntime_providers_cuda.so" in jar_contents + + # 2. Verify the testing JAR does not contain the custom op library for GPU builds + with zipfile.ZipFile(testing_jar_path, "r") as zf: + jar_contents = zf.namelist() + # The custom op lib for linux is not archived for GPU builds. + # This checks that it's NOT in the test jar. + assert "libcustom_op_library.so" not in jar_contents + + # 3. Verify the custom op library was removed from the source linux directory + linux_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-linux-x64" + assert not (linux_dir / "libcustom_op_library.so").exists() + + +@pytest.mark.parametrize("version_string", ["1.23.0", "1.23.0-rc1"]) +def test_cpu_packaging(directory_setup_factory, version_string): + """ + Tests the CPU packaging logic to ensure correct files are added to the JARs. + """ + temp_build_dir = directory_setup_factory("cpu", version_string) + + # Run the packaging script logic + jar_packaging.run_packaging("cpu", str(temp_build_dir)) + + # --- Verification --- + win_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-win-x64" + main_jar_path = win_dir / f"onnxruntime-{version_string}.jar" + testing_jar_path = win_dir / "testing.jar" + + # 1. Verify the main JAR contains native libraries from all relevant platforms + with zipfile.ZipFile(main_jar_path, "r") as zf: + jar_contents = zf.namelist() + # Linux libs + assert "ai/onnxruntime/native/linux-x64/libonnxruntime.so" in jar_contents + assert "ai/onnxruntime/native/linux-x64/libonnxruntime4j_jni.so" in jar_contents + # macOS libs + assert "ai/onnxruntime/native/osx-x86_64/libonnxruntime.dylib" in jar_contents + assert "ai/onnxruntime/native/osx-x86_64/libonnxruntime4j_jni.dylib" in jar_contents + # GPU libs should NOT be present + assert "ai/onnxruntime/native/linux-x64/libonnxruntime_providers_cuda.so" not in jar_contents + + # 2. Verify the testing JAR contains the custom op libraries that should be archived + with zipfile.ZipFile(testing_jar_path, "r") as zf: + jar_contents = zf.namelist() + assert "libcustom_op_library.so" in jar_contents + assert "libcustom_op_library.dylib" in jar_contents + + # 3. 
Verify the custom op libraries were removed from the source directories + linux_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-linux-x64" + osx_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-osx-x86_64" + linux_aarch64_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-linux-aarch64" + osx_arm64_dir = temp_build_dir / "java-artifact" / "onnxruntime-java-osx-arm64" + assert not (linux_dir / "libcustom_op_library.so").exists() + assert not (osx_dir / "libcustom_op_library.dylib").exists() + assert not (linux_aarch64_dir / "libcustom_op_library.so").exists() + assert not (osx_arm64_dir / "libcustom_op_library.dylib").exists() diff --git a/tools/ci_build/github/windows/python/requirements.txt b/tools/ci_build/github/windows/python/requirements.txt index b36f6045a5962..91c3a88aca464 100644 --- a/tools/ci_build/github/windows/python/requirements.txt +++ b/tools/ci_build/github/windows/python/requirements.txt @@ -11,3 +11,6 @@ psutil onnxscript==0.3.2 jinja2 markupsafe +semver +packaging +coloredlogs diff --git a/tools/ci_build/github/windows/setup_nodejs.ps1 b/tools/ci_build/github/windows/setup_nodejs.ps1 new file mode 100644 index 0000000000000..478bb35f010f8 --- /dev/null +++ b/tools/ci_build/github/windows/setup_nodejs.ps1 @@ -0,0 +1,59 @@ +[CmdletBinding()] +param ( + # The major version of Node.js to use. Example: '20' + [Parameter(Mandatory = $true)] + [string]$MajorVersion +) + +try { + # Get the processor architecture ID using CIM + # 9 = x64, 12 = arm64 + $architectureId = (Get-CimInstance -ClassName Win32_Processor).Architecture + + # Map the architecture ID to the string used in the tool path + $archString = switch ($architectureId) { + 9 { "x64" } + 12 { "arm64" } + default { throw "Unsupported CPU architecture: $architectureId. This script only supports x64 and arm64." } + } + + Write-Host "Detected Architecture: $archString" + + # --- New Logic to find the latest version --- + $nodeVersionsPath = Join-Path $env:AGENT_TOOLSDIRECTORY "node" + if (-not (Test-Path -Path $nodeVersionsPath)) { + throw "Node.js tool directory not found at '$nodeVersionsPath'." + } + + # Find all directory names matching the major version (e.g., "20.*") + $matchingVersions = Get-ChildItem -Path $nodeVersionsPath | + Where-Object { $_.PSIsContainer -and $_.Name -like "$MajorVersion.*" } | + Select-Object -ExpandProperty Name + + if ($null -eq $matchingVersions) { + throw "No installed Node.js versions found for major version '$MajorVersion' at '$nodeVersionsPath'." + } + + # Sort the versions to find the highest one and select it + $latestVersion = $matchingVersions | Sort-Object -Descending {[version]$_} | Select-Object -First 1 + Write-Host "Found latest matching version: $latestVersion" + # --- End of New Logic --- + + # Construct the full path using the discovered latest version + $nodeToolPath = Join-Path $nodeVersionsPath "$latestVersion\$archString" + + # Verify that the final directory exists + if (-not (Test-Path -Path $nodeToolPath -PathType Container)) { + throw "Node.js tool path not found. Please ensure version '$latestVersion' for '$archString' exists at: $nodeToolPath" + } + + # Use the Azure DevOps logging command to prepend the directory to the PATH + Write-Host "##vso[task.prependpath]$nodeToolPath" + Write-Host "Successfully added Node.js $latestVersion ($archString) to the PATH." 
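+    # Note: the ##vso[task.prependpath] logging command only takes effect for subsequent
+    # pipeline steps, which is why the Node.js version is verified in a separate
+    # 'Verify Node.js Version' step in setup-build-tools.yml rather than here.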
+ +} +catch { + # If any error occurs, log it as an error in the pipeline and fail the task + Write-Host "##vso[task.logissue type=error]$($_.Exception.Message)" + exit 1 +} \ No newline at end of file diff --git a/tools/ci_build/github/windows/sign_java_artifacts.py b/tools/ci_build/github/windows/sign_java_artifacts.py new file mode 100644 index 0000000000000..19d1a4af98799 --- /dev/null +++ b/tools/ci_build/github/windows/sign_java_artifacts.py @@ -0,0 +1,139 @@ +import argparse +import hashlib +import os +import platform +import shutil +import subprocess +import sys +import tempfile +from pathlib import Path + + +def get_gpg_path() -> Path: + """Finds the path to the GPG executable.""" + if platform.system() == "Windows": + program_files_x86 = os.environ.get("ProgramFiles(x86)") # noqa: SIM112 + if not program_files_x86: + raise OSError("ProgramFiles(x86) environment variable not found.") + return Path(program_files_x86) / "gnupg/bin/gpg.exe" + + gpg_path_str = shutil.which("gpg") + if gpg_path_str is None: + raise FileNotFoundError("gpg executable not found in system PATH.") + return Path(gpg_path_str) + + +def run_command(command: list[str], check: bool = True) -> subprocess.CompletedProcess: + """Executes a command and raises an exception if it fails.""" + print(f"Running command: {' '.join(command)}") + result = subprocess.run(command, capture_output=True, text=True, check=False) + if check and result.returncode != 0: + print(f"Command failed with exit code {result.returncode}") + print(f"Stdout:\n{result.stdout}") + print(f"Stderr:\n{result.stderr}") + raise subprocess.CalledProcessError(result.returncode, command, result.stdout, result.stderr) + return result + + +def create_hash_file(file_path: Path, algorithm: str) -> None: + """Creates a checksum file for the given file using the specified algorithm.""" + print(f" - Generating {algorithm.upper()} checksum...") + try: + hasher = hashlib.new(algorithm) + with file_path.open("rb") as f: + # Read in chunks to handle large files efficiently + while chunk := f.read(8192): + hasher.update(chunk) + + hash_value = hasher.hexdigest() + # Create checksum file in 'sha1sum'/'md5sum' format. + # The '*' indicates to read the file in binary mode for verification tools. + Path(f"{file_path}.{algorithm}").write_text(hash_value.lower(), encoding="utf-8") + except Exception as e: + print(f"Error generating {algorithm} hash for {file_path}: {e}") + raise + + +def main() -> None: + """ + Signs files with GPG and generates checksums. 
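+
+    The directory holding the files to sign is given as a positional argument; the GPG
+    private key and passphrase are read from the JAVA_PGP_KEY and JAVA_PGP_PWD
+    environment variables. An illustrative invocation (the path is hypothetical):
+
+        python sign_java_artifacts.py D:/a/_work/java-artifact/onnxruntime-java-win-x64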
+ """ + parser = argparse.ArgumentParser(description="Signs files with GPG and generates checksums.") + parser.add_argument("jar_file_directory", help="The directory containing files to sign.") + args = parser.parse_args() + + jar_file_directory = Path(args.jar_file_directory) + if not jar_file_directory.is_dir(): + print(f"Error: Directory not found at '{jar_file_directory}'", file=sys.stderr) + sys.exit(1) + + print(f"\nListing files to be processed in '{jar_file_directory}':") + files_to_process = [p for p in jar_file_directory.rglob("*") if p.is_file()] + for file_path in files_to_process: + print(f" - {file_path}") + print(f"Found {len(files_to_process)} files.") + + print("\nGetting GnuPG signing keys from environment variables.") + gpg_passphrase = os.environ.get("JAVA_PGP_PWD") + gpg_private_key = os.environ.get("JAVA_PGP_KEY") + + if not gpg_passphrase or not gpg_private_key: + print( + "Error: GPG passphrase or private key not found in environment variables ('JAVA_PGP_PWD', 'JAVA_PGP_KEY').", + file=sys.stderr, + ) + sys.exit(1) + + gpg_exe_path = get_gpg_path() + if not gpg_exe_path.is_file(): + print(f"Error: GPG executable not found at '{gpg_exe_path}'.", file=sys.stderr) + sys.exit(1) + + agent_temp_dir = os.environ.get("AGENT_TEMPDIRECTORY") + + # Use a single temporary directory to manage all temporary files + with tempfile.TemporaryDirectory(dir=agent_temp_dir) as temp_dir: + temp_dir_path = Path(temp_dir) + print(f"Created temporary directory: {temp_dir_path}") + + private_key_file = temp_dir_path / "private.key" + passphrase_file = temp_dir_path / "passphrase.txt" + + print("Writing GnuPG key and passphrase to temporary files.") + private_key_file.write_text(gpg_private_key, encoding="utf-8") + passphrase_file.write_text(gpg_passphrase, encoding="utf-8") + + print("Importing GnuPG private key.") + run_command([str(gpg_exe_path), "--batch", "--import", str(private_key_file)]) + print("Successfully imported GnuPG private key.") + + print(f"\nProcessing {len(files_to_process)} files in '{jar_file_directory}'.") + + for file_path in files_to_process: + print(f"Processing file: {file_path}") + + # GPG Signing (.asc) + print(" - GnuPG signing...") + run_command( + [ + str(gpg_exe_path), + "--pinentry-mode", + "loopback", + "--passphrase-file", + str(passphrase_file), + "--detach-sign", + "--armor", + str(file_path), + ] + ) + + # SHA-1 and MD5 Checksums + create_hash_file(file_path, "sha1") + create_hash_file(file_path, "md5") + + print("\nFile signing and checksum generation completed.") + print("Temporary directory and its contents have been deleted.") + + +if __name__ == "__main__": + main() diff --git a/tools/ci_build/linux_java_copy_strip_binary.py b/tools/ci_build/linux_java_copy_strip_binary.py new file mode 100644 index 0000000000000..b9ca856d1c514 --- /dev/null +++ b/tools/ci_build/linux_java_copy_strip_binary.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python3 +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +""" +Prepares native shared libraries for the ONNX Runtime Java package. + +This script is a build utility that run as part of a packaging pipeline and takes compiled C/C++ shared libraries +(.so, .dylib) and stages them for packaging into a Java JAR file. 
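+
+An illustrative invocation (all argument values below are examples, not taken from a
+specific pipeline definition):
+
+    python tools/ci_build/linux_java_copy_strip_binary.py \
+        --binary-dir "$BUILD_BINARIESDIRECTORY" \
+        --artifact-name onnxruntime-java-linux-x64 \
+        --build-config Release \
+        --lib-name libonnxruntime.so \
+        --native-lib-name libonnxruntime4j_jni.so \
+        --arch linux-x64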
+ +It expected the following inputs: +/ +└── / + ├── libonnxruntime.so (File from --lib-name) + ├── libonnxruntime4j_jni.so (File from --native-lib-name) + ├── libcustom_op_library.so + │ + ├── (Optional) libonnxruntime_providers_shared.so + ├── (Optional) libonnxruntime_providers_cuda.so + └── (Optional) libonnxruntime_providers_tensorrt.so + +It performs the following key operations: + +1. Validates the existence of all required source directories and libraries. +2. Creates the specific Java Native Interface (JNI) directory structure + (ai/onnxruntime/native/). +3. Copies the main, JNI, and custom op libraries to their destinations. +4. For macOS, extracts debug symbols into .dSYM files using `dsymutil`. +5. Strips all release binaries of their debug symbols to reduce file size. +6. Copies optional provider libraries (e.g., CUDA, TensorRT) for Linux builds. + +It is intended to be called from a CI/CD pipeline as part of the overall +build process for the onnxruntime-java package. +""" + +import argparse +import logging +import platform +import shutil +import subprocess +import sys +from pathlib import Path + +# --- Configuration --- +logging.basicConfig( + level=logging.INFO, + format="%(asctime)s [%(levelname)s] %(message)s", +) + + +# --- Helper Functions --- +def run_command(command: list[str | Path]): + """Runs an external command and exits the script if the command fails.""" + str_command = " ".join(map(str, command)) + logging.info(f"Running command: '{str_command}'") + try: + proc = subprocess.run(command, check=True, text=True, capture_output=True) + logging.info(f"Successfully executed: {Path(command[0]).name}") + if proc.stdout: + logging.debug(f"STDOUT: {proc.stdout.strip()}") + except FileNotFoundError: + logging.error(f"Command not found: '{command[0]}'. Please ensure it is installed and in your PATH.") + raise + except subprocess.CalledProcessError as e: + logging.error(f"Command '{Path(e.cmd[0]).name}' failed with exit code {e.returncode}.") + if e.stdout: + logging.error(f"STDOUT: {e.stdout.strip()}") + if e.stderr: + logging.error(f"STDERR: {e.stderr.strip()}") + raise + + +# --- Main Execution --- +def main(): + """Main function to parse arguments and package the native libraries.""" + parser = argparse.ArgumentParser( + description="Packages ONNX Runtime native libraries for Java.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + + # Arguments + parser.add_argument("--binary-dir", required=True, type=Path, help="Path to the build binaries directory.") + parser.add_argument("--artifact-name", required=True, help="Name of the final artifact directory.") + parser.add_argument("--build-config", required=True, help="CMake build configuration (e.g., Release).") + parser.add_argument("--lib-name", required=True, help="Filename of the main ONNX Runtime shared library.") + parser.add_argument("--native-lib-name", required=True, help="Filename of the JNI shared library.") + parser.add_argument("--arch", required=True, help="Architecture string (e.g., osx-x86_64).") + args = parser.parse_args() + + # --- Path Setup and Validation --- + logging.info(f"System Info: {' '.join(platform.uname())}") + + source_build_dir = args.binary_dir / args.build_config + target_artifact_dir = args.binary_dir / args.artifact_name + + # Validate that the source build directory exists. 
+ if not source_build_dir.is_dir(): + logging.error(f"Source build directory not found: {source_build_dir}") + sys.exit(1) + + # Map architecture names for macOS to align with Java conventions + arch = args.arch + if args.lib_name.endswith(".dylib"): + if arch == "osx-x86_64": + arch = "osx-x64" + elif arch == "osx-arm64": + arch = "osx-aarch64" + + # --- Library Processing --- + native_folder = target_artifact_dir / "ai" / "onnxruntime" / "native" / arch + native_folder.mkdir(parents=True, exist_ok=True) + logging.info(f"Staging native libraries in: {native_folder}") + + # Validate that all required library files exist before processing. + main_lib_src = source_build_dir / args.lib_name + jni_lib_src = source_build_dir / args.native_lib_name + + required_files = [main_lib_src, jni_lib_src] + lib_suffix = ".dylib" if args.lib_name.endswith(".dylib") else ".so" + custom_op_lib_src = source_build_dir / f"libcustom_op_library{lib_suffix}" + required_files.append(custom_op_lib_src) + + for f in required_files: + if not f.is_file(): + logging.error(f"Required library file not found: {f}") + sys.exit(1) + logging.info("All required source library files found.") + + # Start processing now that checks have passed + if lib_suffix == ".dylib": # macOS + logging.info("Processing macOS libraries (.dylib)...") + run_command(["dsymutil", main_lib_src, "-o", native_folder / f"{args.lib_name}.dSYM"]) + shutil.copy2(main_lib_src, native_folder / "libonnxruntime.dylib") + run_command(["strip", "-S", native_folder / "libonnxruntime.dylib"]) + + run_command(["dsymutil", jni_lib_src, "-o", native_folder / f"{args.native_lib_name}.dSYM"]) + shutil.copy2(jni_lib_src, native_folder / "libonnxruntime4j_jni.dylib") + run_command(["strip", "-S", native_folder / "libonnxruntime4j_jni.dylib"]) + + shutil.copy2(custom_op_lib_src, target_artifact_dir) + + elif lib_suffix == ".so": # Linux + logging.info("Processing Linux libraries (.so)...") + + # Main library + main_lib_dest = native_folder / "libonnxruntime.so" + shutil.copy2(main_lib_src, main_lib_dest) + run_command(["strip", "-S", main_lib_dest]) + + # JNI library + jni_lib_dest = native_folder / "libonnxruntime4j_jni.so" + shutil.copy2(jni_lib_src, jni_lib_dest) + run_command(["strip", "-S", jni_lib_dest]) + + # Custom op library (not stripped as it's for testing) + shutil.copy2(custom_op_lib_src, target_artifact_dir) + + # Provider checks are optional, so we check for their existence here. + for provider in ["cuda", "tensorrt"]: + provider_lib_src = source_build_dir / f"libonnxruntime_providers_{provider}.so" + if provider_lib_src.exists(): + logging.info(f"Found optional {provider} provider library. Copying and stripping...") + + # Shared provider library + shared_provider_lib_src = source_build_dir / "libonnxruntime_providers_shared.so" + if shared_provider_lib_src.exists(): + shared_provider_dest = native_folder / shared_provider_lib_src.name + shutil.copy2(shared_provider_lib_src, shared_provider_dest) + run_command(["strip", "-S", shared_provider_dest]) + + # Specific provider library + provider_lib_dest = native_folder / provider_lib_src.name + shutil.copy2(provider_lib_src, provider_lib_dest) + run_command(["strip", "-S", provider_lib_dest]) + else: + logging.warning(f"Unsupported library type for '{args.lib_name}'. 
No special processing will occur.")
+
+    # --- Finalization ---
+    logging.info(f"--- Final contents of '{target_artifact_dir}' ---")
+    for path in sorted(target_artifact_dir.rglob("*")):
+        logging.info(f" - {path.relative_to(target_artifact_dir)}")
+    logging.info("--- End of contents ---")
+
+    jar_dir_to_remove = target_artifact_dir / "jar"
+    if jar_dir_to_remove.is_dir():
+        logging.info(f"Removing temporary directory: {jar_dir_to_remove}")
+        shutil.rmtree(jar_dir_to_remove)
+
+    logging.info("Script completed successfully.")
+
+
+if __name__ == "__main__":
+    try:
+        main()
+    except Exception as e:
+        logging.error(f"Script failed due to an unhandled error: {e}")
+        sys.exit(1)
diff --git a/tools/ci_build/manage_java_artifacts.py b/tools/ci_build/manage_java_artifacts.py
new file mode 100644
index 0000000000000..51521f651adec
--- /dev/null
+++ b/tools/ci_build/manage_java_artifacts.py
@@ -0,0 +1,312 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# This script runs after the ORT JARs are built. It picks up the JARs from ORT's build directory and repackages them.
+
+import argparse
+import logging
+import re
+import shutil
+import subprocess
+import sys
+import zipfile
+from pathlib import Path
+
+# --- Configuration ---
+logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")
+
+
+# --- Helper Functions ---
+def run_command(command: list, working_dir: Path):
+    """Runs a command in a specified directory and checks for errors."""
+    logging.info(f"Running command: '{' '.join(map(str, command))}' in '{working_dir}'")
+    try:
+        # On Windows, shell=True is required to correctly locate and execute .bat or .cmd files
+        # like gradlew.bat and mvn.cmd that may be in the system's PATH.
+        use_shell = sys.platform == "win32"
+        subprocess.run(command, cwd=working_dir, check=True, shell=use_shell)
+        logging.info("Command successful.")
+    except subprocess.CalledProcessError as e:
+        # Output will have been streamed, so we just need to log the failure.
+        logging.error(f"Command failed with exit code {e.returncode}")
+        raise
+    except FileNotFoundError:
+        logging.error(
+            f"Command failed: The executable '{command[0]}' was not found. "
+            "Please ensure it is installed and that its location is in the system's PATH environment variable."
+ ) + raise + + +def log_directory_contents(dir_path: Path, description: str): + """Logs the contents of a directory for debugging.""" + logging.info(f"--- Listing contents of {description} at '{dir_path}' ---") + if not dir_path.is_dir(): + logging.warning(f"Directory does not exist: {dir_path}") + return + contents = list(dir_path.rglob("*")) + if not contents: + logging.warning(f"Directory is empty: {dir_path}") + else: + for item in contents: + logging.info(f" - {item.relative_to(dir_path)}") + logging.info("--- End of directory listing ---") + + +def create_zip_from_directory(zip_file_path: Path, source_dir: Path): + """Creates a zip file from the contents of a source directory.""" + logging.info(f"Creating archive '{zip_file_path}' from directory '{source_dir}'...") + with zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED) as zipf: + for root, _, files in source_dir.walk(): + for file in files: + file_path = root / file + archive_name = file_path.relative_to(source_dir) + zipf.write(file_path, archive_name) + logging.info("Archive created successfully.") + + +# --- New function for validation --- +def validate_artifacts( + platform_dir: Path, main_jar: Path, main_pom: Path, testing_jar: Path, version: str, artifact_id: str +): + """Uses Maven to validate the generated JAR and POM files.""" + logging.info("--- Starting Maven Artifact Validation ---") + maven_executable = "mvn.cmd" if sys.platform == "win32" else "mvn" + group_id = "com.microsoft.onnxruntime" # Assuming this is constant + + # 1. Validate the main ONNX Runtime JAR and its POM + logging.info(f"Validating main artifact: {main_jar.name}") + install_main_cmd = [ + maven_executable, + "install:install-file", + f"-Dfile={main_jar.resolve()}", + f"-DpomFile={main_pom.resolve()}", + # Adding these makes the command more robust and less prone to errors + f"-DgroupId={group_id}", + f"-DartifactId={artifact_id}", + f"-Dversion={version}", + "-Dpackaging=jar", + ] + run_command(install_main_cmd, working_dir=platform_dir) + logging.info("Main artifact validated successfully.") + + # 2. Validate the testing JAR (it has no POM, so we supply all info) + logging.info(f"Validating testing artifact: {testing_jar.name}") + install_testing_cmd = [ + maven_executable, + "install:install-file", + f"-Dfile={testing_jar.resolve()}", + f"-DgroupId={group_id}", + f"-DartifactId={artifact_id}-testing", + f"-Dversion={version}", + "-Dpackaging=jar", + ] + run_command(install_testing_cmd, working_dir=platform_dir) + logging.info("Testing artifact validated successfully.") + logging.info("--- Maven Artifact Validation Complete ---") + + +def main(): + """Main script execution.""" + parser = argparse.ArgumentParser(description="Builds and packages Java artifacts, PDBs, and notice files.") + parser.add_argument("--sources-dir", required=True, type=Path, help="Path to the build sources directory.") + parser.add_argument("--binaries-dir", required=True, type=Path, help="Path to the build binaries directory.") + parser.add_argument("--platform", required=True, help="Platform string (e.g., x64).") + parser.add_argument( + "--java-artifact-id", required=True, help="The Java artifact ID (e.g., onnxruntime or onnxruntime_gpu)." 
+ ) + parser.add_argument( + "--build-config", + choices=["Debug", "Release", "RelWithDebInfo", "MinSizeRel"], + default="RelWithDebInfo", + help="The CMake build configuration type.", + ) + parser.add_argument( + "--pre-release-version-suffix-string", + choices=["alpha", "beta", "rc", "none"], + default="none", + help="The pre-release version suffix string.", + ) + parser.add_argument( + "--pre-release-version-suffix-number", type=int, default=0, help="The pre-release version suffix number." + ) + parser.add_argument("--commit-hash", required=True, help="The git commit hash.") + parser.add_argument("--build-only", action="store_true", help="Flag to indicate if this is a build-only run.") + args = parser.parse_args() + + # --- 1. Version and Build Logic --- + # Determine the repository root from the script's location + repo_root = Path(__file__).resolve().parent.parent.parent + version_file_path = repo_root / "VERSION_NUMBER" + + logging.info(f"Reading base version from {version_file_path}") + if not version_file_path.is_file(): + raise FileNotFoundError(f"Version file not found at {version_file_path}") + + base_version = version_file_path.read_text(encoding="utf-8").strip() + + # Validate the version format + if not re.match(r"^\d+\.\d+\.\d+$", base_version): + raise ValueError(f"Version '{base_version}' from {version_file_path} is not in the required x.y.z format.") + + logging.info(f"Successfully read and validated base version: {base_version}") + + # Start with the base version and conditionally append the pre-release suffix. + full_version = base_version + if args.pre_release_version_suffix_string != "none": + if args.pre_release_version_suffix_number <= 0: + raise ValueError( + "Pre-release version suffix number must be a positive integer if a suffix string is provided." + ) + # Append the suffix, conforming to Maven standards (e.g., 1.2.3-rc1) + full_version += f"-{args.pre_release_version_suffix_string}{args.pre_release_version_suffix_number}" + + logging.info(f"Using full version: {full_version}") + + # Use the java subdirectory of the repository root as the working directory for Gradle + java_working_dir = repo_root / "java" + + build_config_dir = args.binaries_dir / args.build_config + cmake_build_dir_arg = f"-DcmakeBuildDir={build_config_dir}" + version_property_arg = f"-Dorg.gradle.project.version={full_version}" + + # Construct the absolute path to the Gradle wrapper + gradle_executable_name = "gradlew.bat" if sys.platform == "win32" else "gradlew" + gradle_executable_path = java_working_dir / gradle_executable_name + + # Rebuild the jar so that we can change the version + gradle_args = [cmake_build_dir_arg, version_property_arg] + if args.java_artifact_id == "onnxruntime_gpu": + gradle_args.append("-DUSE_CUDA") + gradle_args.append("-DUSE_TENSORRT") + run_command([str(gradle_executable_path), "cmakeBuild", *gradle_args], working_dir=java_working_dir) + if args.build_only: + run_command( + [ + str(gradle_executable_path), + "testClasses", + "--warning-mode", + "all", + *gradle_args, + ], + working_dir=java_working_dir, + ) + else: + run_command( + [ + str(gradle_executable_path), + "cmakeCheck", + "--warning-mode", + "all", + *gradle_args, + ], + working_dir=java_working_dir, + ) + + # --- 2. 
Path Definitions --- + platform_dir = args.binaries_dir / f"onnxruntime-java-win-{args.platform}" + stage_dir = platform_dir / "stage" + native_folder = stage_dir / "ai" / "onnxruntime" / "native" / f"win-{args.platform}" + main_jar_name = f"{args.java_artifact_id}-{full_version}.jar" + main_jar_path = platform_dir / main_jar_name + final_pom_path = platform_dir / f"{args.java_artifact_id}-{full_version}.pom" + testing_jar_path = platform_dir / "testing.jar" + + # --- 3. Packaging Logic --- + try: + stage_dir.mkdir(parents=True, exist_ok=True) + native_folder.mkdir(parents=True, exist_ok=True) + + gradle_libs_dir = java_working_dir / "build" / "libs" + log_directory_contents(gradle_libs_dir, "Gradle build output libs") + + # FIX: Filter glob results to find the main artifact JAR, excluding sources and javadoc. + main_jars = [ + p + for p in gradle_libs_dir.glob("*.jar") + if not p.name.endswith("-sources.jar") and not p.name.endswith("-javadoc.jar") + ] + + if not main_jars: + raise FileNotFoundError(f"Gradle build finished, but no main artifact JAR was found in {gradle_libs_dir}") + if len(main_jars) > 1: + logging.warning(f"Found multiple potential main JARs: {[p.name for p in main_jars]}. Using the first one.") + + source_jar_path = main_jars[0] + logging.info(f"Found source JAR to copy: {source_jar_path.name}") + + # The main JAR file is copied to its final name directly. + shutil.copy2(source_jar_path, main_jar_path) + + # Now, find and copy the associated sources and javadoc JARs, renaming them to match. + source_basename = source_jar_path.stem # e.g., 'onnxruntime-1.23.0' + dest_basename = main_jar_path.stem # e.g., 'onnxruntime_gpu-1.23.0' + + for classifier in ["sources", "javadoc"]: + source_classified_jar = gradle_libs_dir / f"{source_basename}-{classifier}.jar" + if source_classified_jar.is_file(): + dest_classified_jar = platform_dir / f"{dest_basename}-{classifier}.jar" + logging.info(f"Copying classified artifact: {source_classified_jar.name} -> {dest_classified_jar.name}") + shutil.copy2(source_classified_jar, dest_classified_jar) + else: + logging.warning(f"Optional artifact '{source_classified_jar.name}' not found, skipping.") + + log_directory_contents(platform_dir, "final platform directory before JAR processing") + + pom_archive_path = f"META-INF/maven/com.microsoft.onnxruntime/{args.java_artifact_id}/pom.xml" + with zipfile.ZipFile(main_jar_path, "r") as jar: + jar.extract(pom_archive_path, path=platform_dir) + + shutil.move(str(platform_dir / pom_archive_path), str(final_pom_path)) + shutil.rmtree(platform_dir / "META-INF") + + shutil.copy2(args.sources_dir / "docs" / "Privacy.md", stage_dir) + shutil.copy2(args.sources_dir / "ThirdPartyNotices.txt", stage_dir) + (stage_dir / "GIT_COMMIT_ID").write_text(args.commit_hash, encoding="utf-8") + + with zipfile.ZipFile(main_jar_path, "a") as jar: + for root, _, files in stage_dir.walk(): + for file in files: + file_path = root / file + jar.write(file_path, file_path.relative_to(stage_dir)) + + test_classes_dir = args.sources_dir / "java" / "build" / "classes" / "java" / "test" + test_resources_dir = args.sources_dir / "java" / "build" / "resources" / "test" + + create_zip_from_directory(testing_jar_path, test_classes_dir) + + native_resource_path = test_resources_dir / "ai" / "onnxruntime" / "native" + if native_resource_path.exists(): + shutil.rmtree(native_resource_path) + + with zipfile.ZipFile(testing_jar_path, "a") as jar: + for root, _, files in test_resources_dir.walk(): + for file in files: + file_path = root / 
file + jar.write(file_path, file_path.relative_to(test_resources_dir)) + + logging.info("Java artifact packaging complete.") + + # --- 4. Validation Step --- + validate_artifacts( + platform_dir=platform_dir, + main_jar=main_jar_path, + main_pom=final_pom_path, + testing_jar=testing_jar_path, + version=full_version, + artifact_id=args.java_artifact_id, + ) + + finally: + # 5. Clean up stage directory + if stage_dir.exists(): + logging.info(f"Cleaning up stage directory: {stage_dir}") + shutil.rmtree(stage_dir) + + logging.info(f"\nFinal contents of '{platform_dir}':") + for item in platform_dir.iterdir(): + print(item) + + +if __name__ == "__main__": + main() diff --git a/tools/ci_build/prepare_macos_package.py b/tools/ci_build/prepare_macos_package.py new file mode 100644 index 0000000000000..b92e81663c776 --- /dev/null +++ b/tools/ci_build/prepare_macos_package.py @@ -0,0 +1,185 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +import argparse +import os +import pathlib +import shutil +import stat as stat_module +import subprocess +import sys +import tarfile +from datetime import datetime + + +def run_command(command: list[str | pathlib.Path], check: bool = True) -> subprocess.CompletedProcess: + """Helper to run a command, stream its output, and check for errors.""" + print(f"Executing: {' '.join(map(str, command))}", flush=True) + try: + return subprocess.run(command, check=check, text=True, capture_output=True) + except subprocess.CalledProcessError as e: + print(f"ERROR: Command failed with exit code {e.returncode}", file=sys.stderr) + print(f"--- STDOUT ---\n{e.stdout}", file=sys.stderr) + print(f"--- STDERR ---\n{e.stderr}", file=sys.stderr) + raise + + +def get_relative_file_paths(root_dir: pathlib.Path) -> set[pathlib.Path]: + """ + Returns a set of all relative file paths within a directory, + ignoring any files inside .dSYM directories. + """ + paths = set() + for p in root_dir.rglob("*"): + # Check if any part of the path is a .dSYM directory. + if any(part.endswith(".dSYM") for part in p.relative_to(root_dir).parts): + continue + if p.is_file(): + paths.add(p.relative_to(root_dir)) + return paths + + +def is_macho_binary(file_path: pathlib.Path) -> bool: + """Checks if a file is a Mach-O binary using the 'file' command.""" + if not file_path.is_file(): + return False + try: + result = run_command(["file", file_path]) + return "Mach-O" in result.stdout + except (subprocess.CalledProcessError, FileNotFoundError): + return False + + +def main(): + """Main function to prepare macOS packages for signing.""" + # 1. Setup paths and parse arguments + parser = argparse.ArgumentParser(description="Prepares macOS packages for signing.") + parser.add_argument( + "--staging_dir", + type=pathlib.Path, + required=True, + help="The directory where artifacts are staged and processed.", + ) + args = parser.parse_args() + staging_dir = args.staging_dir.resolve() + + if not staging_dir.is_dir(): + raise FileNotFoundError(f"Staging directory not found: {staging_dir}") + + os.chdir(staging_dir) + print(f"##[group]Working in directory: {staging_dir}") + print(f"Initial contents: {[p.name for p in staging_dir.iterdir()]}") + print("##[endgroup]") + + # 2. 
Unpack all .tgz archives + print("##[group]Unpacking downloaded archives...") + tgz_files = list(staging_dir.glob("*.tgz")) + if not tgz_files: + raise FileNotFoundError("Build Error: No .tgz files found to process.") + + for tgz in tgz_files: + print(f"Extracting {tgz.name}...") + with tarfile.open(tgz) as tar: + tar.extractall(path=".") + tgz.unlink() # Delete the archive + print("##[endgroup]") + + # 3. Locate architecture-specific directories + print("##[group]Locating architecture directories...") + arm64_dirs = list(staging_dir.glob("onnxruntime-osx-arm64*")) + x64_dirs = list(staging_dir.glob("onnxruntime-osx-x86_64*")) + + if len(arm64_dirs) != 1 or len(x64_dirs) != 1: + raise FileNotFoundError( + f"Build Error: Expected 1 arm64 and 1 x64 directory, but found: arm64={len(arm64_dirs)}, x64={len(x64_dirs)}" + ) + + arm64_dir, x64_dir = arm64_dirs[0], x64_dirs[0] + print(f"Found ARM64 source: {arm64_dir.name}") + print(f"Found x86_64 source: {x64_dir.name}") + print("##[endgroup]") + + # **NEW**: Remove _manifest directories before comparison or processing. + print("##[group]Removing _manifest directories...") + for package_dir in (arm64_dir, x64_dir): + manifest_path = package_dir / "_manifest" + if manifest_path.is_dir(): + print(f"Removing manifest directory: {manifest_path.relative_to(staging_dir)}") + shutil.rmtree(manifest_path) + print("##[endgroup]") + + # 4. Error Check: Verify file tree structures are identical + print("##[group]Verifying file tree structures...") + arm64_files = get_relative_file_paths(arm64_dir) + x64_files = get_relative_file_paths(x64_dir) + + if arm64_files != x64_files: + difference = arm64_files.symmetric_difference(x64_files) + print(f"ERROR: File tree structures do not match. Found {len(difference)} differing files:", file=sys.stderr) + for f in sorted(difference): + print(f"- {f}", file=sys.stderr) + sys.exit(1) + + print("✅ File tree structures match.") + print("##[endgroup]") + + # 5. Create the universal binary package + print("##[group]Creating universal2 package with lipo...") + universal_dir = staging_dir / arm64_dir.name.replace("arm64", "universal2") + + print(f"Copying {arm64_dir.name} to {universal_dir.name} as a template.") + shutil.copytree(arm64_dir, universal_dir, symlinks=True, ignore=shutil.ignore_patterns("*.dSYM")) + + for relative_path in arm64_files: + arm64_file = arm64_dir / relative_path + x64_file = x64_dir / relative_path + universal_file = universal_dir / relative_path + + if is_macho_binary(arm64_file) and is_macho_binary(x64_file): + print(f"Combining {relative_path}...") + run_command(["lipo", "-create", arm64_file, x64_file, "-output", universal_file]) + run_command(["lipo", "-info", universal_file]) + print("##[endgroup]") + + # Remove .dSYM folders from source packages before zipping. + print("##[group]Removing .dSYM folders from source packages...") + for package_dir in (arm64_dir, x64_dir): + for dsym_dir in package_dir.rglob("*.dSYM"): + if dsym_dir.is_dir(): + print(f"Removing {dsym_dir.relative_to(staging_dir)}") + shutil.rmtree(dsym_dir) + print("##[endgroup]") + + # 6. Zip all packages for signing and clean up + print("##[group]Zipping all packages for signing...") + for dir_path in (arm64_dir, x64_dir, universal_dir): + # Create a zip file in the staging directory. + zip_file_path = staging_dir / f"{dir_path.name}.zip" + print(f"Zipping {dir_path.name} to {zip_file_path}") + # The source directory path (dir_path.name) is relative to the current working directory (staging_dir). 
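+            # For illustration, for the arm64 package this runs something like:
+            #   zip -FSr --symlinks onnxruntime-osx-arm64-<version>.zip onnxruntime-osx-arm64-<version>
+            # (the exact directory name depends on the unpacked archive).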
+ run_command(["zip", "-FSr", "--symlinks", zip_file_path, dir_path.name]) + + print(f"Removing directory {dir_path.name}") + shutil.rmtree(dir_path) + + print("Final contents of staging directory:") + for item in sorted(staging_dir.iterdir()): + try: + stat = item.stat() + size = stat.st_size + mode_str = stat_module.filemode(stat.st_mode) + mtime = datetime.fromtimestamp(stat.st_mtime).strftime("%b %d %H:%M") + print(f"{mode_str} {size:>10} {mtime} {item.name}") + except FileNotFoundError: + # Handle cases where a file might be a broken symlink + print(f"l????????? {'?':>10} ? ? {item.name} (broken link)") + + print("##[endgroup]") + + +if __name__ == "__main__": + try: + main() + except Exception as e: + print(f"##[error]A critical error occurred: {e}", file=sys.stderr) + sys.exit(1) diff --git a/tools/ci_build/run_gh_action.py b/tools/ci_build/run_gh_action.py new file mode 100644 index 0000000000000..3980324cdd7a1 --- /dev/null +++ b/tools/ci_build/run_gh_action.py @@ -0,0 +1,162 @@ +import os +import platform +import shutil +import sys +import tempfile +import zipfile +from pathlib import Path + +import requests + +SCRIPT_DIR = Path(__file__).resolve().parent +REPO_DIR = (SCRIPT_DIR / ".." / "..").resolve() + +sys.path.insert(0, str(REPO_DIR / "tools" / "python")) + +from util import run # noqa: E402 + +# Hash structure for platform-specific binaries +CMAKE_HASHES = { + "windows": { + "x64": "807b774fcb12defff8ce869e602fc5b6279d5b7bf7229ebcf3f7490da3f887d516b9c49a00d50f9179e552ed8737d19835a19ef8f366d1ffda1ad6f3352a90c2", + "arm64": "86937dc89deabe0ff2a08fe198fcfc70764476b865cca4c6dc3bfc7fb9f7d44d4929af919e26e84aaedef17ad01ffb9683e42c39cb38b409100f723bc5ef1cc0", + }, + "linux": { + "x64": "7939260931098c3f00d2b36de3bee6a0ee3bcae2dba001598c492ed5c82d295c9aa9969654f1ff937fec4d71679541238baaa648c5246f36e14f28f0a62337a0", + "arm64": "8eeb07e966a5340c122979dd2e371708a78adccc85200b22bc7e66028e65513bce5ced6c37fe65aedb94000d970186c5c7562d1ab3dbda911061de46b75345d9", + }, + "macos": "99cc9c63ae49f21253efb5921de2ba84ce136018abf08632c92c060ba91d552e0f6acc214e9ba8123dee0cf6d1cf089ca389e321879fd9d719a60d975bcffcc8", +} + + +def get_platform_keys() -> tuple[str | None, str | None]: + """Detects the OS and CPU architecture and returns normalized keys.""" + os_key: str | None = None + match sys.platform: + case "win32": + os_key = "windows" + case "linux": + os_key = "linux" + case "darwin": + os_key = "macos" + + arch_key: str | None = None + match platform.machine().lower(): + case "amd64" | "x86_64": + arch_key = "x64" + case "arm64" | "aarch64": + arch_key = "arm64" + + return os_key, arch_key + + +def main() -> None: + if len(sys.argv) < 2: + print("::error::Action version argument was not provided.") + sys.exit(1) + + action_version = sys.argv[1] + + # --- Platform Detection and Variable Setup --- + os_key, arch_key = get_platform_keys() + if not os_key or not arch_key: + print( + f"::error::Could not determine a supported platform from OS '{sys.platform}' and Arch '{platform.machine()}'." 
+ ) + sys.exit(1) + + print(f"Detected Platform: OS='{os_key}', Architecture='{arch_key}'") + + try: + if os_key == "macos": + cmake_hash = CMAKE_HASHES[os_key] + else: + cmake_hash = CMAKE_HASHES[os_key][arch_key] + + print(f"Selected CMake hash for '{os_key}'.") + except KeyError: + print(f"::error::Unsupported platform or missing hash for OS='{os_key}' and Arch='{arch_key}'.") + sys.exit(1) + + # --- Conditionally set Terrapin and define action inputs --- + disable_terrapin_value = "true" + terrapin_tool_path_str = "C:\\local\\Terrapin\\TerrapinRetrievalTool.exe" + + action_inputs = { + "INPUT_CMAKE-VERSION": "3.31.8", + "INPUT_CMAKE-HASH": cmake_hash, + "INPUT_VCPKG-VERSION": "2025.06.13", + "INPUT_VCPKG-HASH": "735923258c5187966698f98ce0f1393b8adc6f84d44fd8829dda7db52828639331764ecf41f50c8e881e497b569f463dbd02dcb027ee9d9ede0711102de256cc", + "INPUT_ADD-CMAKE-TO-PATH": "true", + } + + if os_key == "windows" and Path(terrapin_tool_path_str).exists(): + disable_terrapin_value = "false" + action_inputs["INPUT_TERRAPIN-TOOL-PATH"] = terrapin_tool_path_str + print("Terrapin tool found. Setting INPUT_DISABLE-TERRAPIN to 'false' and providing tool path.") + + action_inputs["INPUT_DISABLE-TERRAPIN"] = disable_terrapin_value + + # --- Download and Extract the Action to a Temporary Directory --- + zip_url = f"https://github.com/microsoft/onnxruntime-github-actions/archive/refs/tags/{action_version}.zip" + + # Use AGENT_TEMPDIRECTORY, with a fallback to the system's default temp directory. + temp_dir = Path(os.environ.get("AGENT_TEMPDIRECTORY", tempfile.gettempdir())).resolve() + zip_path = temp_dir / "action.zip" + extract_dir = temp_dir / "action-unzipped" + + print(f"Using temporary directory: {temp_dir}") + + # --- Locate, Run, and Cleanup the Action Script --- + try: + print(f"Downloading action source from: {zip_url}") + response = requests.get(zip_url, stream=True) + response.raise_for_status() + with open(zip_path, "wb") as f: + shutil.copyfileobj(response.raw, f) + + print(f"Extracting {zip_path} to {extract_dir}") + if extract_dir.exists(): + shutil.rmtree(extract_dir) + with zipfile.ZipFile(zip_path, "r") as zip_ref: + zip_ref.extractall(extract_dir) + + try: + action_base_path = next(extract_dir.glob("onnxruntime-github-actions-*")) + print(f"Found action base path: {action_base_path}") + except StopIteration as e: + raise FileNotFoundError(f"Could not find extracted action directory in '{extract_dir}'") from e + + action_script_path = action_base_path / "setup-build-tools" / "dist" / "index.js" + if not action_script_path.exists(): + raise FileNotFoundError(f"Action script not found at expected path: {action_script_path}") + + env = os.environ.copy() + env.update(action_inputs) + + if "AGENT_TOOLSDIRECTORY" in env: + env["RUNNER_TOOL_CACHE"] = env["AGENT_TOOLSDIRECTORY"] + print(f"Mapped RUNNER_TOOL_CACHE to AGENT_TOOLSDIRECTORY: {env['RUNNER_TOOL_CACHE']}") + if "AGENT_TEMPDIRECTORY" in env: + env["RUNNER_TEMP"] = env["AGENT_TEMPDIRECTORY"] + print(f"Mapped RUNNER_TEMP to AGENT_TEMPDIRECTORY: {env['RUNNER_TEMP']}") + + run("node", str(action_script_path), env=env) + + finally: + # --- Cleanup --- + # This block ensures the zip file and extracted directory are always removed. 
+ print("\nStarting cleanup...") + if zip_path.exists(): + print(f"Removing temporary zip file: {zip_path}") + zip_path.unlink() + + if extract_dir.exists(): + print(f"Removing extracted action directory: {extract_dir}") + shutil.rmtree(extract_dir) + + print("Cleanup complete.") + + +if __name__ == "__main__": + main() diff --git a/tools/python/run_packaging_pipelines.py b/tools/python/run_packaging_pipelines.py index 4948f35c642e8..259b7f9e39e9c 100644 --- a/tools/python/run_packaging_pipelines.py +++ b/tools/python/run_packaging_pipelines.py @@ -446,28 +446,42 @@ def main(): print(f" - {result['pipeline']['name']} (ID: {result['pipeline']['id']})") else: print(f"\n--- Triggering {len(pipelines_to_trigger)} Pipelines on branch '{branch_for_trigger}' ---") - nightly_override = None - release_override = None - if args.build_mode == "nightly": - nightly_override = "1" - release_override = "false" - elif args.build_mode == "release": - nightly_override = "0" - release_override = "true" # If pre-release flags are used, it implies a release build. if args.pre_release_suffix_string: print("Pre-release suffix provided. Forcing 'release' build mode.") if args.build_mode and args.build_mode != "release": print(f"Warning: --build-mode={args.build_mode} is overridden by pre-release flags.") - nightly_override = "0" - release_override = "true" + + # If pre-release flags are used, it implies a release build. + if args.pre_release_suffix_string: + print("Pre-release suffix provided. Forcing 'release' build mode.") + if args.build_mode and args.build_mode != "release": + print(f"Warning: --build-mode={args.build_mode} is overridden by pre-release flags.") for result in pipelines_to_trigger: pipeline = result["pipeline"] packaging_type = result["packaging_type"] has_pre_release_params = result["has_pre_release_params"] + # Determine build mode based on flags + nightly_override = None + release_override = None + if args.build_mode == "nightly": + nightly_override = "1" + release_override = "false" + elif args.build_mode == "release": + nightly_override = "0" + release_override = "true" + + # If pre-release flags are used AND the pipeline supports them, it implies a release build. + if args.pre_release_suffix_string and has_pre_release_params: + print(f"Pre-release flags used and supported by '{pipeline['name']}'. Forcing 'release' mode.") + if args.build_mode and args.build_mode != "release": + print(f" - Warning: --build-mode={args.build_mode} is overridden for this pipeline.") + nightly_override = "0" + release_override = "true" + if not args.no_cancel_builds: cancel_running_builds(pipeline["id"], branch_for_trigger, token, project) else: