diff --git a/.github/actions/build-container/action.yml b/.github/actions/build-container/action.yml new file mode 100644 index 000000000000..1a4705348abc --- /dev/null +++ b/.github/actions/build-container/action.yml @@ -0,0 +1,95 @@ +name: Build Container +description: >- + Build and test a container using the standard llvm naming scheme for containers. + +inputs: + tag: + description: >- + The tag to use for this container. + required: false + container-name: + description: >- + The name for the container. + required: true + dockerfile: + description: >- + Path to docker file. + required: false + target: + description: >- + The container target to build 'passed to podman via ---target option' + required: false + context: + description: >- + Path to context for the container build. + required: false + test-command: + description: >- + Test command to run to ensure the container is working correctly. + required: false + +runs: + using: "composite" + steps: + # podman is not installed by default on the ARM64 images. 
+ - name: Install Podman + if: runner.arch == 'ARM64' + shell: bash + run: | + sudo apt-get install podman + + - name: Build Container + shell: bash + env: + INPUT_TAG: ${{inputs.tag }} + INPUT_CONTAINER_NAME: ${{ inputs.container-name }} + INPUT_TARGET: ${{ inputs.target }} + INPUT_DOCKERFILE: ${{ inputs.dockerfile }} + INPUT_CONTEXT: ${{ inputs.context }} + id: build + run: | + env + tag="${INPUT_TAG:-$(git rev-parse --short=12 HEAD)}" + + case "$RUNNER_ARCH" in + ARM64) + container_arch="arm64v8" + ;; + *) + container_arch="amd64" + ;; + esac + + container_name="ghcr.io/$GITHUB_REPOSITORY_OWNER/$container_arch/$INPUT_CONTAINER_NAME:$tag" + container_filename="$(echo $container_name | sed -e 's/\//-/g' -e 's/:/-/g').tar" + if [ -n "$INPUT_TARGET" ]; then + podman_options="$podman_options --target $INPUT_TARGET" + fi + if [ -n "$INPUT_DOCKERFILE" ]; then + podman_options="$podman_options -f $INPUT_DOCKERFILE" + fi + podman_options="$podman_options ${INPUT_CONTEXT:-.}" + echo "Podman Options: $podman_options" + + podman build -t $container_name $podman_options + + podman save $container_name > $container_filename + + echo "container-full-name=$container_name" >> $GITHUB_OUTPUT + + - name: Create container artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ inputs.container-name }}-${{ runner.arch }} + path: "*.tar" + retention-days: 14 + + - name: Test container + shell: bash + if: inputs.test-command + env: + INPUT_TEST_COMMAND: ${{ inputs.test-command }} + CONTAINER_FULL_NAME: ${{ steps.build.outputs.container-full-name }} + run: | + podman run --pull=never --rm -it $CONTAINER_FULL_NAME /usr/bin/bash -x -c "$INPUT_TEST_COMMAND" + diff --git a/.github/actions/push-container/action.yml b/.github/actions/push-container/action.yml new file mode 100644 index 000000000000..6dc364d83c85 --- /dev/null +++ b/.github/actions/push-container/action.yml @@ -0,0 +1,44 @@ +name: Push Container +description: >- + 
Download all container artifacts for this job and push them to the GitHub registry. + +inputs: + token: + description: >- + Token to use to authenticate with the container registry. + required: true + +runs: + using: "composite" + steps: + - name: Download container + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + + - name: Push Container + env: + GITHUB_TOKEN: ${{ inputs.token }} + shell: bash + run: | + function push_container { + image_name=$1 + latest_name=$(echo $image_name | sed 's/:[a-f0-9]\+$/:latest/g') + podman tag $image_name $latest_name + echo "Pushing $image_name ..." + podman push --compression-format=zstd $image_name + echo "Pushing $latest_name ..." + podman push --compression-format=zstd $latest_name + } + + podman login -u ${{ github.actor }} -p $GITHUB_TOKEN ghcr.io + for f in $(find . -iname '*.tar'); do + image_name=$(podman load -q -i $f | sed 's/Loaded image: //g') + push_container $image_name + + if echo $image_name | grep '/amd64/'; then + # For amd64, create an alias with the arch component removed. + # This matches the convention used on dockerhub. + default_image_name=$(echo $(dirname $(dirname $image_name))/$(basename $image_name)) + podman tag $image_name $default_image_name + push_container $default_image_name + fi + done diff --git a/.github/instructions/lldb.instructions.md b/.github/instructions/lldb.instructions.md new file mode 100644 index 000000000000..35bcd27b1b42 --- /dev/null +++ b/.github/instructions/lldb.instructions.md @@ -0,0 +1,79 @@ +--- +applyTo: lldb/**/* +--- + +When reviewing code, focus on: + +## Language, Libraries & Standards + +- Target C++17 and avoid vendor-specific extensions. +- For Python scripts, follow PEP 8. +- Prefer standard library or LLVM support libraries instead of reinventing data structures. + +## Comments & Documentation + +- Each source file should include the standard LLVM file header. +- Header files must have proper header guards. 
+- Non-trivial classes and public methods should have Doxygen documentation. +- Use `//` or `///` comments normally; avoid block comments unless necessary. +- Non-trivial code should have comments explaining what it does and why. Avoid comments that explain how it does it at a micro level. + +## Language & Compiler Issues + +- Write portable code; wrap non-portable code in interfaces. +- Do not use RTTI or exceptions. +- Prefer C++-style casts over C-style casts. +- Do not use static constructors. +- Use `class` or `struct` consistently; `struct` only for all-public data. +- When the same class is declared or defined multiple times, make sure it's consistently done using either `class` or `struct`. + +## Headers & Library Layering + +- Include order: module header → local/private headers → project headers → system headers. +- Headers must compile standalone (include all dependencies). +- Maintain proper library layering; avoid circular dependencies. +- Include minimally; use forward declarations where possible. +- Keep internal headers private to modules. +- Use full namespace qualifiers for out-of-line definitions. + +## Control Flow & Structure + +- Prefer early exits over deep nesting. +- Do not use `else` after `return`, `continue`, `break`, or `goto`. +- Encapsulate loops that compute predicates into helper functions. + +## Naming + +- LLDB's code style differs from LLVM's coding style. +- Variables are `snake_case`. +- Functions and methods are `UpperCamelCase`. +- Static, global and member variables have `s_`, `g_` and `m_` prefixes respectively. + +## General Guidelines + +- Use `assert` liberally; prefer `llvm_unreachable` for unreachable states. +- Do not use `using namespace std;` in headers. +- Provide a virtual method anchor for classes defined in headers. +- Do not use default labels in fully covered switches over enumerations. +- Use range-based for loops wherever possible. +- Capture `end()` outside loops if not using range-based iteration. 
+- Including `<iostream>` is forbidden. Use LLVM’s `raw_ostream` instead. +- Don’t use `inline` when defining a function in a class definition. + +## Microscopic Details + +- Preserve existing style in modified code. +- Prefer pre-increment (`++i`) when value is unused. +- Use `private`, `protected`, or `public` keyword as appropriate to restrict class member visibility. +- Omit braces for single-statement `if`, `else`, `while`, `for` unless needed. + +## Review Style + +- Be specific and actionable in feedback. +- Explain the "why" behind recommendations. +- Link back to the LLVM Coding Standards: https://llvm.org/docs/CodingStandards.html. +- Ask clarifying questions when code intent is unclear. + +Ignore formatting and assume that's handled by external tools like `clang-format` and `black`. +Remember that these standards are **guidelines**. +Always prioritize consistency with the style that is already being used by the surrounding code. diff --git a/.github/instructions/llvm.instructions.md b/.github/instructions/llvm.instructions.md new file mode 100644 index 000000000000..3f1308f51e67 --- /dev/null +++ b/.github/instructions/llvm.instructions.md @@ -0,0 +1,8 @@ +--- +applyTo: llvm/**/* +--- + +When performing a code review, pay close attention to code modifying a function's +control flow. Could the change result in the corruption of performance profile +data? Could the change result in invalid debug information, in particular for +branches and calls? 
diff --git a/.github/new-prs-labeler.yml b/.github/new-prs-labeler.yml new file mode 100644 index 000000000000..07c3df227ec8 --- /dev/null +++ b/.github/new-prs-labeler.yml @@ -0,0 +1,1457 @@ +BOLT: + - changed-files: + - any-glob-to-any-file: + - bolt/**/* + +ClangIR: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/CIR/**/* + - clang/lib/CIR/**/* + - clang/tools/cir-*/**/* + - clang/test/CIR/**/* + +clang:bytecode: + - changed-files: + - any-glob-to-any-file: + - clang/docs/ConstantInterpreter.rst + - clang/lib/AST/ByteCode/**/* + - clang/test/AST/ByteCode/**/* + - clang/unittests/AST/ByteCode/**/* + +clang:dataflow: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/Analysis/FlowSensitive/**/* + - clang/lib/Analysis/FlowSensitive/**/* + - clang/unittests/Analysis/FlowSensitive/**/* + - clang/docs/DataFlowAnalysisIntro.md + - clang/docs/DataFlowAnalysisIntroImages/**/* + +clang:frontend: + - changed-files: + - any-glob-to-any-file: + - clang/lib/AST/**/* + - clang/include/clang/AST/**/* + - clang/lib/Basic/**/* + - clang/include/clang/Basic/**/* + - clang/lib/Interpreter/**/* + - clang/include/clang/Interpreter/**/* + - clang/lib/Lex/**/* + - clang/include/clang/Lex/**/* + - clang/lib/Parse/**/* + - clang/include/clang/Parse/**/* + - clang/lib/Sema/**/* + - clang/include/clang/Sema/**/* + +clang:headers: + - changed-files: + - any-glob-to-any-file: + - clang/lib/Headers/**/* + +compiler-rt: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/**/* + +flang: + - changed-files: + - any-glob-to-any-file: + - flang/**/* + +flang:frontend: + - changed-files: + - any-glob-to-any-file: + - flang/Parser/**/* + - flang/Evaluate/**/* + - flang/Semantics/**/* + +flang-rt: + - changed-files: + - any-glob-to-any-file: + - flang-rt/**/* + +libclc: + - changed-files: + - any-glob-to-any-file: + - libclc/** + +HLSL: + - changed-files: + - any-glob-to-any-file: + - clang/*HLSL*/**/* + - clang/**/*HLSL* + - llvm/**/Frontend/HLSL/**/* + 
+lld: + - changed-files: + - any-glob-to-any-file: + - lld/**/* + +llvm-lit: + - changed-files: + - any-glob-to-any-file: + - llvm/utils/lit/**/* + +PGO: + - changed-files: + - any-glob-to-any-file: + - llvm/**/ProfileData/**/* + - llvm/**/SampleProfile* + - llvm/**/CodeGen/MIRSampleProfile* + - llvm/lib/Transforms/Instrumentation/CGProfile.cpp + - llvm/lib/Transforms/Instrumentation/ControlHeightReduction.cpp + - llvm/lib/Transforms/Instrumentation/IndirectCallPromotion.cpp + - llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp + - llvm/lib/Transforms/Instrumentation/PGO* + - llvm/lib/Transforms/Instrumentation/ValueProfile* + - llvm/test/Instrumentation/InstrProfiling/**/* + - llvm/test/Transforms/PGOProfile/**/* + - llvm/test/Transforms/SampleProfile/**/* + - llvm/**/llvm-profdata/**/* + - llvm/**/llvm-profgen/**/* + +vectorizers: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Transforms/Vectorize/**/* + - llvm/include/llvm/Transforms/Vectorize/**/* + +# IMPORTED FROM CODEOWNERS +LTO: + - changed-files: + - any-glob-to-any-file: + - llvm/*/LTO/** + - llvm/*/Linker/** + - llvm/*/ThinLTO/** + - llvm/lib/Transforms/*/FunctionImport* + - llvm/tools/gold/** + +clang:driver: + - changed-files: + - any-glob-to-any-file: + - clang/*/Driver/** + +compiler-rt:asan: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/asan/** + - compiler-rt/include/sanitizer/asan_interface.h + - compiler-rt/test/asan/** + - compiler-rt/lib/asan_abi/** + - compiler-rt/test/asan_abi/** + +compiler-rt:builtins: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/builtins/** + - compiler-rt/test/builtins/** + +compiler-rt:cfi: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/cfi/** + - compiler-rt/test/cfi/** + +compiler-rt:fuzzer: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/fuzzer/** + - compiler-rt/include/fuzzer/** + - compiler-rt/test/fuzzer/** + +compiler-rt:hwasan: + - changed-files: + - any-glob-to-any-file: + 
- compiler-rt/lib/hwasan/** + - compiler-rt/include/sanitizer/hwasan_interface.h + - compiler-rt/test/hwasan/** + +compiler-rt:lsan: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/lsan/** + - compiler-rt/include/sanitizer/lsan_interface.h + - compiler-rt/test/lsan/** + +compiler-rt:msan: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/msan/** + - compiler-rt/include/sanitizer/msan_interface.h + - compiler-rt/test/msan/** + +compiler-rt:sanitizer: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Transforms/Instrumentation/*Sanitizer* + - compiler-rt/lib/interception/** + - compiler-rt/lib/*san*/** + - compiler-rt/include/sanitizer/** + - compiler-rt/test/*san*/** + - compiler-rt/lib/fuzzer/** + - compiler-rt/include/fuzzer/** + - compiler-rt/test/fuzzer/** + - compiler-rt/lib/scudo/** + - compiler-rt/test/scudo/** + +compiler-rt:scudo: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/scudo/** + - compiler-rt/test/scudo/** + +compiler-rt:tsan: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/tsan/** + - compiler-rt/include/sanitizer/tsan_interface.h + - compiler-rt/include/sanitizer/tsan_interface_atomic.h + - compiler-rt/test/tsan/** + +compiler-rt:ubsan: + - changed-files: + - any-glob-to-any-file: + - compiler-rt/lib/ubsan/** + - compiler-rt/include/sanitizer/ubsan_interface.h + - compiler-rt/test/ubsan/** + - compiler-rt/lib/ubsan_minimal/** + - compiler-rt/test/ubsan_minimal/** + +xray: + - changed-files: + - any-glob-to-any-file: + - llvm/tools/llvm-xray/** + - compiler-rt/*/xray/** + - clang/include/clang/Basic/XRay* + - clang/lib/Basic/XRay* + - compiler-rt/*/xray/** + - llvm/include/llvm/XRay/** + - llvm/lib/XRay/** + - llvm/tools/llvm-xray/** + - llvm/unittests/XRay/** + - compiler-rt/*/xray/** + +clang:codegen: + - changed-files: + - any-glob-to-any-file: + - clang/lib/CodeGen/** + - clang/include/clang/CodeGen/** + +mlir: + - changed-files: + - any-glob-to-any-file: + - mlir/** + 
+mlir:core: + - changed-files: + - any-glob-to-any-file: + - mlir/include/mlir/Support/** + - mlir/lib/Support/** + - mlir/include/mlir/Parser/** + - mlir/lib/Parser/** + - mlir/include/mlir/IR/** + - mlir/lib/IR/** + - mlir/include/mlir/Bytecode/** + - mlir/lib/Bytecode/** + - mlir/include/mlir/AsmParser/** + - mlir/lib/AsmParser/** + - mlir/include/mlir/Pass/** + - mlir/lib/Pass/** + - mlir/include/mlir/Tools/** + - mlir/lib/Tools/** + - mlir/include/mlir/Reducer/** + - mlir/lib/Reducer/** + - mlir/include/mlir/Transforms/** + - mlir/lib/Transforms/** + - mlir/include/mlir/Debug/** + - mlir/lib/Debug/** + - mlir/tools/** + +mlir:ods: + - changed-files: + - any-glob-to-any-file: + - mlir/TableGen/** + - mlir/tblgen/** + - mlir/include/mlir/IR/*.td + +mlir:bindings: + - changed-files: + - any-glob-to-any-file: + - mlir/Bindings/** + +mlir:gpu: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*GPU*/** + +mlir:amdgpu: + - changed-files: + - any-glob-to-any-file: + - mlir/**/AMDGPU/** + +mlir:amx: + - changed-files: + - any-glob-to-any-file: + - mlir/**/AMX/** + +mlir:affine: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Affine/** + +mlir:arith: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Arith/** + +mlir:neon: + - changed-files: + - any-glob-to-any-file: + - mlir/**/ArmNeon/** + +mlir:sme: + - changed-files: + - any-glob-to-any-file: + - mlir/**/ArmSME/** + +mlir:sve: + - changed-files: + - any-glob-to-any-file: + - mlir/**/ArmSVE/** + +mlir:async: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Async/** + - mlir/**/Async/** + +mlir:bufferization: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Bufferization/** + +mlir:complex: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Complex/** + +mlir:cf: + - changed-files: + - any-glob-to-any-file: + - mlir/**/ControlFlow/** + +mlir:dlti: + - changed-files: + - any-glob-to-any-file: + - mlir/**/DLTI/** + +mlir:emitc: + - changed-files: + - any-glob-to-any-file: + - 
mlir/**/*EmitC*/** + - mlir/lib/Target/Cpp/** + +mlir:func: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Func/** + +mlir:irdl: + - changed-files: + - any-glob-to-any-file: + - mlir/**/IRDL/** + +mlir:index: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Index/** + +mlir:llvm: + - changed-files: + - any-glob-to-any-file: + - mlir/**/LLVM* + - mlir/**/LLVM*/** + +mlir:linalg: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*linalg/** + - mlir/**/*Linalg/** + +mlir:mlprogram: + - changed-files: + - any-glob-to-any-file: + - mlir/**/MLProgram/** + +mlir:math: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Math/** + +mlir:memref: + - changed-files: + - any-glob-to-any-file: + - mlir/**/MemRef/** + +mlir:nvgpu: + - changed-files: + - any-glob-to-any-file: + - mlir/**/NVGPU/** + +mlir:openacc: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*OpenACC* + - mlir/**/*OpenACC*/** + +mlir:openmp: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*OpenMP* + - mlir/**/*OpenMP*/** + +mlir:pdl: + - changed-files: + - any-glob-to-any-file: + - mlir/**/PDL/** + +mlir:quant: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Quant/** + +mlir:scf: + - changed-files: + - any-glob-to-any-file: + - mlir/**/SCF/** + +mlir:spirv: + - changed-files: + - any-glob-to-any-file: + - mlir/**/SPIRV/** + - mlir/**/SPIRVTo*/** + - mlir/**/*ToSPIRV/** + - mlir/tools/mlir-spirv-cpu-runner/** + - mlir/tools/mlir-vulkan-runner/** + - mlir/tools/mlir-tblgen/SPIRVUtilsGen.cpp + +mlir:shape: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Shape/** + +mlir:sparse: + - changed-files: + - any-glob-to-any-file: + - mlir/**/SparseTensor/** + +mlir:tensor: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Tensor/** + +mlir:tosa: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*Tosa*/** + +mlir:ub: + - changed-files: + - any-glob-to-any-file: + - mlir/**/UB/** + +mlir:vector: + - changed-files: + - any-glob-to-any-file: + - 
mlir/**/*Vector/** + +mlir:execution-engine: + - changed-files: + - any-glob-to-any-file: + - mlir/**/ExecutionEngine/** + +mlir:presburger: + - changed-files: + - any-glob-to-any-file: + - mlir/**/*Presburger*/** + +mlir:python: + - changed-files: + - any-glob-to-any-file: + - mlir/python/**/* + - mlir/include/mlir/Bindings/Python/** + - mlir/lib/Bindings/Python/** + - mlir/tools/mlir-tblgen/*Python* + +mlir:vectorops: + - changed-files: + - any-glob-to-any-file: + - mlir/**/Vector/**/* + +coroutines: + - changed-files: + - any-glob-to-any-file: + - clang/docs/DebuggingCoroutines.rst + - clang/lib/Sema/SemaCoroutine.cpp + - clang/lib/CodeGen/CGCoroutine.cpp + - clang/test/CodeGenCoroutines/** + - llvm/docs/Coroutines.rst + - llvm/include/llvm/Transforms/Coroutines/** + - llvm/lib/Transforms/Coroutines/** + - llvm/test/Transforms/Coroutines/* + +clang:modules: + - changed-files: + - any-glob-to-any-file: + - clang/docs/StandardCPlusPlusModules.rst + - clang/include/clang/AST/AbstractBasicReader.h + - clang/include/clang/AST/AbstractBasicWriter.h + - clang/include/clang/AST/AbstractTypeReader.h + - clang/include/clang/AST/AbstractTypeWriter.h + - clang/include/clang/AST/PropertiesBase.td + - clang/include/clang/AST/ODRHash.h + - clang/include/clang/AST/TypeProperties.td + - clang/include/clang/Basic/Module.h + - clang/include/clang/Frontend/PrecompiledPreamble.h + - clang/include/clang/Lex/ModuleLoader.h + - clang/include/clang/Lex/ModuleMap.h + - clang/include/clang/Serialization/** + - clang/lib/AST/ODRHash.cpp + - clang/lib/AST/StmtProfile.cpp + - clang/lib/Basic/Module.cpp + - clang/lib/Frontend/ModuleDependencyCollector.cpp + - clang/lib/Frontend/PrecompiledPreamble.cpp + - clang/lib/Lex/ModuleMap.cpp + - clang/lib/Sema/SemaModule.cpp + - clang/lib/Serialization/** + - clang/test/CXX/module/** + - clang/test/Modules/** + - clang/unittests/Serialization/* + +clang-tidy: + - changed-files: + - any-glob-to-any-file: + - clang-tools-extra/clang-tidy/** + - 
clang-tools-extra/docs/clang-tidy/** + - clang-tools-extra/test/clang-tidy/** + +clang-tools-extra: + - changed-files: + - any-glob-to-any-file: + - clang-tools-extra/** + +tools:llvm-mca: + - changed-files: + - any-glob-to-any-file: + - llvm/tools/llvm-mca/** + - llvm/include/llvm/MCA/** + - llvm/lib/MCA/** + +clang: + - changed-files: + - all-globs-to-all-files: + - clang/** + - '!clang/**/Format/**' + - '!clang/tools/clang-format/**' + +testing-tools: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/FileCheck/** + - llvm/lib/FileCheck/** + - llvm/test/FileCheck/** + - llvm/unittests/FileCheck/** + - llvm/utils/lit/** + - llvm/utils/split-file/** + - llvm/utils/not/** + - llvm/utils/count/** + - llvm/utils/FileCheck/** + - llvm/docs/CommandGuide/FileCheck.rst + - llvm/docs/CommandGuide/lit.rst + - llvm/docs/TestingGuide.rst + - llvm/test/Other/FileCheck-space.txt + - llvm/utils/UpdateTestChecks/** + - llvm/utils/update*_test_checks.py + +debuginfo: + - changed-files: + - any-glob-to-any-file: + - clang/lib/CodeGen/CGDebugInfo.* + - llvm/include/llvm/BinaryFormat/Dwarf.* + - llvm/include/llvm/CodeGen/*Debug*.* + - llvm/include/llvm/DebugInfo/** + - llvm/include/llvm/Debuginfod/** + - llvm/include/llvm/Frontend/Debug/** + - llvm/include/llvm/IR/Debug*.* + - llvm/include/llvm/Object/*Debug*.* + - llvm/include/llvm/ObjectYAML/*Debug*.* + - llvm/include/llvm/Transforms/Utils/*Debug*.* + - llvm/include/llvm-c/DebugInfo.h + - llvm/lib/BinaryFormat/Dwarf.cpp + - llvm/lib/CodeGen/AsmPrinter/*Debug*.* + - llvm/lib/CodeGen/AsmPrinter/Dwarf*.* + - llvm/lib/CodeGen/AsmPrinter/DIE*.* + - llvm/lib/CodeGen/LiveDebugValues/** + - llvm/lib/CodeGen/*Debug*.* + - llvm/lib/CodeGen/DwarfEHPrepare.cpp + - llvm/lib/DebugInfo/** + - llvm/lib/Debuginfod/** + - llvm/lib/DWARFLinkerParallel/** + - llvm/lib/IR/Debug*.cpp + - llvm/lib/MC/MCDwarf.cpp + - llvm/lib/Transforms/Utils/*Debug*.* + - llvm/test/DebugInfo/** + - llvm/test/tools/dsymutil/** + - 
llvm/test/tools/llvm-debuginfo-analyzer/** + - llvm/test/tools/llvm-debuginfod/** + - llvm/test/tools/llvm-debuginfod-find/** + - llvm/test/tools/llvm-dwarfdump/** + - llvm/test/tools/llvm-dwarfutil/** + - llvm/test/tools/llvm-dwp/** + - llvm/test/tools/llvm-gsymutil/** + - llvm/test/tools/llvm-pdbutil/** + - llvm/tools/dsymutil/** + - llvm/tools/llvm-debuginfo-analyzer/** + - llvm/tools/llvm-debuginfod/** + - llvm/tools/llvm-debuginfod-find/** + - llvm/tools/llvm-dwarfdump/** + - llvm/tools/llvm-dwarfutil/** + - llvm/tools/llvm-dwp/** + - llvm/tools/llvm-gsymutil/** + - llvm/tools/llvm-pdbutil/** + +github:workflow: + - changed-files: + - any-glob-to-any-file: + - .github/workflows/** + +cmake: + - changed-files: + - any-glob-to-any-file: + - cmake/** + - llvm/cmake/** + - runtimes/** + +flang:driver: + - changed-files: + - any-glob-to-any-file: + - flang/tools/flang-driver/** + - flang/unittests/Frontend/** + - flang/lib/FrontendTool/** + - flang/lib/Frontend/** + - flang/include/flang/Frontend/** + - flang/include/flang/FrontendTool/** + - flang/test/Driver/** + +backend:m68k: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Target/M68k/** + - clang/lib/Basic/Targets/M68k.* + - clang/lib/CodeGen/Targets/M68k.cpp + - llvm/test/CodeGen/M68k/** + - llvm/test/MC/Disassembler/M68k/** + - llvm/test/MC/M68k/** + +libc++: + - changed-files: + - any-glob-to-any-file: + - libcxx/** + - .github/workflows/libcxx-* + +libc++abi: + - changed-files: + - any-glob-to-any-file: + - libcxxabi/** + +libunwind: + - changed-files: + - any-glob-to-any-file: + - libunwind/** + +objectyaml: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/ObjectYAML/** + - llvm/lib/ObjectYAML/** + - llvm/test/tools/obj2yaml/** + - llvm/test/tools/yaml2obj/** + - llvm/tools/obj2yaml/** + - llvm/tools/yaml2obj/** + +clang:analysis: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/Analysis/** + - clang/lib/Analysis/** + +clang:static analyzer: + - 
changed-files: + - any-glob-to-any-file: + - clang/include/clang/StaticAnalyzer/** + - clang/lib/StaticAnalyzer/** + - clang/tools/scan-build/** + - clang/utils/analyzer/** + - clang/docs/analyzer/** + - clang/test/Analysis/** + +pgo: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Transforms/Instrumentation/CGProfile.cpp + - llvm/lib/Transforms/Instrumentation/ControlHeightReduction.cpp + - llvm/lib/Transforms/Instrumentation/IndirectCallPromotion.cpp + - llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp + - llvm/lib/Transforms/Instrumentation/PGO* + - llvm/lib/Transforms/Instrumentation/ValueProfile* + - llvm/test/Instrumentation/InstrProfiling/** + - llvm/test/Transforms/PGOProfile/** + - compiler-rt/lib/profile/** + - compiler-rt/lib/memprof/** + - compiler-rt/test/profile/** + - compiler-rt/test/memprof/** + - llvm/tools/llvm-profdata/** + - llvm/tools/llvm-profgen/** + - llvm/test/tools/llvm-profdata/** + - llvm/test/tools/llvm-profgen/** + - llvm/unittests/ProfileData/* + +openacc: + - changed-files: + - any-glob-to-any-file: + - flang/**/OpenACC/** + - flang/include/flang/Lower/OpenACC.h + - flang/docs/OpenACC.md + - flang/lib/Parser/openacc-parsers.cpp + - flang/lib/Lower/OpenACC.cpp + - llvm/**/Frontend/OpenACC/** + - llvm/unittests/Frontend/OpenACCTest.cpp + - mlir/test/Target/LLVMIR/openacc-llvm.mlir + - mlir/**/*OpenACC/** + +flang:runtime: + - changed-files: + - any-glob-to-any-file: + - flang/runtime/** + +flang:parser: + - changed-files: + - any-glob-to-any-file: + - flang/**/Parser/** + +flang:semantics: + - changed-files: + - any-glob-to-any-file: + - flang/**/Evaluate/** + - flang/**/Semantics/** + +flang:fir-hlfir: + - changed-files: + - any-glob-to-any-file: + - flang/**/Lower/** + - flang/**/Optimizer/** + +flang:codegen: + - changed-files: + - any-glob-to-any-file: + - flang/**/CodeGen/** + +llvm:codegen: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/CodeGen/* + - llvm/lib/CodeGen/MIRParser/* + - 
llvm/lib/CodeGen/LiveDebugValues/* + - llvm/lib/CodeGen/AsmPrinter/* + +llvm:globalisel: + - changed-files: + - any-glob-to-any-file: + - llvm/**/GlobalISel/** + - llvm/utils/TableGen/GlobalISel* + +function-specialization: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/Transforms/Utils/SCCPSolver.h + - llvm/lib/Transforms/Utils/SCCPSolver.cpp + - llvm/include/llvm/Transforms/IPO/FunctionSpecialization.h + - llvm/lib/Transforms/IPO/FunctionSpecialization.cpp + - llvm/test/Transforms/FunctionSpecialization/* + +libc: + - changed-files: + - any-glob-to-any-file: + - libc/** + - utils/bazel/llvm-project-overlay/libc/** + +clang-format: + - changed-files: + - any-glob-to-any-file: + - clang/**/Format/** + - clang/tools/clang-format/** + +flang:openmp: + - changed-files: + - any-glob-to-any-file: + - flang/test/**/OpenMP/** + - flang/lib/Lower/OpenMP.cpp + - flang/lib/Semantics/resolve-directives.cpp + - flang/lib/Semantics/check-omp-structure.cpp + - flang/lib/Optimizer/Transforms/OMP* + - flang/test/Fir/convert-to-llvm-openmp-and-fir.fir + - flang/test/Lower/OpenMP/** + - flang/test/Transforms/omp* + - mlir/**/*OpenMP* + - mlir/test/Target/LLVMIR/openmp* + - llvm/lib/Frontend/OpenMP/** + - llvm/include/llvm/Frontend/OpenMP/** + - llvm/unittests/Frontend/OpenMP* + +llvm:ir: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/IR/** + - llvm/include/llvm/IR/** + - llvm/docs/LangRef.rst + - llvm/unittests/IR/** + +llvm:SandboxIR: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/SandboxIR/** + - llvm/include/llvm/SandboxIR/** + - llvm/docs/SandboxIR.md + - llvm/unittests/SandboxIR/** + +llvm:analysis: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Analysis/** + - llvm/include/llvm/Analysis/** + - llvm/test/Analysis/** + - llvm/unittests/Analysis/** + +llvm:adt: + - changed-files: + - any-glob-to-any-file: + - llvm/**/ADT/* + +llvm:support: + - changed-files: + - any-glob-to-any-file: + - llvm/**/Support/** + +# Skip 
llvm/test/MC and llvm/unittests/MC, which includes target-specific directories. +llvm:mc: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/MC/** + - llvm/lib/MC/** + - llvm/tools/llvm-mc/** + +llvm:transforms: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Transforms/** + - llvm/include/llvm/Transforms/** + - llvm/test/Transforms/** + - llvm/unittests/Transforms/** + +llvm:instcombine: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Analysis/InstructionSimplify.cpp + - llvm/lib/Transforms/InstCombine/** + - llvm/include/llvm/Transforms/InstCombine/ + - llvm/include/llvm/Analysis/InstructionSimplify.h + - llvm/test/Transforms/InstCombine/** + - llvm/test/Transforms/InstSimplify/** + +llvm:vectorcombine: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Transforms/Vectorize/VectorCombine.cpp + - llvm/test/Transforms/VectorCombine/** + +clangd: + - changed-files: + - any-glob-to-any-file: + - clang-tools-extra/clangd/** + +hlsl: + - changed-files: + - any-glob-to-any-file: + - clang/test/ParserHLSL/** + - clang/test/SemaHLSL/** + - clang/test/AST/HLSL/** + - clang/test/CodeGenHLSL/** + - clang/cmake/caches/HLSL.cmake + - clang/include/clang/Basic/HLSL*.h + - clang/include/clang/Sema/HLSL*.h + - clang/docs/HLSL/** + - clang/lib/Driver/ToolChains/HLSL* + - clang/lib/Parse/ParseHLSL.cpp + - clang/lib/Sema/HLSLExternalSemaSource.cpp + - clang/lib/Sema/SemaHLSL.cpp + - clang/lib/CodeGen/CGHLSLRuntime.* + - clang/lib/CodeGen/CGHLSLBuiltins.cpp + - llvm/include/llvm/Frontend/HLSL/** + - llvm/lib/Frontend/HLSL/** + +llvm:SelectionDAG: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/CodeGen/SelectionDAG*.h + - llvm/include/llvm/CodeGen/SDNodeProperties.td + - llvm/include/llvm/Target/TargetSelectionDAG.td + - llvm/lib/CodeGen/SelectionDAG/** + - llvm/utils/TableGen/CodeGenDAG* + - llvm/utils/TableGen/DAGISel* + - llvm/include/llvm/CodeGen/DAGCombine.h + - llvm/include/llvm/CodeGen/ISDOpcodes.h + 
+backend:DirectX: + - changed-files: + - any-glob-to-any-file: + - '**/*DirectX*' + - '**/*DXIL*' + - '**/*dxil*' + - '**/*DirectX*/**' + - '**/*DXIL*/**' + - '**/*dxil*/**' + - '**/*DXContainer*' + - '**/*DXContainer*/**' + - clang/lib/Sema/SemaDirectX.cpp + - clang/include/clang/Sema/SemaDirectX.h + - clang/include/clang/Basic/BuiltinsDirectX.td + - clang/lib/CodeGen/TargetBuiltins/DirectX.cpp + - clang/test/CodeGenDirectX/** + - clang/test/SemaDirectX/** + +backend:SPIR-V: + - changed-files: + - any-glob-to-any-file: + - clang/lib/Driver/ToolChains/SPIRV.* + - clang/lib/Sema/SemaSPIRV.cpp + - clang/include/clang/Sema/SemaSPIRV.h + - clang/include/clang/Basic/BuiltinsSPIRV.td + - clang/test/CodeGenSPIRV/** + - clang/test/SemaSPIRV/** + - llvm/lib/Target/SPIRV/** + - llvm/test/CodeGen/SPIRV/** + - llvm/test/Frontend/HLSL/** + - llvm/docs/SPIRVUsage.rst + +mlgo: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Analysis/ML* + - llvm/include/llvm/Analysis/ML* + - llvm/lib/Analysis/*Runner.cpp + - llvm/include/llvm/Analysis/*Runner.h + - llvm/unittests/Analysis/ML* + - llvm/lib/Analysis/FunctionPropertiesAnalysis.cpp + - llvm/lib/Analysis/TrainingLogger.cpp + - llvm/include/llvm/Analysis/FunctionPropertiesAnalysis.h + - llvm/include/llvm/Analysis/Utils/TrainingLogger.h + - llvm/test/Analysis/FunctionPropertiesAnalysis/* + - llvm/unittests/Analysis/FunctionPropertiesAnalysisTest.cpp + - llvm/test/Transforms/inline/ML/** + - llvm/lib/CodeGen/ML* + - llvm/unittests/CodeGen/ML* + - llvm/test/CodeGen/MLRegAlloc/** + - llvm/utils/mlgo-utils/** + - llvm/docs/MLGO.rst + - llvm/include/llvm/Analysis/IR2Vec.h + - llvm/lib/Analysis/IR2Vec.cpp + - llvm/lib/Analysis/models/** + - llvm/include/llvm/CodeGen/MIR2Vec.h + - llvm/lib/CodeGen/MIR2Vec.cpp + - llvm/test/Analysis/IR2Vec/** + - llvm/test/CodeGen/MIR2Vec/** + - llvm/unittests/Analysis/IR2VecTest.cpp + - llvm/unittests/CodeGen/MIR2VecTest.cpp + - llvm/tools/llvm-ir2vec/** + - llvm/docs/CommandGuide/llvm-ir2vec.rst + 
+tools:llvm-exegesis: + - changed-files: + - any-glob-to-any-file: + - llvm/tools/llvm-exegesis/** + - llvm/test/tools/llvm-exegesis/** + - llvm/unittests/tools/llvm-exegesis/** + +tools:llvm-reduce: + - changed-files: + - any-glob-to-any-file: + - llvm/tools/llvm-reduce/** + +platform:windows: + - changed-files: + - any-glob-to-any-file: + - lld/COFF/** + - clang/lib/Driver/MSVC.cpp + - clang/lib/Driver/MinGW.cpp + - llvm/lib/DebugInfo/CodeView/** + - llvm/lib/DebugInfo/PDB/** + - llvm/lib/WindowsDriver/** + - llvm/lib/Support/Windows/** + - llvm/lib/BinaryFormat/COFF.cpp + +llvm:regalloc: + - changed-files: + - any-glob-to-any-file: + - llvm/**/CodeGen/CalcSpillWeights* + - llvm/**/CodeGen/InlineSpiller* + - llvm/**/CodeGen/InterferenceCache* + - llvm/**/CodeGen/LiveInterval* + - llvm/**/CodeGen/LiveRange* + - llvm/**/CodeGen/LiveReg* + - llvm/**/CodeGen/LiveVariables* + - llvm/**/CodeGen/MachineCopyPropagation* + - llvm/**/CodeGen/PHIElimination* + - llvm/**/CodeGen/ProcessImplicitDefs.cpp + - llvm/**/CodeGen/Register* + - llvm/**/CodeGen/RegUsage* + - llvm/**/CodeGen/RenameIndependentSubregs.cpp + - llvm/**/CodeGen/SlotIndexes.h + - llvm/**/CodeGen/SpillPlacement* + - llvm/**/CodeGen/SplitKit* + - llvm/**/CodeGen/VirtRegMap.h + - llvm/include/PBQP/** + - llvm/include/PBQPRAConstraint.h + - llvm/include/llvm/CodeGen/Spiller.h + - llvm/**/*RegAlloc + +lldb: + - changed-files: + - any-glob-to-any-file: + - lldb/** + +lldb-dap: + - changed-files: + - any-glob-to-any-file: + - lldb/tools/lldb-dap/** + +backend:AMDGPU: + - changed-files: + - any-glob-to-any-file: + - '**/*amdgpu*' + - '**/*AMDGPU*' + - '**/*amdgpu*/**' + - '**/*AMDGPU*/**' + +backend:NVPTX: + - changed-files: + - any-glob-to-any-file: + - 'llvm/**/*nvvm*' + - 'llvm/**/*NVVM*' + - 'llvm/**/*nvptx*' + - 'llvm/**/*NVPTX*' + - 'llvm/**/*nvvm*/**' + - 'llvm/**/*NVVM*/**' + - 'llvm/**/*nvptx*/**' + - 'llvm/**/*NVPTX*/**' + +backend:MIPS: + - changed-files: + - any-glob-to-any-file: + - '**/*mips*' + - 
'**/*Mips*' + - '**/*mips*/**' + - '**/*Mips*/**' + +backend:RISC-V: + - changed-files: + - any-glob-to-any-file: + - '**/*riscv*' + - '**/*RISCV*' + - '**/*riscv*/**' + - '**/*RISCV*/**' + +backend:Xtensa: + - changed-files: + - any-glob-to-any-file: + - '**/*xtensa*' + - '**/*Xtensa*' + - '**/*xtensa*/**' + - '**/*Xtensa*/**' + +lld:coff: + - changed-files: + - any-glob-to-any-file: + - lld/**/COFF/** + - lld/Common/** + +lld:elf: + - changed-files: + - any-glob-to-any-file: + - lld/**/ELF/** + - lld/Common/** + +lld:macho: + - changed-files: + - any-glob-to-any-file: + - lld/**/MachO/** + - lld/Common/** + +lld:wasm: + - changed-files: + - any-glob-to-any-file: + - lld/**/wasm/** + - lld/Common/** + +backend:ARC: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Target/ARC/** + - clang/lib/Basic/Targets/ARC.h + - clang/lib/Basic/Targets/ARC.cpp + - clang/lib/CodeGen/Targets/ARC.cpp + +backend:ARM: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsARM.td + - llvm/test/MC/ARM/** + - llvm/lib/Target/ARM/** + - llvm/test/CodeGen/ARM/** + - clang/lib/Basic/Targets/ARM* + - clang/lib/Driver/ToolChains/Arch/ARM.* + - clang/lib/CodeGen/Targets/ARM.cpp + - clang/include/clang/Basic/BuiltinsARM* + - llvm/test/MC/Disassembler/ARM/** + - clang/include/clang/Sema/SemaARM.h + - clang/lib/Sema/SemaARM.cpp + +backend:AArch64: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsAArch64.td + - llvm/test/MC/AArch64/** + - llvm/lib/Target/AArch64/** + - llvm/test/CodeGen/AArch64/** + - clang/lib/Basic/Targets/AArch64* + - clang/lib/Driver/ToolChains/Arch/AArch64.* + - clang/lib/CodeGen/Targets/AArch64.cpp + - clang/include/clang/Basic/BuiltinsAArch64* + - llvm/test/MC/Disassembler/AArch64/** + - clang/include/clang/Sema/SemaARM.h + - clang/lib/Sema/SemaARM.cpp + +backend:CSKY: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Target/CSKY/** + - llvm/include/llvm/TargetParser/CSKYTargetParser.def + - 
llvm/include/llvm/TargetParser/CSKYTargetParser.h + - llvm/include/llvm/BinaryFormat/ELFRelocs/CSKY.def + - llvm/lib/TargetParser/CSKYTargetParser.cpp + - llvm/lib/Support/CSKYAttributes.cpp + - llvm/lib/Support/CSKYAttributeParser.cpp + - clang/lib/Basic/Targets/CSKY.h + - clang/lib/Basic/Targets/CSKY.cpp + - clang/lib/CodeGen/Targets/CSKY.cpp + - clang/lib/Driver/ToolChains/CSKY* + +backend:Hexagon: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/Basic/BuiltinsHexagon*.def + - clang/include/clang/Sema/SemaHexagon.h + - clang/lib/Basic/Targets/Hexagon.* + - clang/lib/CodeGen/Targets/Hexagon.cpp + - clang/lib/Driver/ToolChains/Hexagon.* + - clang/lib/Sema/SemaHexagon.cpp + - lld/ELF/Arch/Hexagon.cpp + - lldb/source/Plugins/ABI/Hexagon/** + - lldb/source/Plugins/DynamicLoader/Hexagon-DYLD/** + - llvm/include/llvm/BinaryFormat/ELFRelocs/Hexagon.def + - llvm/include/llvm/IR/IntrinsicsHexagon* + - llvm/include/llvm/Support/Hexagon* + - llvm/lib/Support/Hexagon* + - llvm/lib/Target/Hexagon/** + - llvm/test/CodeGen/Hexagon/** + - llvm/test/CodeGen/*/Hexagon/** + - llvm/test/DebugInfo/*/Hexagon/** + - llvm/test/Transforms/*/Hexagon + - llvm/test/MC/Disassembler/Hexagon/** + - llvm/test/MC/Hexagon/** + - llvm/test/tools/llvm-objdump/ELF/Hexagon/** + +backend:Lanai: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Target/Lanai/** + - clang/lib/Basic/Targets/Lanai.h + - clang/lib/Basic/Targets/Lanai.cpp + - clang/lib/CodeGen/Targets/Lanai.cpp + - clang/lib/Driver/ToolChains/Lanai* + +backend:loongarch: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsLoongArch.td + - llvm/test/MC/LoongArch/** + - llvm/lib/Target/LoongArch/** + - llvm/test/CodeGen/LoongArch/** + - clang/lib/Basic/Targets/LoongArch* + - clang/lib/Driver/ToolChains/Arch/LoongArch.* + - clang/lib/CodeGen/Targets/LoongArch.cpp + - clang/include/clang/Basic/BuiltinsLoongArch* + - clang/include/clang/Sema/SemaLoongArch.h + - 
clang/lib/Sema/SemaLoongArch.cpp + +backend:MSP430: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsMSP430.td + - llvm/test/MC/MSP430/** + - llvm/lib/Target/MSP430/** + - llvm/test/CodeGen/MSP430/** + - clang/lib/Basic/Targets/MSP430* + - clang/lib/Driver/ToolChains/Arch/MSP430.* + - clang/lib/CodeGen/Targets/MSP430.cpp + - clang/include/clang/Basic/BuiltinsMSP430* + - llvm/test/MC/Disassembler/MSP430/** + +backend:Sparc: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsSparc.td + - llvm/test/MC/Sparc/** + - llvm/lib/Target/Sparc/** + - llvm/test/CodeGen/Sparc/** + - clang/lib/Basic/Targets/Sparc* + - clang/lib/Driver/ToolChains/Arch/Sparc.* + - clang/lib/CodeGen/Targets/Sparc.cpp + - clang/include/clang/Basic/BuiltinsSparc* + - llvm/test/MC/Disassembler/Sparc/** + +backend:WebAssembly: + - changed-files: + - any-glob-to-any-file: + - llvm/lib/Target/WebAssembly/** + - llvm/test/CodeGen/WebAssembly/** + - clang/lib/Basic/Targets/WebAssembly* + - clang/include/clang/Basic/BuiltinsWebAssembly.def + - clang/include/clang/Basic/WebAssemblyReferenceTypes.def + - clang/lib/CodeGen/Targets/WebAssembly* + - llvm/include/llvm/IR/IntrinsicsWebAssembly.td + - llvm/include/llvm/Object/Wasm* + - llvm/lib/CodeGen/AsmPrinter/Wasm* + - llvm/lib/CodeGen/Wasm* + - llvm/lib/MC/MCParser/Wasm* + - llvm/lib/MC/Wasm* + - llvm/lib/ObjCopy/wasm/** + - llvm/lib/Object/Wasm* + - clang/lib/Driver/ToolChains/WebAssembly* + - clang/lib/Headers/wasm_simd128.h + - clang/test/CodeGen/WebAssembly/** + - clang/test/SemaCXX/*wasm* + - clang/test/Sema/*wasm* + - llvm/include/llvm/BinaryFormat/Wasm.h + - llvm/unittests/Target/WebAssembly/** + - llvm/test/DebugInfo/WebAssembly/** + - llvm/test/MC/WebAssembly/** + - clang/include/clang/Sema/SemaWasm.h + - clang/lib/Sema/SemaWasm.cpp + +backend:X86: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/IR/IntrinsicsX86.td + - llvm/lib/Target/X86/** + - llvm/test/CodeGen/X86/** 
+ - llvm/test/MC/X86/** + - llvm/test/MC/Disassembler/X86/** + - llvm/test/Analysis/CostModel/X86/** + - llvm/test/tools/llvm-mca/X86/** + - clang/lib/Basic/Targets/X86/** + - clang/lib/Driver/ToolChains/Arch/X86.* + - clang/lib/CodeGen/Targets/X86.* + - clang/lib/Headers/** + - clang/test/CodeGen/X86/** + - clang/include/clang/Basic/BuiltinsX86* + - llvm/include/llvm/Support/X86DisassemblerDecoderCommon.h + - llvm/include/llvm/TargetParser/X86* + - llvm/lib/TargetParser/X86* + - llvm/utils/TableGen/X86* + - clang/include/clang/Sema/SemaX86.h + - clang/lib/Sema/SemaX86.cpp + +backend:PowerPC: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/BinaryFormat/ELFRelocs/PowerPC* + - llvm/include/llvm/BinaryFormat/XCOFF.h + - llvm/include/llvm/IR/IntrinsicsPowerPC.td + - llvm/lib/CodeGen/AsmPrinter/AIXException.cpp + - llvm/lib/Target/PowerPC/** + - llvm/test/Analysis/**/PowerPC/** + - llvm/test/CodeGen/PowerPC/** + - llvm/test/CodeGen/MIR/PowerPC/** + - llvm/test/DebugInfo/XCOFF/** + - llvm/test/DebugInfo/PowerPC/** + - llvm/test/LTO/PowerPC/** + - llvm/test/MC/Disassembler/PowerPC/** + - llvm/test/MC/PowerPC/** + - llvm/test/MC/XCOFF/** + - llvm/test/Transforms/**/PowerPC/** + - clang/include/clang/Basic/BuiltinsPPC.* + - clang/lib/Basic/Targets/PPC.* + - clang/lib/CodeGen/Targets/PPC.cpp + - clang/lib/Driver/ToolChains/PPC* + - clang/lib/Driver/ToolChains/AIX* + - clang/lib/Driver/ToolChains/Arch/PPC.* + - clang/test/CodeGen/PowerPC/** + - clang/include/clang/Sema/SemaPPC.h + - clang/lib/Sema/SemaPPC.cpp + +backend:SystemZ: + - changed-files: + - any-glob-to-any-file: + - llvm/include/llvm/BinaryFormat/ELFRelocs/SystemZ* + - llvm/include/llvm/BinaryFormat/GOFF.h + - llvm/include/llvm/IR/IntrinsicsSystemZ.td + - llvm/lib/Target/SystemZ/** + - llvm/test/Analysis/**/SystemZ/** + - llvm/test/CodeGen/SystemZ/** + - llvm/test/DebugInfo/SystemZ/** + - llvm/test/ExecutionEngine/**/SystemZ/** + - llvm/test/MC/Disassembler/SystemZ/** + - llvm/test/MC/GOFF/** + - 
llvm/test/MC/SystemZ/** + - llvm/test/Transforms/**/SystemZ/** + - clang/include/clang/Basic/BuiltinsSystemZ.* + - clang/lib/Basic/Targets/SystemZ.* + - clang/lib/CodeGen/Targets/SystemZ.cpp + - clang/lib/Driver/ToolChains/ZOS* + - clang/lib/Driver/ToolChains/Arch/SystemZ.* + - clang/test/CodeGen/SystemZ/** + - clang/include/clang/Sema/SemaSystemZ.h + - clang/lib/Sema/SemaSystemZ.cpp + +third-party:unittests: + - changed-files: + - any-glob-to-any-file: + - third-party/unittests/** + +third-party:benchmark: + - changed-files: + - any-glob-to-any-file: + - third-party/benchmark/** + +llvm:binary-utilities: + - changed-files: + - any-glob-to-any-file: + - llvm/docs/CommandGuide/llvm-* + - llvm/include/llvm/BinaryFormat/** + - llvm/include/llvm/DebugInfo/Symbolize/** + - llvm/include/llvm/ObjCopy/** + - llvm/include/llvm/Object/** + - llvm/lib/BinaryFormat/** + - llvm/lib/DebugInfo/Symbolize/** + - llvm/lib/ObjCopy/** + - llvm/lib/Object/** + - llvm/test/Object/** + - llvm/test/tools/llvm-ar/** + - llvm/test/tools/llvm-cxxfilt/** + - llvm/test/tools/llvm-nm/** + - llvm/test/tools/llvm-objcopy/** + - llvm/test/tools/llvm-objdump/** + - llvm/test/tools/llvm-readobj/** + - llvm/test/tools/llvm-size/** + - llvm/test/tools/llvm-strings/** + - llvm/test/tools/llvm-symbolizer/** + - llvm/tools/llvm-ar/** + - llvm/tools/llvm-cxxfilt/** + - llvm/tools/llvm-nm/** + - llvm/tools/llvm-objcopy/** + - llvm/tools/llvm-objdump/** + - llvm/tools/llvm-readobj/** + - llvm/tools/llvm-size/** + - llvm/tools/llvm-strings/** + - llvm/tools/llvm-symbolizer/** + +clang:openmp: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/Basic/OpenMP* + - clang/include/clang/AST/OpenMPClause.h + - clang/include/clang/AST/DeclOpenMP.h + - clang/include/clang/AST/ExprOpenMP.h + - clang/include/clang/AST/StmtOpenMP.h + - clang/lib/AST/DeclOpenMP.cpp + - clang/lib/AST/OpenMPClause.cpp + - clang/lib/AST/StmtOpenMP.cpp + - clang/lib/Headers/openmp_wrappers/** + - 
clang/lib/Parse/ParseOpenMP.cpp + - clang/lib/Basic/OpenMPKinds.cpp + - clang/lib/Driver/ToolChains/AMDGPUOpenMP.cpp + - clang/lib/Driver/ToolChains/AMDGPUOpenMP.h + - clang/lib/CodeGen/CGStmtOpenMP.cpp + - clang/lib/CodeGen/CGOpenMP* + - clang/lib/Sema/SemaOpenMP.cpp + - clang/test/OpenMP/** + - clang/test/AST/ast-dump-openmp-* + - llvm/lib/Frontend/OpenMP/** + - llvm/lib/Transforms/IPO/OpenMPOpt.cpp + - llvm/include/llvm/Frontend/OpenMP/** + - llvm/include/llvm/Transforms/IPO/OpenMPOpt.h + - llvm/unittests/Frontend/OpenMP* + - llvm/test/Transforms/OpenMP/** + +clang:temporal-safety: + - changed-files: + - any-glob-to-any-file: + - clang/include/clang/Analysis/Analyses/LifetimeSafety/** + - clang/lib/Analysis/LifetimeSafety/** + - clang/unittests/Analysis/LifetimeSafety* + - clang/test/Sema/*lifetime-safety* + - clang/test/Sema/*lifetime-analysis* + - clang/test/Analysis/LifetimeSafety/** + +clang:as-a-library: + - changed-files: + - any-glob-to-any-file: + - clang/tools/libclang/** + - clang/bindings/** + - clang/include/clang-c/** + - clang/test/LibClang/** + - clang/unittests/libclang/** + +openmp:libomp: + - changed-files: + - any-glob-to-any-file: + - 'openmp/**' + +openmp:libomptarget: + - changed-files: + - all-globs-to-all-files: + - openmp/** + - '!openmp/runtime/**' + +bazel: + - changed-files: + - any-glob-to-any-file: + - utils/bazel/** + +offload: + - changed-files: + - any-glob-to-any-file: + - offload/** + +tablegen: + - changed-files: + - any-glob-to-any-file: + - llvm/include/TableGen/** + - llvm/lib/TableGen/** + - llvm/utils/TableGen/** + +infrastructure: + - changed-files: + - any-glob-to-any-file: + - .ci/** diff --git a/.github/renovate.json b/.github/renovate.json new file mode 100644 index 000000000000..8e89ba8c4b32 --- /dev/null +++ b/.github/renovate.json @@ -0,0 +1,19 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:recommended" + ], + "includePaths": [".github/**"], + "schedule": "* 0 * * 
1", + "minimumReleaseAge": "3 days", + "assignees": ["boomanaiden154"], + "ignorePaths": [".github/workflows/containers/**"], + "groupName": "[Github] Update GHA Dependencies", + "packageRules": [ + { + "matchPackageNames": ["windows", "macos"], + "matchManagers": ["github-actions"], + "enabled": false + } + ] +} diff --git a/.github/workflows/base-build.yml b/.github/workflows/base-build.yml new file mode 100644 index 000000000000..e77e5e8d2eed --- /dev/null +++ b/.github/workflows/base-build.yml @@ -0,0 +1,75 @@ +name: base-build + +on: + workflow_call: + inputs: + build-script: + required: true + type: string + is-nightly: + required: false + type: boolean + arch: + required: false + type: string + runner: + required: true + type: string + +permissions: + contents: read + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + runs-on: ${{ inputs.runner }} + env: + ARTIFACT_DIR: ${{ github.workspace }}/nightly-toolchain + + steps: + - name: Cleanup workspace + shell: bash + run: | + set -euo pipefail + echo "Cleaning workspace: ${GITHUB_WORKSPACE}" + shopt -s dotglob nullglob + rm -rf "${GITHUB_WORKSPACE:?}/"* + + - name: Checkout source + uses: actions/checkout@v4 + with: + repository: ${{ github.event.pull_request.head.repo.full_name || github.repository }} + ref: ${{ github.event.pull_request.head.sha || github.sha }} + path: src + clean: true + fetch-depth: 0 + + - name: Build + shell: bash + env: + GIT_AUTHOR_NAME: "Qualcomm CI" + GIT_AUTHOR_EMAIL: "ci@qti.qualcomm.com" + GIT_COMMITTER_NAME: "Qualcomm CI" + GIT_COMMITTER_EMAIL: "ci@qti.qualcomm.com" + run: | + set -euo pipefail + build_script="${{ inputs.build-script }}" + if [ "${{ inputs.is-nightly }}" = "true" ]; then + build_script="$build_script --artifact-dir $ARTIFACT_DIR --nightly" + fi + echo "Running: $build_script" + $build_script + + - name: Upload Artifact + if: ${{ inputs.is-nightly && success() }} + uses: actions/upload-artifact@v4 + 
with: + name: cpullvm-toolchain-${{ github.run_id }}-Linux-${{ inputs.arch }} + path: | + ${{ env.ARTIFACT_DIR }}/*.txz + ${{ env.ARTIFACT_DIR }}/*.tgz + if-no-files-found: ignore + retention-days: 30 diff --git a/.github/workflows/bazel-checks.yml b/.github/workflows/bazel-checks.yml new file mode 100644 index 000000000000..5b3b0241b58d --- /dev/null +++ b/.github/workflows/bazel-checks.yml @@ -0,0 +1,61 @@ +name: Bazel Checks + +permissions: + contents: read + +on: + push: + paths: + - '.github/workflows/bazel-checks.yml' + - 'utils/bazel/**' + branches: + - main + pull_request: + paths: + - '.github/workflows/bazel-checks.yml' + - 'utils/bazel/**' + +jobs: + buildifier: + name: "Buildifier" + runs-on: ubuntu-24.04 + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Setup Buildifier + run: | + sudo curl -L https://github.com/bazelbuild/buildtools/releases/download/v8.2.1/buildifier-linux-amd64 -o /usr/bin/buildifier --fail + sudo chmod +x /usr/bin/buildifier + - name: Run Buildifier + run: | + buildifier --mode=check $(find ./utils/bazel -name "*BUILD*") + + bazel-build: + name: "Bazel Build/Test" + # Only run on US Central workers so we only have to keep one cache warm as + # the cache buckets are per cluster. + runs-on: + group: llvm-premerge-cluster-us-central + labels: llvm-premerge-linux-runners + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + # TODO(boomanaiden154): We should use a purpose built container for this. Move + # over when we have fixed the issues with using custom containers with Github + # ARC in GKE. 
+ - name: Setup System Dependencies + run: | + sudo apt-get update + sudo apt-get install -y libmpfr-dev libpfm4-dev m4 libedit-dev + sudo curl -L https://github.com/bazelbuild/bazelisk/releases/download/v1.27.0/bazelisk-amd64.deb --fail > /tmp/bazelisk.deb + sudo apt-get install -y /tmp/bazelisk.deb + rm /tmp/bazelisk.deb + - name: Build/Test + working-directory: utils/bazel + run: | + bazelisk test --config=ci --sandbox_base="" \ + --remote_cache=https://storage.googleapis.com/$CACHE_GCS_BUCKET-bazel \ + --google_default_credentials \ + @llvm-project//... //... diff --git a/.github/workflows/build-ci-container-tooling.yml b/.github/workflows/build-ci-container-tooling.yml new file mode 100644 index 000000000000..f24dbe04516a --- /dev/null +++ b/.github/workflows/build-ci-container-tooling.yml @@ -0,0 +1,77 @@ +name: Build CI Tooling Containers + +permissions: + contents: read + +on: + push: + branches: + - main + paths: + - .github/workflows/build-ci-container-tooling.yml + - '.github/workflows/containers/github-action-ci-tooling/**' + - llvm/utils/git/requirements_formatting.txt + - llvm/utils/git/requirements_linting.txt + - '.github/actions/build-container/**' + - '.github/actions/push-container/**' + pull_request: + paths: + - .github/workflows/build-ci-container-tooling.yml + - '.github/workflows/containers/github-action-ci-tooling/**' + - llvm/utils/git/requirements_formatting.txt + - llvm/utils/git/requirements_linting.txt + - '.github/actions/build-container/**' + - '.github/actions/push-container/**' + +jobs: + build-ci-container-tooling: + name: Build Container ${{ matrix.container-name }} + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + strategy: + fail-fast: false + matrix: + include: + - container-name: format + test-command: 'cd $HOME && clang-format --version | grep version && git-clang-format -h | grep usage && black --version | grep black' + - container-name: lint + test-command: 'cd $HOME && clang-tidy --version | grep version 
&& clang-tidy-diff.py -h | grep usage' + - container-name: abi-tests + test-command: 'cd $HOME && abi-compliance-checker --help' + target: abi-tests + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/workflows/containers/github-action-ci-tooling/ + llvm/utils/git/requirements_formatting.txt + llvm/utils/git/requirements_linting.txt + clang-tools-extra/clang-tidy/tool/clang-tidy-diff.py + .github/actions/build-container + + - name: Build Container + uses: ./.github/actions/build-container + with: + container-name: ci-ubuntu-24.04-${{ matrix.container-name }} + dockerfile: .github/workflows/containers/github-action-ci-tooling/Dockerfile + target: ci-container-${{ matrix.target || format('code-{0}', matrix.container-name) }} + test-command: ${{ matrix.test-command }} + + push-ci-container: + if: github.event_name == 'push' + needs: + - build-ci-container-tooling + permissions: + packages: write + runs-on: ubuntu-24.04 + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/actions/push-container + + - uses: ./.github/actions/push-container + with: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/build-ci-container-windows.yml b/.github/workflows/build-ci-container-windows.yml new file mode 100644 index 000000000000..d120c812bfc6 --- /dev/null +++ b/.github/workflows/build-ci-container-windows.yml @@ -0,0 +1,77 @@ +name: Build Windows CI Container + +permissions: + contents: read + +on: + push: + branches: + - main + paths: + - .github/workflows/build-ci-container-windows.yml + - '.github/workflows/containers/github-action-ci-windows/**' + pull_request: + paths: + - .github/workflows/build-ci-container-windows.yml + - '.github/workflows/containers/github-action-ci-windows/**' + +jobs: + build-ci-container-windows: + if: github.repository_owner == 'llvm' + runs-on: 
windows-2022 + outputs: + container-name: ${{ steps.vars.outputs.container-name }} + container-name-tag: ${{ steps.vars.outputs.container-name-tag }} + container-filename: ${{ steps.vars.outputs.container-filename }} + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: .github/workflows/containers/github-action-ci-windows + - name: Write Variables + id: vars + run: | + $tag = [int64](Get-Date -UFormat %s) + $container_name="ghcr.io/$env:GITHUB_REPOSITORY_OWNER/ci-windows-2022" + echo "container-name=${container_name}" >> $env:GITHUB_OUTPUT + echo "container-name-tag=${container_name}:${tag}" >> $env:GITHUB_OUTPUT + echo "container-filename=ci-windows-${tag}.tar" >> $env:GITHUB_OUTPUT + - name: Build Container + working-directory: .github/workflows/containers/github-action-ci-windows + run: | + docker build -t ${{ steps.vars.outputs.container-name-tag }} . + - name: Save container image + run: | + docker save ${{ steps.vars.outputs.container-name-tag }} > ${{ steps.vars.outputs.container-filename }} + - name: Upload container image + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: container + path: ${{ steps.vars.outputs.container-filename }} + retention-days: 14 + + push-ci-container: + if: github.event_name == 'push' + needs: + - build-ci-container-windows + permissions: + packages: write + runs-on: ubuntu-24.04 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - name: Download container + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: container + - name: Push Container + run: | + sudo apt-get update + sudo apt-get install -y skopeo + skopeo login -u ${{ github.actor }} -p ${{ secrets.GITHUB_TOKEN }} ghcr.io + skopeo copy docker-archive:${{ needs.build-ci-container-windows.outputs.container-filename }} \ + --dest-compress-format zstd \ + docker://${{ 
needs.build-ci-container-windows.outputs.container-name-tag }} + skopeo copy docker-archive:${{ needs.build-ci-container-windows.outputs.container-filename }} \ + --dest-compress-format zstd \ + docker://${{ needs.build-ci-container-windows.outputs.container-name }}:latest diff --git a/.github/workflows/check-ci.yml b/.github/workflows/check-ci.yml new file mode 100644 index 000000000000..10c35cc381ed --- /dev/null +++ b/.github/workflows/check-ci.yml @@ -0,0 +1,40 @@ +name: Check CI Scripts + +permissions: + contents: read + +on: + push: + branches: + - main + paths: + - '.ci/**' + - '.github/workflows/check-ci.yml' + pull_request: + paths: + - '.ci/**' + - '.github/workflows/check-ci.yml' + +jobs: + test-python: + name: "Check Python Tests" + runs-on: ubuntu-24.04 + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: .ci + - name: Setup Python + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: 3.14 + cache: 'pip' + - name: Install Python Dependencies + run: | + pip3 install -r .ci/all_requirements.txt + pip3 install -r .ci/metrics/requirements.lock.txt + pip3 install pytest==8.4.1 + - name: Run Tests + working-directory: .ci + run: pytest diff --git a/.github/workflows/ci-post-commit-analyzer.yml b/.github/workflows/ci-post-commit-analyzer.yml new file mode 100644 index 000000000000..fa41a39a4f01 --- /dev/null +++ b/.github/workflows/ci-post-commit-analyzer.yml @@ -0,0 +1,95 @@ +name: Post-Commit Static Analyzer + +permissions: + contents: read + +on: + push: + branches: + - 'release/**' + paths: + - 'clang/**' + - 'llvm/**' + - '.github/workflows/ci-post-commit-analyzer.yml' + pull_request: + types: + - opened + - synchronize + - reopened + - closed + paths: + - '.github/workflows/ci-post-commit-analyzer.yml' + - '.github/workflows/ci-post-commit-analyzer-run.py' + schedule: + - 
cron: '30 0 * * *' + +concurrency: + group: >- + llvm-project-${{ github.workflow }}-${{ github.event_name == 'pull_request' && + ( github.event.pull_request.number || github.ref) }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + post-commit-analyzer: + if: >- + github.repository_owner == 'llvm' && + github.event.action != 'closed' + runs-on: ubuntu-24.04 + container: + image: 'ghcr.io/llvm/ci-ubuntu-24.04:latest' + env: + LLVM_VERSION: 18 + steps: + - name: Checkout Source + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + + - name: Setup ccache + uses: hendrikmuhs/ccache-action@5ebbd400eff9e74630f759d94ddd7b6c26299639 # v1.2.20 + with: + # A full build of llvm, clang, lld, and lldb takes about 250MB + # of ccache space. There's not much reason to have more than this, + # because we usually won't need to save cache entries from older + # builds. Also, there is an overall 10GB cache limit, and each + # run creates a new cache entry so we want to ensure that we have + # enough cache space for all the tests to run at once and still + # fit under the 10 GB limit. + # Default to 2G to workaround: https://github.com/hendrikmuhs/ccache-action/issues/174 + max-size: 2G + key: post-commit-analyzer + variant: sccache + + - name: Configure + run: | + cmake -B build -S llvm -G Ninja \ + -DLLVM_ENABLE_ASSERTIONS=ON \ + -DLLVM_ENABLE_PROJECTS=clang \ + -DLLVM_BUILD_LLVM_DYLIB=ON \ + -DLLVM_LINK_LLVM_DYLIB=ON \ + -DCMAKE_CXX_COMPILER=clang++ \ + -DCMAKE_C_COMPILER=clang \ + -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ + -DCMAKE_C_COMPILER_LAUNCHER=sccache \ + -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \ + -DLLVM_INCLUDE_TESTS=OFF \ + -DCLANG_INCLUDE_TESTS=OFF \ + -DCMAKE_BUILD_TYPE=Release + + - name: Build + run: | + # FIXME: We need to build all the generated header files in order to be able to run + # the analyzer on every file. 
Building libLLVM and libclang is probably overkill for + # this, but it's better than building every target. + ninja -v -C build libLLVM.so libclang.so + + # Run the analyzer. + python3 .github/workflows/ci-post-commit-analyzer-run.py build/compile_commands.json + + scan-build --generate-index-only build/analyzer-results + + - name: Upload Results + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: analyzer-results + path: 'build/analyzer-results/*' + diff --git a/.github/workflows/commit-access-review.yml b/.github/workflows/commit-access-review.yml new file mode 100644 index 000000000000..2809b1c3a7ef --- /dev/null +++ b/.github/workflows/commit-access-review.yml @@ -0,0 +1,43 @@ +name: Commit Access Review + +on: + workflow_dispatch: + schedule: + # * is a special character in YAML so you have to quote this string + - cron: '0 7 1 * *' + +permissions: + contents: read + issues: write + +jobs: + commit-access-review: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + + - name: Install dependencies + run: | + pip install --require-hashes -r ./llvm/utils/git/requirements.txt + + - name: Run Script + env: + GITHUB_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} + run: | + python3 .github/workflows/commit-access-review.py $GITHUB_TOKEN + + - name: Upload Triage List + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: triagers + path: triagers.log + + - name: Create the issue + env: + GITHUB_TOKEN: ${{ github.token }} + run: | + # There is a limit to the number of mentions you can have in one comment, so + # we need to limit the number of users we mention. 
+ cat triagers.log | head -n 25 | python3 .github/workflows/commit-create-issue.py $GITHUB_TOKEN diff --git a/.github/workflows/commit-create-issue.py b/.github/workflows/commit-create-issue.py new file mode 100644 index 000000000000..62bc76cd1b9b --- /dev/null +++ b/.github/workflows/commit-create-issue.py @@ -0,0 +1,42 @@ +import github +import sys + +token = sys.argv[1] +gh = github.Github(login_or_token=token) +repo = gh.get_repo("llvm/llvm-project") + +length = "4 weeks" +names = " ".join(["@" + name.rstrip() for name in sys.stdin]) + + +body = f"""### TLDR: If you want to retain your commit access, please comment on this issue. Otherwise, you can unsubscribe from this issue and ignore it. Commit access is not required to contribute to the project. You can still create Pull Requests without commit access. + +{names} + +LLVM has a policy of downgrading write access to its repositories for accounts with long term inactivity. This is done because inactive accounts with high levels of access tend to be at increased risk of compromise and this is one tactic that the project employs to guard itself from malicious actors. Note that write access is not required to contribute to the project. You can still submit pull requests and have someone else merge them. + +Our project policy is to ping anyone with less than five 'interactions' with the repositories over a 12 month period to see if they still need commit access. An 'interaction' and be any one of: + +* Pushing a commit. +* Merging a pull request (either their own or someone else’s). +* Commenting on a PR. + +If you want to retain your commit access, please post a comment on this issue. If you do not want to keep your commit access, you can just ignore this issue. If you have not responded in {length}, then you will move moved from the 'write' role within the project to the 'triage' role. The 'triage' role is still a privileged role and will allow you to do the following: + +* Review Pull Requests. 
+* Comment on issues. +* Apply/dismiss labels. +* Close, reopen, and assign all issues and pull requests. +* Apply milestones. +* Mark duplicate issues and pull requests. +* Request pull request reviews. +* Hide anyone’s comments. + +In the future, if you want to re-apply for commit access, you can follow the instructions +[here](https://llvm.org/docs/DeveloperPolicy.html#obtaining-commit-access). +""" + +issue = repo.create_issue( + title="Do you still need commit access?", body=body, labels=["infra:commit-access"] +) +print(issue.html_url) diff --git a/.github/workflows/containers/github-action-ci-tooling/Dockerfile b/.github/workflows/containers/github-action-ci-tooling/Dockerfile new file mode 100644 index 000000000000..cbd0b69b7619 --- /dev/null +++ b/.github/workflows/containers/github-action-ci-tooling/Dockerfile @@ -0,0 +1,115 @@ +ARG LLVM_VERSION=21.1.0 +# FIXME: Use "${LLVM_VERSION%%.*}" instead of "LLVM_VERSION_MAJOR" once we update runners to Ubuntu-26.04 with Buildah >= 1.37 +ARG LLVM_VERSION_MAJOR=21 + +FROM docker.io/library/ubuntu:24.04 AS llvm-downloader +ARG LLVM_VERSION +ARG LLVM_VERSION_MAJOR + +RUN apt-get update && \ + apt-get install -y wget xz-utils && \ + wget -O llvm.tar.xz https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/LLVM-${LLVM_VERSION}-Linux-X64.tar.xz && \ + mkdir -p /llvm-extract && \ + tar -xvJf llvm.tar.xz -C /llvm-extract \ + # Only unpack these tools to save space on Github runner. + LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-${LLVM_VERSION_MAJOR} \ + LLVM-${LLVM_VERSION}-Linux-X64/lib/clang/${LLVM_VERSION_MAJOR}/include \ + LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-tidy \ + LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-format \ + LLVM-${LLVM_VERSION}-Linux-X64/bin/git-clang-format && \ + rm llvm.tar.xz + + +FROM docker.io/library/ubuntu:24.04 AS base +ENV LLVM_SYSROOT=/opt/llvm +ENV PATH=${LLVM_SYSROOT}/bin:${PATH} + +# Need nodejs for some of the GitHub actions. +# Need git for git-clang-format. 
+RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y \ + git \ + nodejs \ + sudo \ + # These are needed by the premerge pipeline. + # Pip is used to install dependent python packages. + python3-pip \ + python-is-python3 && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +# Create a new user with id 1001 as that is the user id that +# Github Actions uses to perform the checkout action. +RUN useradd gha -u 1001 -m -s /bin/bash +RUN adduser gha sudo +RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers +# Don't set USER gha right away because we still need to install packages +# as root in 'ci-container-code-format' and 'ci-container-code-lint' containers + + +FROM base AS ci-container-build-tools +ARG LLVM_VERSION +ARG LLVM_VERSION_MAJOR + +COPY --from=llvm-downloader /llvm-extract/LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-${LLVM_VERSION_MAJOR} \ + ${LLVM_SYSROOT}/bin/ +COPY --from=llvm-downloader /llvm-extract/LLVM-${LLVM_VERSION}-Linux-X64/lib/clang/${LLVM_VERSION_MAJOR}/include \ + ${LLVM_SYSROOT}/lib/clang/${LLVM_VERSION_MAJOR}/include +RUN ln -s ${LLVM_SYSROOT}/bin/clang-${LLVM_VERSION_MAJOR} ${LLVM_SYSROOT}/bin/clang && \ + ln -s ${LLVM_SYSROOT}/bin/clang ${LLVM_SYSROOT}/bin/clang++ + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y \ + cmake \ + ninja-build && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +ENV CC=${LLVM_SYSROOT}/bin/clang +ENV CXX=${LLVM_SYSROOT}/bin/clang++ + + +FROM base AS ci-container-code-format +ARG LLVM_VERSION + +COPY --from=llvm-downloader /llvm-extract/LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-format \ + /llvm-extract/LLVM-${LLVM_VERSION}-Linux-X64/bin/git-clang-format \ + ${LLVM_SYSROOT}/bin/ + + +# Install dependencies for 'pr-code-format.yml' job +COPY llvm/utils/git/requirements_formatting.txt requirements_formatting.txt +RUN pip install -r requirements_formatting.txt --break-system-packages && \ + rm requirements_formatting.txt +USER gha +WORKDIR /home/gha + + 
+FROM ci-container-build-tools AS ci-container-code-lint +ARG LLVM_VERSION +ARG LLVM_VERSION_MAJOR + +COPY --from=llvm-downloader /llvm-extract/LLVM-${LLVM_VERSION}-Linux-X64/bin/clang-tidy \ + ${LLVM_SYSROOT}/bin/ +COPY clang-tools-extra/clang-tidy/tool/clang-tidy-diff.py ${LLVM_SYSROOT}/bin/clang-tidy-diff.py + +# Install dependencies for 'pr-code-lint.yml' job +COPY llvm/utils/git/requirements_linting.txt requirements_linting.txt +RUN pip install -r requirements_linting.txt --break-system-packages && \ + rm requirements_linting.txt +USER gha +WORKDIR /home/gha + + +FROM ci-container-build-tools as ci-container-abi-tests + +RUN apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y \ + abi-compliance-checker \ + abi-dumper \ + autoconf \ + parallel \ + pkg-config \ + universal-ctags && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 000000000000..6f6275b6c4de --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,215 @@ +# LLVM Documentation CI +# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +# See https://llvm.org/LICENSE.txt for license information. 
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +name: "Test documentation build" + +permissions: + contents: read + +on: + push: + branches: + - 'main' + paths: + - 'llvm/docs/**' + - 'clang/docs/**' + - 'clang/include/clang/Basic/AttrDocs.td' + - 'clang/include/clang/Driver/ClangOptionDocs.td' + - 'clang/include/clang/Basic/DiagnosticDocs.td' + - 'clang-tools-extra/docs/**' + - 'lldb/docs/**' + - 'libunwind/docs/**' + - 'libcxx/docs/**' + - 'libc/docs/**' + - 'lld/docs/**' + - 'openmp/docs/**' + - 'polly/docs/**' + - 'flang/docs/**' + - 'flang/include/flang/Optimizer/Dialect/FIROps.td' + - '.github/workflows/docs.yml' + pull_request: + paths: + - 'llvm/docs/**' + - 'clang/docs/**' + - 'clang/include/clang/Basic/AttrDocs.td' + - 'clang/include/clang/Driver/ClangOptionDocs.td' + - 'clang/include/clang/Basic/DiagnosticDocs.td' + - 'clang-tools-extra/docs/**' + - 'lldb/docs/**' + - 'libunwind/docs/**' + - 'libcxx/docs/**' + - 'libc/docs/**' + - 'lld/docs/**' + - 'openmp/docs/**' + - 'polly/docs/**' + - 'flang/docs/**' + - 'flang/include/flang/Optimizer/Dialect/FIROps.td' + - '.github/workflows/docs.yml' + +jobs: + check-docs-build: + name: "Test documentation build" + runs-on: ubuntu-24.04 + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + - name: Get subprojects that have doc changes + id: docs-changed-subprojects + uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0 + with: + skip_initial_fetch: true + base_sha: 'HEAD~1' + sha: 'HEAD' + files_yaml: | + llvm: + - 'llvm/docs/**' + clang: + - 'clang/docs/**' + - 'clang/include/clang/Basic/AttrDocs.td' + - 'clang/include/clang/Driver/ClangOptionDocs.td' + - 'clang/include/clang/Basic/DiagnosticDocs.td' + clang-tools-extra: + - 'clang-tools-extra/docs/**' + lldb: + - 'lldb/docs/**' + libunwind: + - 'libunwind/docs/**' + libcxx: + - 
'libcxx/docs/**' + libc: + - 'libc/docs/**' + lld: + - 'lld/docs/**' + openmp: + - 'openmp/docs/**' + polly: + - 'polly/docs/**' + flang: + - 'flang/docs/**' + - 'flang/include/flang/Optimizer/Dialect/FIROps.td' + workflow: + - '.github/workflows/docs.yml' + - name: Setup Python env + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: '3.14' + cache: 'pip' + cache-dependency-path: 'llvm/docs/requirements-hashed.txt' + - name: Install python dependencies + run: pip install -r llvm/docs/requirements-hashed.txt + - name: Install system dependencies + run: | + sudo apt-get update + # swig and graphviz are lldb specific dependencies + sudo apt-get install -y cmake ninja-build swig graphviz + - name: Setup output folder + run: mkdir built-docs + - name: Build LLVM docs + if: | + steps.docs-changed-subprojects.outputs.llvm_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B llvm-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C llvm-build docs-llvm-html docs-llvm-man + mkdir built-docs/llvm + cp -r llvm-build/docs/* built-docs/llvm/ + - name: Build Clang docs + if: | + steps.docs-changed-subprojects.outputs.clang_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B clang-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C clang-build docs-clang-html docs-clang-man + mkdir built-docs/clang + cp -r clang-build/docs/* built-docs/clang/ + - name: Build clang-tools-extra docs + if: | + steps.docs-changed-subprojects.outputs.clang-tools-extra_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B clang-tools-extra-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;clang-tools-extra" -DLLVM_ENABLE_SPHINX=ON ./llvm 
+ TZ=UTC ninja -C clang-tools-extra-build docs-clang-tools-html docs-clang-tools-man + mkdir built-docs/clang-tools-extra + cp -r clang-tools-extra-build/docs/* built-docs/clang-tools-extra/ + - name: Build LLDB docs + if: | + steps.docs-changed-subprojects.outputs.lldb_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B lldb-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;lldb" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C lldb-build docs-lldb-html docs-lldb-man + mkdir built-docs/lldb + cp -r lldb-build/docs/* built-docs/lldb/ + - name: Build libunwind docs + if: | + steps.docs-changed-subprojects.outputs.libunwind_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B libunwind-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libunwind" -DLLVM_ENABLE_SPHINX=ON ./runtimes + TZ=UTC ninja -C libunwind-build docs-libunwind-html + mkdir built-docs/libunwind + cp -r libunwind-build/libunwind/docs/* built-docs/libunwind + - name: Build libcxx docs + if: | + steps.docs-changed-subprojects.outputs.libcxx_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B libcxx-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libcxxabi;libcxx;libunwind" -DLLVM_ENABLE_SPHINX=ON ./runtimes + TZ=UTC ninja -C libcxx-build docs-libcxx-html + mkdir built-docs/libcxx + cp -r libcxx-build/libcxx/docs/* built-docs/libcxx/ + - name: Build libc docs + if: | + steps.docs-changed-subprojects.outputs.libc_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B libc-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_RUNTIMES="libc" -DLLVM_ENABLE_SPHINX=ON ./runtimes + TZ=UTC ninja -C libc-build docs-libc-html + mkdir built-docs/libc + cp -r libc-build/libc/docs/* built-docs/libc/ + - name: 
Build LLD docs + if: | + steps.docs-changed-subprojects.outputs.lld_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B lld-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="lld" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C lld-build docs-lld-html + mkdir built-docs/lld + cp -r lld-build/docs/* built-docs/lld/ + - name: Build OpenMP docs + if: | + steps.docs-changed-subprojects.outputs.openmp_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B openmp-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;openmp" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C openmp-build docs-openmp-html + mkdir built-docs/openmp + cp -r openmp-build/docs/* built-docs/openmp/ + - name: Build Polly docs + if: | + steps.docs-changed-subprojects.outputs.polly_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B polly-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="polly" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C polly-build docs-polly-html docs-polly-man + mkdir built-docs/polly + cp -r polly-build/docs/* built-docs/polly/ + - name: Build Flang docs + if: | + steps.docs-changed-subprojects.outputs.flang_any_changed == 'true' || + steps.docs-changed-subprojects.outputs.workflow_any_changed == 'true' + run: | + cmake -B flang-build -GNinja -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_PROJECTS="clang;mlir;flang" -DLLVM_ENABLE_SPHINX=ON ./llvm + TZ=UTC ninja -C flang-build docs-flang-html docs-flang-man + mkdir built-docs/flang + cp -r flang-build/docs/* built-docs/flang/ + - name: Upload docs + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: docs-output + path: built-docs/ diff --git a/.github/workflows/email-check.yaml b/.github/workflows/email-check.yaml new file mode 100644 index 
000000000000..00e528e7e135 --- /dev/null +++ b/.github/workflows/email-check.yaml @@ -0,0 +1,47 @@ +name: "Check for private emails used in PRs" + +on: + pull_request: + types: + - opened + +permissions: + contents: read + +jobs: + validate_email: + runs-on: ubuntu-24.04 + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Extract author email + id: author + run: | + git log -1 + echo "EMAIL=$(git show -s --format='%ae' HEAD~0)" >> $GITHUB_OUTPUT + # Create empty comment file + echo "[]" > comments + + - name: Validate author email + if: ${{ endsWith(steps.author.outputs.EMAIL, 'noreply.github.com') }} + env: + COMMENT: >- + ⚠️ We detected that you are using a GitHub private e-mail address to contribute to the repo.
+ Please turn off the [Keep my email addresses private](https://github.com/settings/emails) setting in your account.
+ See [LLVM Developer Policy](https://llvm.org/docs/DeveloperPolicy.html#email-addresses) and + [LLVM Discourse](https://discourse.llvm.org/t/hidden-emails-on-github-should-we-do-something-about-it) for more information. + run: | + cat << EOF > comments + [{"body" : "$COMMENT"}] + EOF + + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: workflow-args + path: | + comments diff --git a/.github/workflows/gha-codeql.yml b/.github/workflows/gha-codeql.yml new file mode 100644 index 000000000000..10da1838cfeb --- /dev/null +++ b/.github/workflows/gha-codeql.yml @@ -0,0 +1,37 @@ +name: Github Actions CodeQL + +permissions: + contents: read + +on: + pull_request: + branches: + - main + paths: + - '.github/**' + schedule: + - cron: '30 0 * * *' + +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true + +jobs: + codeql: + name: 'Github Actions CodeQL' + runs-on: ubuntu-24.04 + permissions: + security-events: write + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/ + - name: Initialize CodeQL + uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6 + with: + languages: actions + queries: security-extended + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6 diff --git a/.github/workflows/libclang-abi-tests.yml b/.github/workflows/libclang-abi-tests.yml new file mode 100644 index 000000000000..2c0b13ee5ec0 --- /dev/null +++ b/.github/workflows/libclang-abi-tests.yml @@ -0,0 +1,158 @@ +name: libclang ABI Tests + +permissions: + contents: read + +on: + workflow_dispatch: + push: + branches: + - 'release/**' + paths: + - 'clang/**' + - '.github/workflows/libclang-abi-tests.yml' + pull_request: + branches: + - 'release/**' + paths: + - 'clang/**' + - '.github/workflows/libclang-abi-tests.yml' + +concurrency: + 
# Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + abi-dump-setup: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + outputs: + BASELINE_REF: ${{ steps.vars.outputs.BASELINE_REF }} + ABI_HEADERS: ${{ steps.vars.outputs.ABI_HEADERS }} + ABI_LIBS: ${{ steps.vars.outputs.ABI_LIBS }} + BASELINE_VERSION_MAJOR: ${{ steps.vars.outputs.BASELINE_VERSION_MAJOR }} + LLVM_VERSION_MAJOR: ${{ steps.version.outputs.major }} + LLVM_VERSION_MINOR: ${{ steps.version.outputs.minor }} + LLVM_VERSION_PATCH: ${{ steps.version.outputs.patch }} + steps: + - name: Checkout source + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 250 + + - name: Get LLVM version + id: version + uses: ./.github/workflows/get-llvm-version + + - name: Setup Variables + id: vars + run: | + remote_repo='https://github.com/llvm/llvm-project' + if [ ${{ steps.version.outputs.patch }} -eq 0 ]; then + major_version=$(( ${{ steps.version.outputs.major }} - 1)) + baseline_ref="llvmorg-$major_version.1.0" + + # If there is a minor release, we want to use that as the base line. 
+ minor_ref=$(git ls-remote --refs -t "$remote_repo" llvmorg-"$major_version".[1-9].[0-9] | tail -n1 | grep -o 'llvmorg-.\+' || true) + if [ -n "$minor_ref" ]; then + baseline_ref="$minor_ref" + else + # Check if we have a release candidate + rc_ref=$(git ls-remote --refs -t "$remote_repo" llvmorg-"$major_version".[1-9].[0-9]-rc* | tail -n1 | grep -o 'llvmorg-.\+' || true) + if [ -n "$rc_ref" ]; then + baseline_ref="$rc_ref" + fi + fi + { + echo "BASELINE_VERSION_MAJOR=$major_version" + echo "BASELINE_REF=$baseline_ref" + echo "ABI_HEADERS=clang-c" + echo "ABI_LIBS=libclang.so" + } >> "$GITHUB_OUTPUT" + else + { + echo "BASELINE_VERSION_MAJOR=${{ steps.version.outputs.major }}" + echo "BASELINE_REF=llvmorg-${{ steps.version.outputs.major }}.1.0" + echo "ABI_HEADERS=." + echo "ABI_LIBS=libclang.so libclang-cpp.so" + } >> "$GITHUB_OUTPUT" + fi + + abi-dump: + if: github.repository_owner == 'llvm' + needs: abi-dump-setup + runs-on: ubuntu-24.04 + container: + image: "ghcr.io/llvm/ci-ubuntu-24.04-abi-tests@sha256:9138b6aea737d935e92ad2afdf5d49325880f9b187b5b979b135ac80cd456135" #ghcr.io/llvm/ci-ubuntu-24.04-abi-tests:9524b37c503f + strategy: + matrix: + name: + - build-baseline + - build-latest + include: + - name: build-baseline + llvm_version_major: ${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }} + ref: ${{ needs.abi-dump-setup.outputs.BASELINE_REF }} + repo: llvm/llvm-project + - name: build-latest + llvm_version_major: ${{ needs.abi-dump-setup.outputs.LLVM_VERSION_MAJOR }} + ref: ${{ github.sha }} + repo: ${{ github.repository }} + steps: + - name: Download source code + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + ref: ${{ matrix.ref }} + repository: ${{ matrix.repo }} + - name: Configure + run: | + mkdir install + cmake -B build -S llvm -G Ninja -DLLVM_ENABLE_PROJECTS=clang -DCMAKE_BUILD_TYPE=Debug -DLLVM_TARGETS_TO_BUILD="" -DLLVM_BUILD_LLVM_DYLIB=ON -DLLVM_LINK_LLVM_DYLIB=ON 
-DCMAKE_INSTALL_PREFIX="$(pwd)"/install llvm + - name: Build + run: ninja -C build/ ${{ needs.abi-dump-setup.outputs.ABI_LIBS }} install-clang-headers + - name: Dump ABI + run: | + parallel abi-dumper -lver ${{ matrix.ref }} -skip-cxx -public-headers ./install/include/${{ needs.abi-dump-setup.outputs.ABI_HEADERS }} -o {}-${{ matrix.ref }}.abi ./build/lib/{} ::: ${{ needs.abi-dump-setup.outputs.ABI_LIBS }} + for lib in ${{ needs.abi-dump-setup.outputs.ABI_LIBS }}; do + # Remove symbol versioning from dumps, so we can compare across major versions. + sed -i 's/LLVM_[0-9]\+/LLVM_NOVERSION/' $lib-${{ matrix.ref }}.abi + done + - name: Upload ABI file + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: ${{ matrix.name }} + path: '*${{ matrix.ref }}.abi' + + abi-compare: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + container: + image: "ghcr.io/llvm/ci-ubuntu-24.04-abi-tests@sha256:9138b6aea737d935e92ad2afdf5d49325880f9b187b5b979b135ac80cd456135" #ghcr.io/llvm/ci-ubuntu-24.04-abi-tests:9524b37c503f + needs: + - abi-dump-setup + - abi-dump + steps: + - name: Download baseline + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: build-baseline + path: build-baseline + - name: Download latest + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: build-latest + path: build-latest + + - name: Compare ABI + run: | + for lib in ${{ needs.abi-dump-setup.outputs.ABI_LIBS }}; do + abi-compliance-checker -lib $lib -old build-baseline/$lib*.abi -new build-latest/$lib*.abi + done + - name: Upload ABI Comparison + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: compat-report-${{ github.sha }} + path: compat_reports/ diff --git a/.github/workflows/libcxx-build-and-test.yaml b/.github/workflows/libcxx-build-and-test.yaml new file mode 100644 index 
000000000000..f9defec2b52c --- /dev/null +++ b/.github/workflows/libcxx-build-and-test.yaml @@ -0,0 +1,295 @@ +# This file defines pre-commit CI for libc++, libc++abi, and libunwind (on Github). +# +# We split the configurations in multiple stages with the intent of saving compute time +# when a job fails early in the pipeline. This is why the jobs are marked as `continue-on-error: false`. +# We try to run the CI configurations with the most signal in the first stage. +# +# Stages 1 & 2 are meant to be "smoke tests", and are meant to catch most build/test failures quickly and without using +# too many resources. +# Stage 3 is "everything else", and is meant to catch breakages on more niche or unique configurations. +# +# Therefore, we "fail-fast" for any failures during stages 1 & 2, meaning any job failing cancels all other running jobs, +# under the assumption that if the "smoke tests" fail, then the other configurations will likely fail in the same way. +# However, stage 3 does not fail fast, as it's more likely that any one job failing is a flake or a configuration-specific +# +name: Build and Test libc++ +on: + pull_request: + paths: + - 'libcxx/**' + - 'libcxxabi/**' + - 'libunwind/**' + - 'runtimes/**' + - 'cmake/**' + - '.github/workflows/libcxx-build-and-test.yaml' + schedule: + # Run nightly at 08:00 UTC (aka 00:00 Pacific, aka 03:00 Eastern) + - cron: '0 8 * * *' + +permissions: + contents: read # Default everything to read-only + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + stage1: + if: github.repository_owner == 'llvm' + runs-on: llvm-premerge-libcxx-release-runners + continue-on-error: false + strategy: + fail-fast: false + matrix: + config: [ + 'frozen-cxx03-headers', + 'generic-cxx03', + 'generic-cxx26', + 'generic-modules' + ] + cc: [ 'clang-22' ] + cxx: [ 'clang++-22' ] + include: + - config: 'generic-gcc' + cc: 'gcc-15' + cxx: 'g++-15' + steps: + - uses: 
actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: ${{ matrix.config }}.${{ matrix.cxx }} + run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} + env: + CC: ${{ matrix.cc }} + CXX: ${{ matrix.cxx }} + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ${{ matrix.config }}-${{ matrix.cxx }}-results + path: | + **/test-results.xml + **/*.abilist + **/CMakeConfigureLog.yaml + **/CMakeError.log + **/CMakeOutput.log + **/crash_diagnostics/* + stage2: + if: github.repository_owner == 'llvm' + runs-on: llvm-premerge-libcxx-release-runners + needs: [ stage1 ] + continue-on-error: false + strategy: + fail-fast: false + matrix: + config: [ + 'generic-cxx11', + 'generic-cxx14', + 'generic-cxx17', + 'generic-cxx20', + 'generic-cxx23' + ] + cc: [ 'clang-22' ] + cxx: [ 'clang++-22' ] + include: + - config: 'generic-gcc-cxx11' + cc: 'gcc-15' + cxx: 'g++-15' + - config: 'generic-cxx26' + cc: 'clang-21' + cxx: 'clang++-21' + - config: 'generic-cxx26' + cc: 'clang-20' + cxx: 'clang++-20' + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: ${{ matrix.config }} + run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} + env: + CC: ${{ matrix.cc }} + CXX: ${{ matrix.cxx }} + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() # Upload artifacts even if the build or test suite fails + with: + name: ${{ matrix.config }}-${{ matrix.cxx }}-results + path: | + **/test-results.xml + **/*.abilist + **/CMakeConfigureLog.yaml + **/CMakeError.log + **/CMakeOutput.log + **/crash_diagnostics/* + stage3: + if: github.repository_owner == 'llvm' + needs: [ stage2 ] + continue-on-error: false + strategy: + fail-fast: false + max-parallel: 8 + matrix: + config: [ + 'generic-abi-unstable', + 'generic-hardening-mode-debug', + 'generic-hardening-mode-extensive', + 'generic-hardening-mode-extensive-observe-semantic', + 
'generic-hardening-mode-fast', + 'generic-hardening-mode-fast-with-abi-breaks', + 'generic-merged', + 'generic-modules-cxx17-lsv', + 'generic-no-exceptions', + 'generic-no-experimental', + 'generic-no-filesystem', + 'generic-no-localization', + 'generic-no-terminal', + 'generic-no-random_device', + 'generic-no-threads', + 'generic-no-tzdb', + 'generic-no-unicode', + 'generic-no-wide-characters', + 'generic-no-rtti', + 'generic-optimized-speed', + 'generic-static', + 'bootstrapping-build' + ] + machine: [ 'llvm-premerge-libcxx-release-runners' ] + include: + - config: 'generic-cxx26' + machine: llvm-premerge-libcxx-release-runners + - config: 'generic-asan' + machine: llvm-premerge-libcxx-release-runners + - config: 'generic-tsan' + machine: llvm-premerge-libcxx-release-runners + - config: 'generic-ubsan' + machine: llvm-premerge-libcxx-release-runners + # Use a larger machine for MSAN to avoid timeout and memory allocation issues. + - config: 'generic-msan' + machine: llvm-premerge-libcxx-release-runners + runs-on: ${{ matrix.machine }} + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: ${{ matrix.config }} + run: libcxx/utils/ci/run-buildbot ${{ matrix.config }} + env: + CC: clang-22 + CXX: clang++-22 + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: ${{ matrix.config }}-results + path: | + **/test-results.xml + **/*.abilist + **/CMakeConfigureLog.yaml + **/CMakeError.log + **/CMakeOutput.log + **/crash_diagnostics/* + + macos: + needs: [ stage2 ] + strategy: + fail-fast: false + matrix: + include: + - config: generic-cxx03 + os: macos-15 + - config: generic-cxx23 + os: macos-15 + - config: generic-modules + os: macos-15 + - config: apple-configuration + os: macos-15 + # TODO: These jobs are intended to test back-deployment (building against ToT libc++ but running against an + # older system-provided libc++.dylib). 
Doing this properly would require building the test suite on a + # recent macOS using a recent Clang (hence recent Xcode), and then running the actual test suite on an + # older mac. We could do that by e.g. sharing artifacts between the two jobs. + # + # However, our Lit configuration currently doesn't provide a good way to do that in a batch, so our only + # alternative is to actually build on the same host that we're going to run on. Sadly, that doesn't work + # since older macOSes don't support newer Xcodes. For now, we run the "backdeployment" jobs on recent + # macOS versions as a way to avoid rotting that configuration, but it doesn't provide a lot of additional + # coverage. + - config: apple-system + os: macos-15 + - config: apple-system-hardened + os: macos-15 + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 + with: + # https://github.com/actions/runner-images/blob/main/images/macos/macos-15-Readme.md + xcode-version: '26.0' + - uses: seanmiddleditch/gha-setup-ninja@3b1f8f94a2f8254bd26914c4ab9474d4f0015f67 # v6 + - name: Build and test + run: | + python3 -m venv .venv + source .venv/bin/activate + python -m pip install psutil + xcrun bash libcxx/utils/ci/run-buildbot ${{ matrix.config }} + env: + CC: clang + CXX: clang++ + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() # Upload artifacts even if the build or test suite fails + with: + name: macos-${{ matrix.config }}-results + path: | + **/test-results.xml + **/*.abilist + **/CMakeConfigureLog.yaml + **/CMakeError.log + **/CMakeOutput.log + **/crash_diagnostics/* + + windows: + needs: [ stage2 ] + strategy: + fail-fast: false + matrix: + include: + - { config: clang-cl-dll, mingw: false, cc: clang-cl, cxx: clang-cl } + - { config: clang-cl-static, mingw: false, cc: clang-cl, cxx: clang-cl } + - { config: 
clang-cl-no-vcruntime, mingw: false, cc: clang-cl, cxx: clang-cl } + - { config: clang-cl-debug, mingw: false, cc: clang-cl, cxx: clang-cl } + - { config: clang-cl-static-crt, mingw: false, cc: clang-cl, cxx: clang-cl } + - { config: mingw-dll, mingw: true, cc: cc, cxx: c++ } + - { config: mingw-dll, mingw: true, cc: i686-w64-mingw32-clang, cxx: i686-w64-mingw32-clang++ } + - { config: mingw-static, mingw: true, cc: cc, cxx: c++ } + - { config: mingw-incomplete-sysroot, mingw: true, cc: cc, cxx: c++ } + - { config: mingw-static, mingw: true, cc: cc, cxx: c++, runner: windows-11-arm } + runs-on: ${{ matrix.runner != '' && matrix.runner || 'windows-2022' }} + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Install dependencies + run: | + pip install psutil + - name: Install a current LLVM + if: ${{ matrix.mingw != true }} + run: | + choco install -y llvm --version=20.1.8 --allow-downgrade + - name: Install llvm-mingw + if: ${{ matrix.mingw == true }} + run: | + curl -LO https://github.com/mstorsjo/llvm-mingw/releases/download/20250709/llvm-mingw-20250709-ucrt-${{ matrix.runner == 'windows-11-arm' && 'aarch64' || 'x86_64' }}.zip + powershell Expand-Archive llvm-mingw*.zip -DestinationPath . 
+ del llvm-mingw*.zip + mv llvm-mingw* c:\llvm-mingw + echo "c:\llvm-mingw\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append + - name: Simulate a from-scratch build of llvm-mingw + if: ${{ matrix.config == 'mingw-incomplete-sysroot' }} + run: | + rm -r c:\llvm-mingw\include\c++ + rm -r c:\llvm-mingw\*-w64-mingw32\lib\libc++* + rm -r c:\llvm-mingw\*-w64-mingw32\lib\libunwind* + - name: Add Git Bash to the path + run: | + echo "c:\Program Files\Git\usr\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append + - name: Set up the MSVC dev environment + if: ${{ matrix.mingw != true }} + uses: ilammy/msvc-dev-cmd@0b201ec74fa43914dc39ae48a89fd1d8cb592756 # v1.13.0 + - name: Add the installed Clang at the start of the path + if: ${{ matrix.mingw != true }} + run: | + echo "c:\Program Files\LLVM\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append + - name: Build and test + run: bash libcxx/utils/ci/run-buildbot ${{ matrix.config }} + env: + CC: ${{ matrix.cc }} + CXX: ${{ matrix.cxx }} diff --git a/.github/workflows/libcxx-build-containers.yml b/.github/workflows/libcxx-build-containers.yml new file mode 100644 index 000000000000..e73f15e87a7a --- /dev/null +++ b/.github/workflows/libcxx-build-containers.yml @@ -0,0 +1,84 @@ +# This file defines an action that builds the various Docker images used to run +# libc++ CI whenever modifications to those Docker files are pushed to `main`. +# +# The images are pushed to the LLVM package registry at https://github.com/orgs/llvm/packages +# and tagged appropriately. The selection of which Docker image version is used by the libc++ +# CI nodes at any given point is controlled from the workflow files themselves. 
+ +name: Build Docker images for libc++ CI + +permissions: + contents: read + +on: + push: + branches: + - main + paths: + - 'libcxx/utils/ci/docker/**' + - '.github/workflows/libcxx-build-containers.yml' + pull_request: + paths: + - 'libcxx/utils/ci/docker/**' + - '.github/workflows/libcxx-build-containers.yml' + +jobs: + build-and-push: + runs-on: ubuntu-24.04 + if: github.repository_owner == 'llvm' + permissions: + packages: write + + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + + # The default Docker storage location for GitHub Actions doesn't have + # enough disk space, so change it to /mnt, which has more disk space. + - name: Change Docker storage location + run: | + sudo mkdir /mnt/docker + echo '{ "data-root": "/mnt/docker" }' | sudo tee /etc/docker/daemon.json + sudo systemctl restart docker + + - name: Build the base image + run: docker compose --file libcxx/utils/ci/docker/docker-compose.yml build libcxx-linux-builder-base + env: + TAG: ${{ github.sha }} + + - name: Build the Linux Github Actions image + run: docker compose --file libcxx/utils/ci/docker/docker-compose.yml build libcxx-linux-builder + env: + TAG: ${{ github.sha }} + + - name: Build the Android builder image + run: docker compose --file libcxx/utils/ci/docker/docker-compose.yml build libcxx-android-builder + env: + TAG: ${{ github.sha }} + + - name: Log in to GitHub Container Registry + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Push the images + if: github.event_name == 'push' + run: docker compose --file libcxx/utils/ci/docker/docker-compose.yml push libcxx-linux-builder-base libcxx-linux-builder libcxx-android-builder + env: + TAG: ${{ github.sha }} + + # We create tarballs with the images and upload them as artifacts, since that's useful for testing + # the images when making changes. 
+ - name: Create image tarballs + run: | + docker image save ghcr.io/llvm/libcxx-linux-builder-base:${{ github.sha }} | gzip > libcxx-linux-builder-base.tar.gz + docker image save ghcr.io/llvm/libcxx-linux-builder:${{ github.sha }} | gzip > libcxx-linux-builder.tar.gz + docker image save ghcr.io/llvm/libcxx-android-builder:${{ github.sha }} | gzip > libcxx-android-builder.tar.gz + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: libcxx-docker-images + path: | + libcxx-linux-builder-base.tar.gz + libcxx-linux-builder.tar.gz + libcxx-android-builder.tar.gz diff --git a/.github/workflows/libcxx-run-benchmarks.yml b/.github/workflows/libcxx-run-benchmarks.yml new file mode 100644 index 000000000000..eb7793039235 --- /dev/null +++ b/.github/workflows/libcxx-run-benchmarks.yml @@ -0,0 +1,116 @@ +# This file defines a workflow that runs the libc++ benchmarks when a comment is added to the PR. +# +# The comment is of the form: +# +# /libcxx-bot benchmark +# +# That will cause the specified benchmarks to be run on the PR and on the pull-request target, and +# their results to be compared. 
+
+name: Benchmark libc++
+
+permissions:
+  contents: read
+
+on:
+  issue_comment:
+    types:
+      - created
+      - edited
+
+env:
+  CC: clang-22
+  CXX: clang++-22
+
+jobs:
+  run-benchmarks:
+    permissions:
+      pull-requests: write
+
+    if: >-
+      github.event.issue.pull_request &&
+      contains(github.event.comment.body, '/libcxx-bot benchmark')
+
+    runs-on: llvm-premerge-libcxx-next-runners # TODO: This should run on a dedicated set of machines
+    steps:
+      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+        with:
+          python-version: '3.14'
+
+      - name: Extract information from the PR
+        id: vars
+        env:
+          COMMENT_BODY: ${{ github.event.comment.body }}
+        run: |
+          python3 -m venv .venv
+          source .venv/bin/activate
+          python -m pip install pygithub
+
+          cat <<EOF | python >> ${GITHUB_OUTPUT}
+          import github
+          repo = github.Github("${{ github.token }}").get_repo("${{ github.repository }}")
+          pr = repo.get_pull(${{ github.event.issue.number }})
+          print(f"pr_base={pr.base.sha}")
+          print(f"pr_head={pr.head.sha}")
+          EOF
+          BENCHMARKS=$(echo "$COMMENT_BODY" | sed -nE 's/\/libcxx-bot benchmark (.+)/\1/p')
+          echo "benchmarks=${BENCHMARKS}" >> ${GITHUB_OUTPUT}
+
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        with:
+          ref: ${{ steps.vars.outputs.pr_head }}
+          fetch-depth: 0
+          fetch-tags: true # This job requires access to all the Git branches so it can diff against (usually) main
+          path: repo # Avoid nuking the workspace, where we have the Python virtualenv
+
+      - name: Run baseline
+        env:
+          BENCHMARKS: ${{ steps.vars.outputs.benchmarks }}
+        run: |
+          source .venv/bin/activate && cd repo
+          python -m pip install -r libcxx/utils/requirements.txt
+          baseline_commit=$(git merge-base ${{ steps.vars.outputs.pr_base }} ${{ steps.vars.outputs.pr_head }})
+          ./libcxx/utils/test-at-commit --commit ${baseline_commit} -B build/baseline -- -sv -j1 --param optimization=speed "$BENCHMARKS"
+          ./libcxx/utils/consolidate-benchmarks build/baseline | tee baseline.lnt
+
+      - name: Run 
candidate + env: + BENCHMARKS: ${{ steps.vars.outputs.benchmarks }} + run: | + source .venv/bin/activate && cd repo + ./libcxx/utils/test-at-commit --commit ${{ steps.vars.outputs.pr_head }} -B build/candidate -- -sv -j1 --param optimization=speed "$BENCHMARKS" + ./libcxx/utils/consolidate-benchmarks build/candidate | tee candidate.lnt + + - name: Compare baseline and candidate runs + run: | + source .venv/bin/activate && cd repo + ./libcxx/utils/compare-benchmarks baseline.lnt candidate.lnt | tee results.txt + + - name: Update comment with results + run: | + source .venv/bin/activate && cd repo + cat < + + Benchmark results: + + + \`\`\` + {benchmark_results} + \`\`\` + + + """ + + comment.edit(new_comment_text) + EOF diff --git a/.github/workflows/linux-premerge.yml b/.github/workflows/linux-premerge.yml deleted file mode 100644 index 4a30527cb845..000000000000 --- a/.github/workflows/linux-premerge.yml +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -name: Linux Premerge Checks - -on: - pull_request: - types: [opened, synchronize, reopened] - paths-ignore: - - '.github/workflows/qcom-preflight-checks.yml' - -permissions: - contents: read - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - build-and-test: - # Don't run on forks - if: github.repository == 'qualcomm/cpullvm-toolchain' - name: ${{ matrix.build_script }} on ${{ matrix.target_os }} - runs-on: ${{ matrix.runner }} - - strategy: - fail-fast: false - matrix: - include: - - build_script: build.sh - test_script: test.sh - target_os: Linux-x86_64 - runner: self-hosted - - - build_script: build_no_runtimes.sh - test_script: test_no_runtimes.sh - target_os: Linux-AArch64 - runner: ubuntu-22.04-arm - - - build_script: build_musl-embedded_overlay.sh - test_script: '' - target_os: Linux-x86_64 - runner: self-hosted - - steps: - - name: Cleanup workspace - shell: bash - run: | - set -euo pipefail - echo "Cleaning workspace: ${GITHUB_WORKSPACE}" - shopt -s dotglob nullglob - rm -rf "${GITHUB_WORKSPACE:?}/"* - - - name: Checkout source - uses: actions/checkout@v4 - with: - repository: ${{ github.event.pull_request.head.repo.full_name }} - ref: ${{ github.event.pull_request.head.sha }} - clean: true - - - name: Apply llvm-project patches - run: python3 qualcomm-software/cmake/patch_repo.py --method apply qualcomm-software/patches/llvm-project - - - name: Build toolchain - run: ./qualcomm-software/scripts/${{ matrix.build_script }} - - - name: Test - if: matrix.test_script != '' - run: ./qualcomm-software/scripts/${{ matrix.test_script }} diff --git a/.github/workflows/llvm-abi-tests.yml b/.github/workflows/llvm-abi-tests.yml new file mode 100644 index 000000000000..34b477d9a4b1 --- /dev/null +++ b/.github/workflows/llvm-abi-tests.yml @@ -0,0 +1,172 @@ +name: LLVM ABI Tests + +permissions: + contents: read + +on: + workflow_dispatch: + push: + branches: 
+ - 'release/**' + paths: + - 'llvm/**' + - '.github/workflows/llvm-abi-tests.yml' + pull_request: + branches: + - 'release/**' + paths: + - 'llvm/**' + - '.github/workflows/llvm-abi-tests.yml' + +concurrency: + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + abi-dump-setup: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + outputs: + BASELINE_REF: ${{ steps.vars.outputs.BASELINE_REF }} + ABI_HEADERS: ${{ steps.vars.outputs.ABI_HEADERS }} + BASELINE_VERSION_MAJOR: ${{ steps.vars.outputs.BASELINE_VERSION_MAJOR }} + BASELINE_VERSION_MINOR: ${{ steps.vars.outputs.BASELINE_VERSION_MINOR }} + LLVM_VERSION_MAJOR: ${{ steps.version.outputs.major }} + LLVM_VERSION_MINOR: ${{ steps.version.outputs.minor }} + LLVM_VERSION_PATCH: ${{ steps.version.outputs.patch }} + steps: + - name: Checkout source + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 250 + + - name: Get LLVM version + id: version + uses: ./.github/workflows/get-llvm-version + + - name: Setup Variables + id: vars + run: | + # C++ ABI: + # 18.1.0 we aren't doing ABI checks. + # 18.1.1 We want to check 18.1.0. + # C ABI: + # 18.1.0 We want to check 17.0.x + # 18.1.1 We want to check 18.1.0 + echo "BASELINE_VERSION_MINOR=1" >> "$GITHUB_OUTPUT" + if [ ${{ steps.version.outputs.patch }} -eq 0 ]; then + { + echo "BASELINE_VERSION_MAJOR=$(( ${{ steps.version.outputs.major }} - 1))" + echo "ABI_HEADERS=llvm-c" + } >> "$GITHUB_OUTPUT" + else + { + echo "BASELINE_VERSION_MAJOR=${{ steps.version.outputs.major }}" + echo "ABI_HEADERS=." 
+ } >> "$GITHUB_OUTPUT" + fi + + abi-dump: + if: github.repository_owner == 'llvm' + needs: abi-dump-setup + runs-on: ubuntu-24.04 + container: + image: "ghcr.io/llvm/ci-ubuntu-24.04-abi-tests@sha256:9138b6aea737d935e92ad2afdf5d49325880f9b187b5b979b135ac80cd456135" #ghcr.io/llvm/ci-ubuntu-24.04-abi-tests:bb0bd382ab2b" + strategy: + matrix: + name: + - build-baseline + - build-latest + include: + - name: build-baseline + llvm_version_major: ${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }} + ref: llvmorg-${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MAJOR }}.${{ needs.abi-dump-setup.outputs.BASELINE_VERSION_MINOR }}.0 + repo: llvm/llvm-project + - name: build-latest + llvm_version_major: ${{ needs.abi-dump-setup.outputs.LLVM_VERSION_MAJOR }} + ref: ${{ github.sha }} + repo: ${{ github.repository }} + steps: + - name: Download source code + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + ref: ${{ matrix.ref }} + repository: ${{ matrix.repo }} + - name: Configure + run: | + mkdir install + cmake -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug -DLLVM_TARGETS_TO_BUILD="" -DLLVM_BUILD_LLVM_DYLIB=ON -DCMAKE_INSTALL_PREFIX="$(pwd)"/install llvm + - name: Build + # Need to run install-LLVM twice to ensure the symlink is installed (this is a bug). + run: | + ninja -C build install-LLVM + ninja -C build install-LLVM + ninja -C build install-llvm-headers + - name: Dump ABI + run: | + if [ "${{ needs.abi-dump-setup.outputs.ABI_HEADERS }}" = "llvm-c" ]; then + nm ./install/lib/libLLVM.so | awk "/T _LLVM/ || /T LLVM/ { print $3 }" | sort -u | sed -e "s/^_//g" | cut -d ' ' -f 3 > llvm.symbols + # Even though the -symbols-list option doesn't seem to filter out the symbols, I believe it speeds up processing, so I'm leaving it in. 
+ export EXTRA_ARGS="-symbols-list llvm.symbols" + else + touch llvm.symbols + fi + abi-dumper $EXTRA_ARGS -lver ${{ matrix.ref }} -skip-cxx -public-headers ./install/include/${{ needs.abi-dump-setup.outputs.ABI_HEADERS }} -o ${{ matrix.ref }}.abi ./install/lib/libLLVM.so + # Remove symbol versioning from dumps, so we can compare across major versions. + sed -i 's/LLVM_${{ matrix.llvm_version_major }}/LLVM_NOVERSION/' ${{ matrix.ref }}.abi + - name: Upload ABI file + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: ${{ matrix.name }} + path: ${{ matrix.ref }}.abi + + - name: Upload symbol list file + if: matrix.name == 'build-baseline' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: symbol-list + path: llvm.symbols + + abi-compare: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + container: + image: "ghcr.io/llvm/ci-ubuntu-24.04-abi-tests@sha256:9138b6aea737d935e92ad2afdf5d49325880f9b187b5b979b135ac80cd456135" #ghcr.io/llvm/ci-ubuntu-24.04-abi-tests:bb0bd382ab2b + needs: + - abi-dump-setup + - abi-dump + steps: + - name: Download baseline + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: build-baseline + path: build-baseline + - name: Download latest + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: build-latest + path: build-latest + - name: Download symbol list + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + name: symbol-list + path: symbol-list + + - name: Compare ABI + run: | + if [ -s symbol-list/llvm.symbols ]; then + # This option doesn't seem to work with the ABI dumper, so passing it here. 
+ export EXTRA_ARGS="-symbols-list symbol-list/llvm.symbols" + fi + # FIXME: Reading of gzip'd abi files on the GitHub runners stop + # working some time in March of 2021, likely due to a change in the + # runner's environment. + abi-compliance-checker $EXTRA_ARGS -l libLLVM.so -old build-baseline/*.abi -new build-latest/*.abi || test "${{ needs.abi-dump-setup.outputs.ABI_HEADERS }}" = "llvm-c" + - name: Upload ABI Comparison + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: compat-report-${{ github.sha }} + path: compat_reports/ diff --git a/.github/workflows/mlir-spirv-tests.yml b/.github/workflows/mlir-spirv-tests.yml new file mode 100644 index 000000000000..b5818b327e6b --- /dev/null +++ b/.github/workflows/mlir-spirv-tests.yml @@ -0,0 +1,51 @@ +name: MLIR SPIR-V Tests + +permissions: + contents: read + +on: + workflow_dispatch: + pull_request: + paths: + - 'mlir/include/mlir/Dialect/SPIRV/**' + - 'mlir/lib/Dialect/SPIRV/**' + - 'mlir/include/mlir/Target/SPIRV/**' + - 'mlir/lib/Target/SPIRV/**' + - 'mlir/test/Target/SPIRV/**' + - '.github/workflows/mlir-spirv-tests.yml' + +concurrency: + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + check_spirv: + if: github.repository_owner == 'llvm' + name: Test MLIR SPIR-V + runs-on: ubuntu-24.04 + container: + image: ghcr.io/llvm/ci-ubuntu-24.04:latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + - name: Setup ccache + uses: hendrikmuhs/ccache-action@5ebbd400eff9e74630f759d94ddd7b6c26299639 # v1.2.20 + with: + max-size: 2G + key: spirv-mlir-ubuntu-24.04 + variant: sccache + - name: Build and Test + run: | + mkdir build + cmake -GNinja \ + -S llvm \ + -B build \ + -DCMAKE_BUILD_TYPE=Release \ + -DLLVM_ENABLE_ASSERTIONS=ON \ + -DCMAKE_C_COMPILER_LAUNCHER=sccache \ + -DCMAKE_CXX_COMPILER_LAUNCHER=sccache \ + -DLLVM_TARGETS_TO_BUILD="host" \ + -DLLVM_INCLUDE_SPIRV_TOOLS_TESTS=ON \ + -DLLVM_ENABLE_PROJECTS=mlir + ninja -C build check-mlir diff --git a/.github/workflows/nightly-aarch64.yml b/.github/workflows/nightly-aarch64.yml new file mode 100644 index 000000000000..0b31053147a7 --- /dev/null +++ b/.github/workflows/nightly-aarch64.yml @@ -0,0 +1,18 @@ +name: Nightly-Linux-AArch64 + +on: + workflow_dispatch: + repository_dispatch: + types: [trigger-nightly-aarch64-linux] + + schedule: + - cron: '0 5 * * *' # 9 PM PT (UTC) + +jobs: + base: + uses: ./.github/workflows/base-build.yml + with: + runner: ubuntu-22.04-arm + build-script: "src/qualcomm-software/embedded/scripts/build.sh" + is-nightly: true + arch: AArch64 diff --git a/.github/workflows/nightly-win-x86.yml b/.github/workflows/nightly-win-x86.yml new file mode 100644 index 000000000000..4ab80c34b71b --- /dev/null +++ b/.github/workflows/nightly-win-x86.yml @@ -0,0 +1,53 @@ +name: nightly-win-x86_64 + +on: + schedule: + - cron: '0 5 * * *' + workflow_dispatch: + +jobs: + build-windows: + runs-on: windows-latest + env: + ARTIFACT_DIR: ${{ github.workspace }}/nightly-toolchain + + steps: + - name: Preconfigure Git (LF line endings, long 
paths) + shell: bash + run: | + git config --global core.autocrlf false + git config --global core.eol lf + git config --global core.longpaths true + + - name: Checkout source (to src/) + uses: actions/checkout@v4 + with: + path: src + + - name: Install PyYAML + run: pip install pyyaml + + - name: Verify PyYAML + run: python -c "import yaml; print('PyYAML OK:', yaml.__version__)" + + - name: Build + working-directory: src + shell: pwsh + env: + GIT_AUTHOR_NAME: Qualcomm CI + GIT_AUTHOR_EMAIL: ci@qti.qualcomm.com + GIT_COMMITTER_NAME: Qualcomm CI + GIT_COMMITTER_EMAIL: ci@qti.qualcomm.com + run: | + ./qualcomm-software/embedded/scripts/build.ps1 + + - name: Upload Artifact + if: ${{ success() }} + uses: actions/upload-artifact@v4 + with: + name: cpullvm-toolchain-${{ github.run_id }}-Windows-x86 + path: | + ${{ env.ARTIFACT_DIR }}/*.txz + ${{ env.ARTIFACT_DIR }}/*.tgz + if-no-files-found: ignore + retention-days: 7 diff --git a/.github/workflows/nightly-woa.yml b/.github/workflows/nightly-woa.yml new file mode 100644 index 000000000000..cc5f93c89c1a --- /dev/null +++ b/.github/workflows/nightly-woa.yml @@ -0,0 +1,53 @@ +name: nightly-win-arm64 + +on: + schedule: + - cron: '0 5 * * *' + workflow_dispatch: + +jobs: + build-windows-on-aarch64 : + runs-on: windows-11-arm + env: + ARTIFACT_DIR: ${{ github.workspace }}/nightly-toolchain + + steps: + - name: Preconfigure Git (LF line endings, long paths) + shell: bash + run: | + git config --global core.autocrlf false + git config --global core.eol lf + git config --global core.longpaths true + + - name: Checkout source (to src/) + uses: actions/checkout@v4 + with: + path: src + + - name: Install PyYAML + run: pip install pyyaml + + - name: Verify PyYAML + run: python -c "import yaml; print('PyYAML OK:', yaml.__version__)" + + - name: Build + working-directory: src + shell: pwsh + env: + GIT_AUTHOR_NAME: Qualcomm CI + GIT_AUTHOR_EMAIL: ci@qti.qualcomm.com + GIT_COMMITTER_NAME: Qualcomm CI + GIT_COMMITTER_EMAIL: 
ci@qti.qualcomm.com + run: | + ./qualcomm-software/embedded/scripts/build.ps1 + + - name: Upload Artifact + if: ${{ success() }} + uses: actions/upload-artifact@v4 + with: + name: cpullvm-toolchain-${{ github.run_id }}-Windows-arm64 + path: | + ${{ env.ARTIFACT_DIR }}/*.txz + ${{ env.ARTIFACT_DIR }}/*.tgz + if-no-files-found: ignore + retention-days: 7 diff --git a/.github/workflows/nightly-x86-linux.yml b/.github/workflows/nightly-x86-linux.yml new file mode 100644 index 000000000000..f5f122e35278 --- /dev/null +++ b/.github/workflows/nightly-x86-linux.yml @@ -0,0 +1,18 @@ +name: Nightly-Linux-x86_64 + +on: + workflow_dispatch: + repository_dispatch: + types: [trigger-nightly-x86-linux] + + schedule: + - cron: '0 5 * * *' # 9 PM PT (UTC) + +jobs: + base: + uses: ./.github/workflows/base-build.yml + with: + runner: self-hosted + build-script: "src/qualcomm-software/embedded/scripts/build.sh" + is-nightly: true + arch: x86_64 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml deleted file mode 100644 index 4fa9b5c725ab..000000000000 --- a/.github/workflows/nightly.yml +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# This workflow runs all build scripts in parallel for multiple operating -# systems and architectures. -# It is intended to be triggered as part of a **nightly build** to -# validate all build configurations and test them automatically. 
- -name: Nightly Build and Test - -on: - workflow_dispatch: - -permissions: - contents: read - -jobs: - build-and-test: - # Don't run on forks - if: github.repository == 'qualcomm/cpullvm-toolchain' - name: ${{ matrix.build_script }} on ${{ matrix.target_os }} - runs-on: ${{ matrix.runner }} - - strategy: - fail-fast: false - matrix: - include: - - build_script: build.sh - test_script: test.sh - target_os: Linux-x86_64 - runner: self-hosted - - - build_script: build_no_runtimes.sh - test_script: test_no_runtimes.sh - target_os: Linux-AArch64 - runner: ubuntu-22.04-arm - - - build_script: build_musl-embedded_overlay.sh - test_script: '' - target_os: Linux-x86_64 - runner: self-hosted - - # FIXME: there's lit failures in Windows, probably from missing some - # dependency. Just disable testing for now for both x64/AArch64 - - build_script: build.ps1 - test_script: '' - target_os: Windows-x86_64 - runner: windows-latest - - - build_script: build.ps1 - test_script: '' - target_os: Windows-AArch64 - runner: windows-11-arm - - steps: - # FIXME: Need to discuss if this is actually needed. - - name: Cleanup workspace - if: runner.os != 'Windows' - shell: bash - run: | - set -euo pipefail - echo "Cleaning workspace: ${GITHUB_WORKSPACE}" - shopt -s dotglob nullglob - rm -rf "${GITHUB_WORKSPACE:?}/"* - - - name: Checkout source - uses: actions/checkout@v4 - - - name: Apply llvm-project patches - run: python3 qualcomm-software/cmake/patch_repo.py --method apply qualcomm-software/patches/llvm-project - - # Needed for eld tests. - # If there are more dependencies, add an install script. 
- - name: Install PyYAML (Windows) - if: runner.os == 'Windows' - run: pip install pyyaml - - - name: Build - run: ./qualcomm-software/scripts/${{ matrix.build_script }} - - - name: Test - if: matrix.test_script != '' - run: ./qualcomm-software/scripts/${{ matrix.test_script }} - - - name: Upload built packages - uses: actions/upload-artifact@v4 - if: success() - with: - name: cpullvm-packages-${{ matrix.build_script }}-${{ matrix.target_os }} - path: | - build*/cpullvm-*.tar.xz - build*/cpullvm-*.zip - retention-days: 7 diff --git a/.github/workflows/pr-code-format.yml b/.github/workflows/pr-code-format.yml new file mode 100644 index 000000000000..34904f42fe0c --- /dev/null +++ b/.github/workflows/pr-code-format.yml @@ -0,0 +1,64 @@ +name: "Check code formatting" + +permissions: + contents: read + +on: + pull_request: + branches: + - main + - 'users/**' + +jobs: + code_formatter: + runs-on: ubuntu-24.04 + container: + image: 'ghcr.io/llvm/ci-ubuntu-24.04-format' + timeout-minutes: 30 + concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + if: github.repository == 'llvm/llvm-project' + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0 + with: + separator: "," + skip_initial_fetch: true + base_sha: 'HEAD~1' + sha: 'HEAD' + + - name: "Listed files" + env: + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + run: | + echo "Formatting files:" + echo "$CHANGED_FILES" + + - name: Run code formatter + env: + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + # Create an empty comments file so the pr-write job doesn't fail. 
+ run: | + echo "[]" > comments && + python ./llvm/utils/git/code-format-helper.py \ + --write-comment-to-file \ + --token ${{ secrets.GITHUB_TOKEN }} \ + --issue-number $GITHUB_PR_NUMBER \ + --start-rev HEAD~1 \ + --end-rev HEAD \ + --changed-files "$CHANGED_FILES" + + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: workflow-args + path: | + comments diff --git a/.github/workflows/pr-code-lint.yml b/.github/workflows/pr-code-lint.yml new file mode 100644 index 000000000000..77383bc00f00 --- /dev/null +++ b/.github/workflows/pr-code-lint.yml @@ -0,0 +1,100 @@ +name: "Code lint" + +permissions: + contents: read + +on: + pull_request: + branches: + - main + - 'users/**' + paths: + - 'clang-tools-extra/clang-tidy/**' + - 'clang-tools-extra/docs/clang-tidy/**' + - '.github/workflows/pr-code-lint.yml' + +jobs: + code_linter: + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + defaults: + run: + shell: bash + container: + image: 'ghcr.io/llvm/ci-ubuntu-24.04-lint' + timeout-minutes: 60 + concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + steps: + - name: Fetch LLVM sources + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0 + with: + separator: "," + skip_initial_fetch: true + base_sha: 'HEAD~1' + sha: 'HEAD' + + - name: Listed files + env: + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + run: | + echo "Changed files:" + echo "$CHANGED_FILES" + + # TODO: create special mapping for 'codegen' targets, for now build predefined set + # TODO: add entrypoint in 'compute_projects.py' that only adds a project and its direct dependencies + - name: Configure and CodeGen + run: | + git config --global --add safe.directory '*' + + . 
<(git diff --name-only HEAD~1...HEAD | python3 .ci/compute_projects.py) + + if [[ "${projects_to_build}" == "" ]]; then + echo "No projects to analyze" + exit 0 + fi + + cmake -G Ninja \ + -B build \ + -S llvm \ + -DLLVM_ENABLE_ASSERTIONS=OFF \ + -DLLVM_ENABLE_PROJECTS="${projects_to_build}" \ + -DCMAKE_CXX_COMPILER=clang++ \ + -DCMAKE_C_COMPILER=clang \ + -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \ + -DLLVM_INCLUDE_TESTS=OFF \ + -DCLANG_INCLUDE_TESTS=OFF \ + -DCMAKE_BUILD_TYPE=Release + + ninja -C build \ + clang-tablegen-targets \ + genconfusable # for "ConfusableIdentifierCheck.h" + + - name: Run code linters + env: + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} + run: | + echo "[]" > comments && + python3 llvm/utils/git/code-lint-helper.py \ + --token ${{ secrets.GITHUB_TOKEN }} \ + --issue-number $GITHUB_PR_NUMBER \ + --start-rev HEAD~1 \ + --end-rev HEAD \ + --verbose \ + --changed-files "$CHANGED_FILES" + + - name: Upload results + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + with: + name: workflow-args + path: | + comments diff --git a/.github/workflows/pre-checkin-aarch64.yml b/.github/workflows/pre-checkin-aarch64.yml new file mode 100644 index 000000000000..ad449568bd0e --- /dev/null +++ b/.github/workflows/pre-checkin-aarch64.yml @@ -0,0 +1,11 @@ +name: pre-checkin-aarch64 + +on: + pull_request: + +jobs: + base: + uses: ./.github/workflows/base-build.yml + with: + runner: ubuntu-22.04-arm + build-script: "src/qualcomm-software/embedded/scripts/build.sh" diff --git a/.github/workflows/pre-checkin.yml b/.github/workflows/pre-checkin.yml new file mode 100644 index 000000000000..a7bacc9a5453 --- /dev/null +++ b/.github/workflows/pre-checkin.yml @@ -0,0 +1,16 @@ +name: pre-checkin + +on: + pull_request: + paths-ignore: + - 'qualcomm-software/embedded/scripts/build.ps1' + - '.github/workflows/nightly-win-x86.yml' + - 
'.github/workflows/nightly-woa.yml' + + +jobs: + base: + uses: ./.github/workflows/base-build.yml + with: + runner: self-hosted + build-script: "src/qualcomm-software/embedded/scripts/build.sh" diff --git a/.github/workflows/premerge.yaml b/.github/workflows/premerge.yaml new file mode 100644 index 000000000000..1795d5256750 --- /dev/null +++ b/.github/workflows/premerge.yaml @@ -0,0 +1,243 @@ +name: CI Checks + +permissions: + contents: read + +on: + pull_request: + types: + - opened + - synchronize + - reopened + # When a PR is closed, we still start this workflow, but then skip + # all the jobs, which makes it effectively a no-op. The reason to + # do this is that it allows us to take advantage of concurrency groups + # to cancel in progress CI jobs whenever the PR is closed. + - closed + push: + branches: + - 'release/**' + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +jobs: + premerge-checks-linux: + name: Build and Test Linux${{ (startsWith(matrix.runs-on, 'depot-ubuntu-24.04-arm') && ' AArch64') || '' }} + if: >- + github.repository_owner == 'llvm' && + (github.event_name != 'pull_request' || github.event.action != 'closed') + strategy: + fail-fast: false + matrix: + runs-on: + - depot-ubuntu-24.04-arm-16 + - llvm-premerge-linux-runners + runs-on: ${{ matrix.runs-on }} + container: + # The llvm-premerge agents are already containers and running the + # this same image, so we can't use a container for the github action + # job. The depot containers are running on VMs, so we can use a + # container. This helps ensure the build environment is as close + # as possible on both the depot runners and the llvm-premerge runners. + image: ${{ (startsWith(matrix.runs-on, 'depot-ubuntu-24.04-arm') && format('ghcr.io/{0}/arm64v8/ci-ubuntu-24.04',github.repository_owner) ) || null }} + # --privileged is needed to run the lldb tests that disable aslr. 
+ # The SCCACHE environment variables are need to be copied from the host + # to the container to make sure it is configured correctly to use the + # depot cache. + options: >- + --privileged + --env SCCACHE_WEBDAV_ENDPOINT + --env SCCACHE_WEBDAV_TOKEN + defaults: + run: + # The run step defaults to using sh as the shell when running in a + # container, so make bash the default to ensure consistency between + # container and non-container jobs. + shell: bash + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + - name: Build and Test + timeout-minutes: 120 + env: + GITHUB_TOKEN: ${{ github.token }} + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + git config --global --add safe.directory '*' + + source <(git diff --name-only HEAD~1...HEAD | python3 .ci/compute_projects.py) + + if [[ "${projects_to_build}" == "" ]]; then + echo "No projects to build" + exit 0 + fi + + echo "Building projects: ${projects_to_build}" + echo "Running project checks targets: ${project_check_targets}" + echo "Building runtimes: ${runtimes_to_build}" + echo "Running runtimes checks targets: ${runtimes_check_targets}" + echo "Running runtimes checks requiring reconfiguring targets: ${runtimes_check_targets_needs_reconfig}" + + export CC=/opt/llvm/bin/clang + export CXX=/opt/llvm/bin/clang++ + + # The linux-premerge runners are hosted on GCP and have a different + # cache setup than the depot runners. + if [[ "${{ matrix.runs-on }}" = "llvm-premerge-linux-runners" ]]; then + # This environment variable is passes into the container through the + # runner pod definition. This differs between our two clusters which + # why we do not hardcode it. + export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET + export SCCACHE_GCS_RW_MODE=READ_WRITE + fi + env + + # Set the idle timeout to zero to ensure sccache runs for the + # entire duration of the job. 
Otherwise it might stop if we run + # several test suites in a row and discard statistics that we want + # to save in the end. + export SCCACHE_IDLE_TIMEOUT=0 + mkdir artifacts + SCCACHE_LOG=info SCCACHE_ERROR_LOG=$(pwd)/artifacts/sccache.log sccache --start-server + + ./.ci/monolithic-linux.sh "${projects_to_build}" "${project_check_targets}" "${runtimes_to_build}" "${runtimes_check_targets}" "${runtimes_check_targets_needs_reconfig}" "${enable_cir}" + - name: Upload Artifacts + # In some cases, Github will fail to upload the artifact. We want to + # continue anyways as a failed artifact upload is an infra failure, not + # a checks failure. + # https://github.com/actions/upload-artifact/issues/569 + continue-on-error: true + if: '!cancelled()' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: Premerge Artifacts (Linux ${{ runner.arch }}) + path: artifacts/ + retention-days: 5 + include-hidden-files: 'true' + - name: Upload Comment + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: ${{ always() && !startsWith(matrix.runs-on, 'depot-ubuntu-24.04-arm') }} + continue-on-error: true + with: + name: workflow-args-x86-linux + path: | + comments-Linux-x86_64 + + premerge-checks-windows: + name: Build and Test Windows + if: >- + github.repository_owner == 'llvm' && + (github.event_name != 'pull_request' || github.event.action != 'closed') + runs-on: llvm-premerge-windows-2022-runners + defaults: + run: + shell: bash + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + - name: Compute Projects + id: vars + run: | + source <(git diff --name-only HEAD~1...HEAD | python .ci/compute_projects.py) + + if [[ "${projects_to_build}" == "" ]]; then + echo "No projects to build" + fi + + echo "Building projects: ${projects_to_build}" + echo "Running project checks targets: ${project_check_targets}" + echo "Building 
runtimes: ${runtimes_to_build}" + echo "Running runtimes checks targets: ${runtimes_check_targets}" + + echo "windows-projects=${projects_to_build}" >> $GITHUB_OUTPUT + echo "windows-check-targets=${project_check_targets}" >> $GITHUB_OUTPUT + echo "windows-runtimes=${runtimes_to_build}" >> $GITHUB_OUTPUT + echo "windows-runtimes-check-targets=${runtimes_check_targets}" >> $GITHUB_OUTPUT + - name: Build and Test + timeout-minutes: 180 + if: ${{ steps.vars.outputs.windows-projects != '' }} + shell: cmd + env: + GITHUB_TOKEN: ${{ github.token }} + GITHUB_PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + call C:\\BuildTools\\Common7\\Tools\\VsDevCmd.bat -arch=amd64 -host_arch=amd64 + # See the comments above in the Linux job for why we define each of + # these environment variables. + bash -c "export SCCACHE_GCS_BUCKET=$CACHE_GCS_BUCKET; export SCCACHE_GCS_RW_MODE=READ_WRITE; export SCCACHE_IDLE_TIMEOUT=0; mkdir artifacts; SCCACHE_LOG=info SCCACHE_ERROR_LOG=$(pwd)/artifacts/sccache.log sccache --start-server; .ci/monolithic-windows.sh \"${{ steps.vars.outputs.windows-projects }}\" \"${{ steps.vars.outputs.windows-check-targets }}\" \"${{ steps.vars.outputs.windows-runtimes }}\" \"${{ steps.vars.outputs.windows-runtimes-check-targets }}\"" + - name: Upload Artifacts + # In some cases, Github will fail to upload the artifact. We want to + # continue anyways as a failed artifact upload is an infra failure, not + # a checks failure. 
+ # https://github.com/actions/upload-artifact/issues/569 + continue-on-error: true + if: '!cancelled()' + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: Premerge Artifacts (Windows) + path: artifacts/ + retention-days: 5 + include-hidden-files: 'true' + - name: Upload Comment + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + if: always() + continue-on-error: true + with: + name: workflow-args-windows + path: | + comments-Windows-AMD64 + + premerge-check-macos: + name: MacOS Premerge Checks + runs-on: macos-14 + if: >- + github.repository_owner == 'llvm' && + (startswith(github.ref_name, 'release/') || + startswith(github.base_ref, 'release/')) && + (github.event_name != 'pull_request' || github.event.action != 'closed') + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + fetch-depth: 2 + - name: Setup ccache + uses: hendrikmuhs/ccache-action@5ebbd400eff9e74630f759d94ddd7b6c26299639 # v1.2.20 + with: + max-size: "2000M" + - name: Install Ninja + run: | + brew install ninja + - name: Build and Test + run: | + source <(git diff --name-only HEAD~1...HEAD | python3 .ci/compute_projects.py) + + if [[ "${projects_to_build}" == "" ]]; then + echo "No projects to build" + exit 0 + fi + + echo "Building projects: ${projects_to_build}" + echo "Running project checks targets: ${project_check_targets}" + + # -DLLVM_DISABLE_ASSEMBLY_FILES=ON is for + # https://github.com/llvm/llvm-project/issues/81967 + # Disable sharding in lit so that the LIT_XFAIL environment var works. 
+ cmake -G Ninja \
+ -B build \
+ -S llvm \
+ -DLLVM_ENABLE_PROJECTS="${projects_to_build}" \
+ -DLLVM_DISABLE_ASSEMBLY_FILES=ON \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLDB_INCLUDE_TESTS=OFF \
+ -DLLVM_ENABLE_ASSERTIONS=ON \
+ -DCMAKE_C_COMPILER_LAUNCHER=ccache \
+ -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
+
+ # The libcxx tests fail, so we are skipping the runtime targets.
+ ninja -C build ${project_check_targets} diff --git a/.github/workflows/qcom-preflight-checks.yml b/.github/workflows/qcom-preflight-checks.yml index 56b483b2f3d1..8d8a5007c1d4 100644 --- a/.github/workflows/qcom-preflight-checks.yml +++ b/.github/workflows/qcom-preflight-checks.yml @@ -18,7 +18,6 @@ permissions: jobs: qcom-preflight-checks: - if: github.repository == 'qualcomm/cpullvm-toolchain' uses: qualcomm/qcom-reusable-workflows/.github/workflows/qcom-preflight-checks-reusable-workflow.yml@v1.1.4 with: # ✅ Preflight Checkers diff --git a/.github/workflows/release-binaries.yml b/.github/workflows/release-binaries.yml new file mode 100644 index 000000000000..3bf6860d4e53 --- /dev/null +++ b/.github/workflows/release-binaries.yml @@ -0,0 +1,348 @@ +name: Release Binaries
+
+on:
+ workflow_dispatch:
+ inputs:
+ release-version:
+ description: 'Release Version'
+ required: false
+ type: string
+ upload:
+ description: 'Upload binaries to the release page'
+ required: true
+ default: false
+ type: boolean
+ runs-on:
+ description: "Runner to use for the build"
+ required: true
+ type: choice
+ # We use ubuntu-22.04 rather than the latest version to make the built
+ # binaries more portable (eg functional against older glibc). 
+ options:
+ - ubuntu-22.04
+ - ubuntu-22.04-arm
+ - macos-14
+
+ workflow_call:
+ inputs:
+ release-version:
+ description: 'Release Version'
+ required: false
+ type: string
+ upload:
+ description: 'Upload binaries to the release page'
+ required: true
+ default: false
+ type: boolean
+ runs-on:
+ description: "Runner to use for the build"
+ required: true
+ type: string
+ secrets:
+ RELEASE_TASKS_USER_TOKEN:
+ description: "Secret used to check user permissions."
+ required: false
+
+
+permissions:
+ contents: read # Default everything to read-only
+
+jobs:
+ prepare:
+ name: Prepare to build binaries
+ runs-on: ${{ inputs.runs-on }}
+ if: github.repository_owner == 'llvm'
+ outputs:
+ release-version: ${{ steps.vars.outputs.release-version }}
+ ref: ${{ steps.vars.outputs.ref }}
+ upload: ${{ steps.vars.outputs.upload }}
+ target-cmake-flags: ${{ steps.vars.outputs.target-cmake-flags }}
+ build-flang: ${{ steps.vars.outputs.build-flang }}
+ release-binary-basename: ${{ steps.vars.outputs.release-binary-basename }}
+ release-binary-filename: ${{ steps.vars.outputs.release-binary-filename }}
+ build-runs-on: ${{ steps.vars.outputs.build-runs-on }}
+ test-runs-on: ${{ steps.vars.outputs.test-runs-on }}
+ attestation-name: ${{ steps.vars.outputs.attestation-name }}
+
+ steps:
+ # It's good practice to use setup-python, but this is also required on macos-14
+ # due to https://github.com/actions/runner-images/issues/10385
+ - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+ with:
+ python-version: '3.14'
+
+ - name: Checkout LLVM
+ uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+
+ - name: Install Dependencies
+ shell: bash
+ run: |
+ pip install --require-hashes -r ./llvm/utils/git/requirements.txt
+
+ - name: Check Permissions
+ if: github.event_name != 'pull_request'
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
+ USER_TOKEN: ${{ secrets.RELEASE_TASKS_USER_TOKEN }}
+ shell: bash
+ run: |
+ 
./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --user "$GITHUB_ACTOR" --user-token "$USER_TOKEN" check-permissions + + # The name of the Windows binaries uses the version from source, so we need + # to fetch it here. + - id: version-from-source + if: runner.os == 'Windows' + uses: ./.github/workflows/get-llvm-version + + - name: Collect Variables + id: vars + env: + LLVM_VERSION_FROM_SOURCE: ${{ steps.version-from-source.outputs.full-no-suffix }} + shell: bash + # In order for the test-release.sh script to run correctly, the LLVM + # source needs to be at the following location relative to the build dir: + # | X.Y.Z-rcN | ./rcN/llvm-project + # | X.Y.Z | ./final/llvm-project + # + # We also need to set divergent flags based on the release version: + # | X.Y.Z-rcN | -rc N -test-asserts + # | X.Y.Z | -final + run: | + trimmed=$(echo ${{ inputs.release-version }} | xargs) + if [ -n "$trimmed" ]; then + release_version="$trimmed" + ref="llvmorg-$release_version" + else + if [ "$RUNNER_OS" = "Windows" ]; then + release_version="$LLVM_VERSION_FROM_SOURCE" + else + release_version="${{ (github.event_name == 'pull_request' && format('PR{0}', github.event.pull_request.number)) || 'CI'}}-$GITHUB_SHA" + fi + ref="$GITHUB_SHA" + fi + if [ -n "${{ inputs.upload }}" ]; then + upload="${{ inputs.upload }}" + else + upload="false" + fi + echo "release-version=$release_version">> $GITHUB_OUTPUT + echo "ref=$ref" >> $GITHUB_OUTPUT + echo "upload=$upload" >> $GITHUB_OUTPUT + + if [ "$RUNNER_OS" = "Windows" ]; then + case $RUNNER_ARCH in + "X64" ) + tar_arch="x86_64" + ;; + "ARM64" ) + tar_arch="aarch64" + ;; + esac + release_binary_basename="clang+llvm-$release_version-$tar_arch-pc-windows-msvc" + else + release_binary_basename="LLVM-$release_version-$RUNNER_OS-$RUNNER_ARCH" + fi + echo "release-binary-basename=$release_binary_basename" >> $GITHUB_OUTPUT + echo "release-binary-filename=$release_binary_basename.tar.xz" >> $GITHUB_OUTPUT + + 
target="$RUNNER_OS-$RUNNER_ARCH" + + # The macOS builds try to cross compile some libraries so we need to + # add extra CMake args to disable them. + # See https://github.com/llvm/llvm-project/issues/99767 + if [ "$RUNNER_OS" = "macOS" ]; then + target_cmake_flags="$target_cmake_flags -DBOOTSTRAP_BOOTSTRAP_COMPILER_RT_ENABLE_IOS=OFF" + if [ "$RUNNER_ARCH" = "ARM64" ]; then + arches=arm64 + fi + target_cmake_flags="$target_cmake_flags -DBOOTSTRAP_BOOTSTRAP_DARWIN_osx_ARCHS=$arches -DBOOTSTRAP_BOOTSTRAP_DARWIN_osx_BUILTIN_ARCHS=$arches" + fi + + if [ "$RUNNER_OS" = "Windows" ]; then + # The build times out on Windows, so we need to disable LTO. + target_cmake_flags="$target_cmake_flags -DLLVM_RELEASE_ENABLE_LTO=OFF" + fi + + case "${{ inputs.runs-on }}" in + ubuntu-22.04* | windows-2022) + build_runs_on="depot-${{ inputs.runs-on }}-16" + test_runs_on=$build_runs_on + ;; + macos-14) + if [ "$GITHUB_EVENT_NAME" = "pull_request" ]; then + build_runs_on="${{ inputs.runs-on }}" + else + build_runs_on="depot-macos-14" + fi + test_runs_on="${{ inputs.runs-on }}" + ;; + *) + test_runs_on="${{ inputs.runs-on }}" + build_runs_on=$test_runs_on + ;; + esac + + case "$build_runs_on" in + # These runners cannot build the full release package faster than + # the 6 hours timeout limit, so we need to use a configuration + # that builds more quickly. 
+ macos-14) + bootstrap_prefix="BOOTSTRAP" + target_cmake_flags="$target_cmake_flags -DLLVM_RELEASE_ENABLE_LTO=OFF -DLLVM_RELEASE_ENABLE_PGO=OFF" + ;; + *) + bootstrap_prefix="BOOTSTRAP_BOOTSTRAP" + ;; + esac + + target_cmake_flags="$target_cmake_flags -D${bootstrap_prefix}_CPACK_PACKAGE_FILE_NAME=$release_binary_basename" + + echo "target-cmake-flags=$target_cmake_flags" >> $GITHUB_OUTPUT + echo "build-runs-on=$build_runs_on" >> $GITHUB_OUTPUT + echo "test-runs-on=$test_runs_on" >> $GITHUB_OUTPUT + echo "attestation-name=$RUNNER_OS-$RUNNER_ARCH-release-binary-attestation" >> $GITHUB_OUTPUT + + build-release-package: + name: "Build Release Package" + needs: prepare + if: github.repository_owner == 'llvm' + runs-on: ${{ needs.prepare.outputs.build-runs-on }} + outputs: + digest: ${{ steps.digest.outputs.digest }} + artifact-id: ${{ steps.artifact-upload.outputs.artifact-id }} + steps: + # We need to hard code the python library path for Windows, so in order + # to do that we need to specify a specific python version. It's also + # good practice to do this on other OSes so the version of python doesn't + # get changed unexpectedly. + - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + python-version: '3.11.9' + + # For some reason this is needed on Windows or else the build system can't find python3.lib. 
+ - name: Setup Python library path + if: runner.os == 'Windows' + run: | + echo "LIB=$env:LIB;C:\hostedtoolcache\windows\Python\3.11.9\x64\libs" >> $env:GITHUB_ENV + + - name: Setup crlf + if: runner.os == 'Windows' + run: | + git config --global core.autocrlf false + + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + ref: ${{ needs.prepare.outputs.ref }} + + - name: Set Build Prefix + id: setup-stage + shell: bash + run: | + build_prefix=`pwd` + if [ "${{ runner.os }}" = "Linux" ]; then + sudo chown $USER:$USER /mnt/ + build_prefix=/mnt/ + fi + echo "build-prefix=$build_prefix" >> $GITHUB_OUTPUT + + - name: Configure Linux/MacOS + id: build + if: runner.os != 'Windows' + shell: bash + run: | + # There were some issues on the ARM64 MacOS runners with trying to build x86 object, + # so we need to set some extra cmake flags to disable this. + cmake -G Ninja -S llvm -B ${{ steps.setup-stage.outputs.build-prefix }}/build \ + ${{ needs.prepare.outputs.target-cmake-flags }} \ + -C clang/cmake/caches/Release.cmake + + - name: Build Linux/MacOS + if: runner.os != 'Windows' + shell: bash + run: | + ninja -v -C ${{ steps.setup-stage.outputs.build-prefix }}/build stage2-package + release_dir=`find ${{ steps.setup-stage.outputs.build-prefix }}/build -iname 'stage2-bins'` + mv $release_dir/${{ needs.prepare.outputs.release-binary-filename }} . + + - name: Build Windows + id: build-windows + if: runner.os == 'Windows' + env: + LLVM_VERSION: ${{ needs.prepare.outputs.release-version }} + run: | + subst S: ${{ github.workspace }} + cd S:\llvm\utils\release\ + .\build_llvm_release.bat "--$($env:RUNNER_ARCH.ToLower())" --version $env:LLVM_VERSION --local-python --skip-checkout + $installer = (Get-ChildItem -Recurse -Filter "*.exe" | Select-Object -First 1).fullName + $tarball = (Get-ChildItem -Recurse -Filter "*.tar.xz" | Select-Object -First 1).fullName + # Move installer to top-level directory so it is easier to upload. 
+ mv $installer $env:GITHUB_WORKSPACE + mv $tarball $env:GITHUB_WORKSPACE + echo "windows-installer-filename=$(Split-Path -Path $installer -Leaf)" >> $env:GITHUB_OUTPUT + + - name: Generate sha256 digest for binaries + id: digest + shell: bash + env: + RELEASE_BINARY_FILENAME: ${{ needs.prepare.outputs.release-binary-filename }} + # This will be empty on non-Windows builds. + WINDOWS_INSTALLER_FILENAME: ${{ steps.build-windows.outputs.windows-installer-filename }} + run: | + if [ "$RUNNER_OS" = "macOS" ]; then + # Mac runners don't have sha256sum. + sha256sum="shasum -a 256" + else + sha256sum="sha256sum" + fi + echo "digest=$(cat $WINDOWS_INSTALLER_FILENAME $RELEASE_BINARY_FILENAME | $sha256sum | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT + + - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + id: artifact-upload + with: + name: ${{ runner.os }}-${{ runner.arch }}-release-binary + # Due to path differences on Windows when running in bash vs running on node, + # we need to search for files in the current workspace. + # The steps.build-windows.* variables will be empty on Linux/MacOS. + path: | + ${{ needs.prepare.outputs.release-binary-filename }} + ${{ steps.build-windows.outputs.windows-installer-filename }} + + - name: Run Tests + # These almost always fail so don't let them fail the build and prevent the uploads. 
+ if : runner.os != 'Windows' + continue-on-error: true + run: | + ninja -C ${{ steps.setup-stage.outputs.build-prefix }}/build stage2-check-all + + upload-release-binaries: + name: "Upload Release Binaries" + needs: + - prepare + - build-release-package + if: >- + github.event_name != 'pull_request' + runs-on: ubuntu-24.04 + permissions: + contents: write # For release uploads + id-token: write # For artifact attestations + attestations: write # For artifact attestations + + steps: + - name: Checkout Release Scripts + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/workflows/upload-release-artifact + llvm/utils/release/github-upload-release.py + llvm/utils/git/requirements.txt + sparse-checkout-cone-mode: false + + - name: Upload Artifacts + uses: ./.github/workflows/upload-release-artifact + with: + artifact-id: ${{ needs.build-release-package.outputs.artifact-id }} + attestation-name: ${{ needs.prepare.outputs.attestation-name }} + digest: ${{ needs.build-release-package.outputs.digest }} + upload: ${{ needs.prepare.outputs.upload }} + user-token: ${{ secrets.RELEASE_TASKS_USER_TOKEN }} diff --git a/.github/workflows/release-documentation.yml b/.github/workflows/release-documentation.yml new file mode 100644 index 000000000000..89edca334aa9 --- /dev/null +++ b/.github/workflows/release-documentation.yml @@ -0,0 +1,95 @@ +name: Release Documentation + +permissions: + contents: read + +on: + workflow_dispatch: + inputs: + release-version: + description: 'Release Version' + required: true + type: string + upload: + description: 'Upload documentation' + required: false + type: boolean + + workflow_call: + inputs: + release-version: + description: 'Release Version' + required: true + type: string + upload: + description: 'Upload documentation' + required: false + type: boolean + secrets: + WWW_RELEASES_TOKEN: + description: "Secret used to create a PR with the documentation changes." 
+ required: false + +jobs: + release-documentation: + name: Build and Upload Release Documentation + runs-on: ubuntu-24.04 + env: + upload: ${{ inputs.upload && !contains(inputs.release-version, 'rc') }} + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + + - name: Setup Python env + uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0 + with: + cache: 'pip' + cache-dependency-path: './llvm/docs/requirements.txt' + + - name: Install Dependencies + run: | + sudo apt-get update + sudo apt-get install -y \ + graphviz \ + python3-github \ + ninja-build \ + texlive-font-utils + pip3 install --user -r ./llvm/docs/requirements.txt + + - name: Build Documentation + env: + GITHUB_TOKEN: ${{ github.token }} + run: | + ./llvm/utils/release/build-docs.sh -release "${{ inputs.release-version }}" -no-doxygen + + - name: Create Release Notes Artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # 6.0.0 + with: + name: release-notes + path: docs-build/html-export/ + + - name: Clone www-releases + if: env.upload + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + repository: ${{ github.repository_owner }}/www-releases + ref: main + fetch-depth: 0 + path: www-releases + persist-credentials: false + + - name: Upload Release Notes + if: env.upload + env: + GH_TOKEN: ${{ secrets.WWW_RELEASES_TOKEN }} + run: | + mkdir -p www-releases/${{ inputs.release-version }} + mv ./docs-build/html-export/* www-releases/${{ inputs.release-version }} + cd www-releases + git checkout -b ${{ inputs.release-version }} + git add ${{ inputs.release-version }} + git config user.email "llvmbot@llvm.org" + git config user.name "llvmbot" + git commit -a -m "Add ${{ inputs.release-version }} documentation" + git push --force "https://$GH_TOKEN@github.com/llvmbot/www-releases.git" HEAD:refs/heads/${{ inputs.release-version }} + gh pr create -f -B main -H llvmbot:${{ 
inputs.release-version }} diff --git a/.github/workflows/release-sources.yml b/.github/workflows/release-sources.yml new file mode 100644 index 000000000000..48cdd608c622 --- /dev/null +++ b/.github/workflows/release-sources.yml @@ -0,0 +1,126 @@ +name: Release Sources + +permissions: + contents: read + +on: + workflow_dispatch: + inputs: + release-version: + description: Release Version + required: true + type: string + workflow_call: + inputs: + release-version: + description: Release Version + required: true + type: string + secrets: + RELEASE_TASKS_USER_TOKEN: + description: "Secret used to check user permissions." + required: false + # Run on pull_requests for testing purposes. + pull_request: + paths: + - '.github/workflows/release-sources.yml' + - 'llvm/utils/release/export.sh' + types: + - opened + - synchronize + - reopened + # When a PR is closed, we still start this workflow, but then skip + # all the jobs, which makes it effectively a no-op. The reason to + # do this is that it allows us to take advantage of concurrency groups + # to cancel in progress CI jobs whenever the PR is closed. 
+ - closed + +concurrency: + group: ${{ github.workflow }}-${{ inputs.release-version || github.event.pull_request.number }} + cancel-in-progress: True + +jobs: + inputs: + name: Collect Job Inputs + if: >- + github.repository_owner == 'llvm' && + github.event.action != 'closed' + outputs: + ref: ${{ steps.inputs.outputs.ref }} + export-args: ${{ steps.inputs.outputs.export-args }} + runs-on: ubuntu-24.04 + steps: + - id: inputs + run: | + ref=${{ (inputs.release-version && format('llvmorg-{0}', inputs.release-version)) || github.sha }} + if [ -n "${{ inputs.release-version }}" ]; then + export_args="-release ${{ inputs.release-version }} -final" + else + export_args="-git-ref ${{ github.sha }}" + fi + echo "ref=$ref" >> $GITHUB_OUTPUT + echo "export-args=$export_args" >> $GITHUB_OUTPUT + + release-sources: + name: Package Release Sources + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + outputs: + digest: ${{ steps.digest.outputs.digest }} + artifact-id: ${{ steps.artifact-upload.outputs.artifact-id }} + needs: + - inputs + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + ref: ${{ needs.inputs.outputs.ref }} + fetch-tags: true + - name: Install Dependencies + run: | + pip install --require-hashes -r ./llvm/utils/git/requirements.txt + + - name: Create Tarballs + run: | + ./llvm/utils/release/export.sh ${{ needs.inputs.outputs.export-args }} + + - name: Generate sha256 digest for sources + id: digest + run: | + echo "digest=$(cat *.xz | sha256sum | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT + + - name: Release Sources Artifact + id: artifact-upload + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ needs.inputs.outputs.ref }}-sources + path: | + *.xz + + attest-release-sources: + name: Attest Release Sources + runs-on: ubuntu-24.04 + if: github.event_name != 'pull_request' + needs: + - inputs + - release-sources + permissions: + id-token: 
write + attestations: write + steps: + - name: Checkout Release Scripts + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/workflows/upload-release-artifact + llvm/utils/release/github-upload-release.py + llvm/utils/git/requirements.txt + sparse-checkout-cone-mode: false + + - name: Upload Artifacts + uses: ./.github/workflows/upload-release-artifact + with: + artifact-id: ${{ needs.release-sources.outputs.artifact-id }} + attestation-name: ${{ needs.inputs.outputs.ref }}-sources-attestation + digest: ${{ needs.release-sources.outputs.digest }} + upload: false diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 000000000000..36b4b6b4391b --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,62 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +# Check current LLVM-Project results here: https://securityscorecards.dev/viewer/?uri=github.com/llvm/llvm-project + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '38 20 * * *' + +# Declare default permissions as read only. +permissions: + contents: read + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-24.04 + if: github.repository == 'llvm/llvm-project' + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). 
+ id-token: write + + steps: + - name: "Checkout code" + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3 + with: + results_file: results.sarif + results_format: sarif + + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6 + with: + sarif_file: results.sarif diff --git a/.github/workflows/test-unprivileged-download-artifact.yml b/.github/workflows/test-unprivileged-download-artifact.yml new file mode 100644 index 000000000000..ce807287abd2 --- /dev/null +++ b/.github/workflows/test-unprivileged-download-artifact.yml @@ -0,0 +1,64 @@ +name: Test Unprivileged Download Artifact Action + +permissions: + contents: read + +on: + push: + branches: + - main + paths: + - .github/workflows/test-unprivileged-download-artifact.yml + - '.github/workflows/unprivileged-download-artifact/**' + pull_request: + paths: + - .github/workflows/test-unprivileged-download-artifact.yml + - '.github/workflows/unprivileged-download-artifact/**' + +jobs: + upload-test-artifact: + name: Upload Test Artifact + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + steps: + - name: Create Test Files + run: | + echo "foo" > comment1 + echo 
"bar" > comment2 + - name: Upload Test File 1 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: artifact-name-1 + path: | + comment1 + - name: Upload Test File 2 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: artifact-name-2 + path: | + comment2 + + + test-download: + name: Test Unprivileged Download Artifact + if: github.repository_owner == 'llvm' + runs-on: ubuntu-24.04 + needs: [ upload-test-artifact ] + steps: + - name: Checkout LLVM + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + sparse-checkout: | + .github/workflows/unprivileged-download-artifact/action.yml + - name: Download Artifact + uses: ./.github/workflows/unprivileged-download-artifact + id: download-artifact + with: + run-id: ${{ github.run_id }} + artifact-name: artifact-name- + - name: Assert That Contents are the Same + run: | + cat comment1 + [[ "$(cat comment1)" == "foo" ]] + cat comment2 + [[ "$(cat comment2)" == "bar" ]] diff --git a/.github/workflows/upload-release-artifact/action.yml b/.github/workflows/upload-release-artifact/action.yml new file mode 100644 index 000000000000..76f6acd02700 --- /dev/null +++ b/.github/workflows/upload-release-artifact/action.yml @@ -0,0 +1,107 @@ +name: Upload Release Artifact +description: >- + Upload release artifact along with an attestation. The action assumes that + the llvm-project repository has already been checked out. +inputs: + release-version: + description: >- + The release where the artifact will be attached. + required: true + upload: + description: >- + Whether or not to upload the file and attestation to the release. If this + is set to false, then the file will be uploaded to the job as an artifact, + but no atteastion will be generated and the artifact won't be uploaded + to the release. 
+ default: true
+ user-token:
+ description: >-
+ Token with permissions to read llvm teams that is used to ensure that
+ the person who triggered the action has permission to upload artifacts.
+ This is required if upload is true.
+ required: false
+ attestation-name:
+ description: >-
+ This will be used for the artifact name that is attached to the workflow and
+ will be used as the basename for the attestation file which will be called
+ $attestation-name.jsonl. If this is not set, it will default
+ to the value of `files`.
+ required: false
+ artifact-id:
+ description: >-
+ Artifact id of the artifact with the files to upload.
+ required: true
+ digest:
+ description: >-
+ sha256 digest to verify the authenticity of the files being uploaded.
+ required: true
+
+runs:
+ using: "composite"
+ steps:
+ - name: Download Artifact
+ uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
+ id: download-artifact
+ with:
+ artifact-ids: ${{ inputs.artifact-id }}
+ path: downloads
+
+ # In theory github artifacts are immutable so we could just rely on using
+ # the artifact-id to download it, but just to be extra safe we want to
+ # generate a digest for the files we are uploading so we can verify it
+ # when downloading.
+ # See also: https://irsl.medium.com/github-artifact-immutability-is-a-lie-9b6244095694
+ - name: Verify Files
+ shell: bash
+ env:
+ INPUTS_DIGEST: ${{ inputs.digest }}
+ run: |
+ digest_file="sha256"
+ echo "$INPUTS_DIGEST -" > $digest_file
+ cat ${{ steps.download-artifact.outputs.download-path }}/* | sha256sum -c $digest_file
+
+ - name: Attest Build Provenance
+ id: provenance
+ uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
+ with:
+ subject-path: ${{ steps.download-artifact.outputs.download-path }}/*
+
+ # Generate an attestation copy for each file to make it easier for users to verify
+ # the files. 
+ - name: Rename attestation file + shell: bash + run: | + for f in ${{ steps.download-artifact.outputs.download-path }}/*; do + cp ${{ steps.provenance.outputs.bundle-path }} $(basename $f).jsonl + done + + - name: Upload Build Provenance + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ inputs.attestation-name }} + path: | + *.jsonl + + - name: Install Python Requirements + if: inputs.upload == 'true' + shell: bash + run: | + pip install --require-hashes -r ./llvm/utils/git/requirements.txt + + - name: Check Permissions + if: inputs.upload == 'true' + env: + GITHUB_TOKEN: ${{ github.token }} + USER_TOKEN: ${{ inputs.user-token }} + shell: bash + run: | + ./llvm/utils/release/./github-upload-release.py --token "$GITHUB_TOKEN" --user "$GITHUB_ACTOR" --user-token "$USER_TOKEN" check-permissions + - name: Upload Release + shell: bash + if: inputs.upload == 'true' + run: | + ./llvm/utils/release/github-upload-release.py \ + --token ${{ github.token }} \ + --release ${{ inputs.release-version }} \ + upload \ + --files ${{ steps.download-artifact.outputs.download-path }}/* *.jsonl diff --git a/qualcomm-software/.gitignore b/qualcomm-software/.gitignore deleted file mode 100644 index 54a05e98a791..000000000000 --- a/qualcomm-software/.gitignore +++ /dev/null @@ -1,12 +0,0 @@ -.*.swp -.idea -/repos*/ -/build*/ -/install*/ -/venv/ -__pycache__ -*.egg-info - -# IDEs -.vscode -compile_commands.json diff --git a/qualcomm-software/ATfE-LICENSE.txt b/qualcomm-software/ATfE-LICENSE.txt deleted file mode 100644 index 8726b3211bad..000000000000 --- a/qualcomm-software/ATfE-LICENSE.txt +++ /dev/null @@ -1,278 +0,0 @@ -============================================================================== -The LLVM Project is under the Apache License v2.0 with LLVM Exceptions: -============================================================================== - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - 
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - ----- LLVM Exceptions to the Apache 2.0 License ---- - -As an exception, if, as a result of your compiling your source code, portions -of this Software are embedded into an Object form of such source code, you -may redistribute such embedded portions in such Object form without complying -with the conditions of Sections 4(a), 4(b) and 4(d) of the License. 
- -In addition, if you combine or link compiled forms of this Software with -software that is licensed under the GPLv2 ("Combined Software") and if a -court of competent jurisdiction determines that the patent provision (Section -3), the indemnity provision (Section 9) or other Section of the License -conflicts with the conditions of the GPLv2, you may retroactively and -prospectively choose to deem waived or otherwise exclude such Section(s) of -the License, but only in their entirety and only with respect to the Combined -Software. - -============================================================================== -Software from third parties included in the LLVM Project: -============================================================================== -The LLVM Project contains third party software which is under different license -terms. All such code will be identified clearly using at least one of two -mechanisms: -1) It will be in a separate directory tree with its own `LICENSE.txt` or - `LICENSE` file at the top containing the specific license and restrictions - which apply to that software, or -2) It will contain specific license and restriction terms at the top of every - file. - -============================================================================== -Legacy LLVM License (https://llvm.org/docs/DeveloperPolicy.html#legacy): -============================================================================== -University of Illinois/NCSA -Open Source License - -Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign. -All rights reserved. 
- -Developed by: - - LLVM Team - - University of Illinois at Urbana-Champaign - - http://llvm.org - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal with -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the - documentation and/or other materials provided with the distribution. - - * Neither the names of the LLVM Team, University of Illinois at - Urbana-Champaign, nor the names of its contributors may be used to - endorse or promote products derived from this Software without specific - prior written permission. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE -SOFTWARE. \ No newline at end of file diff --git a/qualcomm-software/CHANGELOG.md b/qualcomm-software/CHANGELOG.md deleted file mode 100644 index dc834317cace..000000000000 --- a/qualcomm-software/CHANGELOG.md +++ /dev/null @@ -1,15 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. 
- -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [Unreleased] - -### Added -### Changed -### Deprecated -### Removed -### Fixed -### Security - diff --git a/qualcomm-software/CMakeLists.txt b/qualcomm-software/CMakeLists.txt deleted file mode 100644 index 1822878514d6..000000000000 --- a/qualcomm-software/CMakeLists.txt +++ /dev/null @@ -1,821 +0,0 @@ -# -# Copyright (c) 2022-2025, Arm Limited and affiliates. -# -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# - -# How to use this file -# -# Recent versions of the following tools are pre-requisites: -# * A toolchain such as gcc & binutils -# * cmake -# * meson -# * ninja -# * python3 -# * make and qemu to run tests -# -# Commands to build: -# mkdir build -# cd build -# cmake .. -GNinja -DFETCHCONTENT_QUIET=OFF -# ninja -# ninja check-llvm-toolchain -# -# To make it easy to get started, the above command checks out -# the picolibc Git repos automatically. -# -# If the repos are checked out automatically then cmake will fetch the -# latest changes and check them out every time it runs. To disable this -# behaviour run: -# cmake . -DFETCHCONTENT_FULLY_DISCONNECTED=ON -# -# If you prefer you can check out and patch the repos manually and use those: -# mkdir repos -# git -C repos clone https://github.com/picolibc/picolibc.git -# git -C repos/picolibc am -k $PWD/patches/picolibc/*.patch -# mkdir build -# cd build -# cmake .. -GNinja -DFETCHCONTENT_SOURCE_DIR_PICOLIBC=../repos/picolibc -# ninja -# ninja check-llvm-toolchain -# -# To install the toolchain run: -# cmake . 
--install-prefix /absolute/path/to/install/directory -# ninja install-llvm-toolchain -# -# -# This file is designed to be used in a way that will be familiar to -# LLVM developers. Targets like clang and check-all can be built as usual. -# In addition there are targets to build picolibc & runtimes variants. -# -# -# When building the toolchain repeatedly, the most time-consuming part -# can be building the libraries since each one is configured separately. -# To work around this, the variants that get built can be limited using -# the LLVM_TOOLCHAIN_LIBRARY_VARIANTS option e.g.: -# cmake . '-DLLVM_TOOLCHAIN_LIBRARY_VARIANTS=aarch64;armv6m_soft_nofp' - - -# CONFIGURE_HANDLED_BY_BUILD was introduced in CMake 3.20 and it -# greatly speeds up incremental builds. -cmake_minimum_required(VERSION 3.20) - -option( - PREBUILT_TARGET_LIBRARIES - "Target libraries are prebuilt so no need to build them" -) -set(TARGET_LIBRARIES_DIR - "lib/clang-runtimes" CACHE STRING - "Directory containing the target libraries." -) -set(LLVM_TOOLCHAIN_MULTILIB_JSON - "${CMAKE_CURRENT_SOURCE_DIR}/embedded-multilib/json/multilib.json" CACHE STRING - "JSON file defining the multilib." -) -set(LLVM_TOOLCHAIN_LIBRARY_VARIANTS - "all" CACHE STRING - "Build only the specified library variants, or \"all\"." -) -option( - LIBS_DEPEND_ON_TOOLS - "Automatically ensure tools like clang are up to date before building libraries. - Set this to OFF if you're working on the libraries and want to avoid rebuilding - the tools every time you update llvm-project." - ON -) -option( - LIBS_USE_COMPILER_LAUNCHER - "Pass CMAKE_C_COMPILER_LAUNCHER and CMAKE_CXX_COMPILER_LAUNCHER - down to the library builds, so that programs such as ccache can - be used to speed up repeated builds. This is not done by default, - as it can also make the inital build slower due to the cold cache." -) -option( - ENABLE_PARALLEL_LIB_CONFIG - "Run the library variant configuration steps in parallel." 
- ON -) -option( - ENABLE_PARALLEL_LIB_BUILD - "Run the library variant build steps in parallel." - OFF -) -set(PARALLEL_LIB_BUILD_LEVELS - "1" CACHE STRING - "If ENABLE_PARALLEL_LIB_BUILD is ON, this number of processes will be assigned to each variant built." -) -option( - ENABLE_QEMU_TESTING - "Enable tests that use QEMU. This option is ON by default." - ON -) -set(LLVM_TOOLCHAIN_C_LIBRARY - "picolibc" CACHE STRING - "Which C library to use." -) -set_property(CACHE LLVM_TOOLCHAIN_C_LIBRARY - PROPERTY STRINGS picolibc musl-embedded) - -option( - SHORT_BUILD_PATHS - "Shorten the lengths of internal build paths, which may help with OS path - length limits. This replaces the variant suffixes in build directories with - index numbers, which is shorter but less descriptive." - OFF -) -option( - ENABLE_LINUX_LIBRARIES - "Include additional library variants for use on Linux targets" - OFF -) -if(ENABLE_LINUX_LIBRARIES AND WIN32) - message(FATAL_ERROR "ENABLE_LINUX_LIBRARIES is not permitted on Windows") -endif() -option(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL - "Make cpack build an overlay package that can be unpacked over the main toolchain to install a secondary set of libraries based on musl-embedded." 
- ${overlay_install_default}) -if(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL "picolibc") - message(FATAL_ERROR "LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL is only permitted for C libraries other than the default picolibc") - endif() - if(ENABLE_LINUX_LIBRARIES) - message(FATAL_ERROR "ENABLE_LINUX_LIBRARIES is not permitted with LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL") - endif() -endif() - -set(BUG_REPORT_URL "https://github.com/qualcomm/cpullvm-toolchain/issues" CACHE STRING "") -set(LLVM_DISTRIBUTION_COMPONENTS - clang-resource-headers - clang - dsymutil - lld - ld.eld - LW - PluginAPIHeaders - YAMLMapParser - llvm-ar - llvm-config - llvm-cov - llvm-cxxfilt - llvm-dwarfdump - llvm-mc - llvm-nm - llvm-objcopy - llvm-objdump - llvm-profdata - llvm-ranlib - llvm-readelf - llvm-readobj - llvm-size - llvm-strings - llvm-strip - llvm-symbolizer - LTO - CACHE STRING "" -) -set(LLVM_TOOLCHAIN_DISTRIBUTION_COMPONENTS - llvm-toolchain-docs - llvm-toolchain-libs - llvm-toolchain-third-party-licenses - CACHE STRING "Components defined by this CMakeLists that should be -installed by the install-llvm-toolchain target" -) -set(LLVM_ENABLE_PROJECTS clang;lld;polly CACHE STRING "") -set(LLVM_TARGETS_TO_BUILD AArch64;ARM;RISCV;X86 CACHE STRING "") -# There's test failures when enabling x86 support and testing on native -# AArch64 Linux machines. Just disable x86 in eld until these are -# resolved. -set(ELD_TARGETS_TO_BUILD AArch64;ARM;RISCV CACHE STRING "") -set(LLVM_DEFAULT_TARGET_TRIPLE aarch64-unknown-linux-gnu CACHE STRING "") -set(LLVM_BUILD_RUNTIME OFF CACHE BOOL "") -set(LIBCLANG_BUILD_STATIC ON CACHE BOOL "") -set(LLVM_POLLY_LINK_INTO_TOOLS ON CACHE BOOL "") -set(CLANG_DEFAULT_LINKER lld CACHE STRING "") - -# Default to a release build -# (CMAKE_BUILD_TYPE is a special CMake variable so if you want to set -# it then you have to FORCE it). 
-if(NOT CMAKE_BUILD_TYPE) - set(CMAKE_BUILD_TYPE Release CACHE BOOL "" FORCE) -endif() - -find_package(Python3 REQUIRED COMPONENTS Interpreter) - -if(NOT CMAKE_C_COMPILER_LAUNCHER AND NOT CMAKE_CXX_COMPILER_LAUNCHER) - # If ccache is available then use it by default. - find_program(CCACHE_EXECUTABLE ccache) - if(CCACHE_EXECUTABLE) - set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE FILEPATH "" FORCE) - set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE FILEPATH "" FORCE) - endif() -endif() - -# If lld is available then use it by default. -find_program(LLD_EXECUTABLE lld) -if(LLD_EXECUTABLE) - set(LLVM_USE_LINKER lld CACHE STRING "") -endif() - -# A lot of files get installed which makes the install messages too -# noisy to be useful so default to disabling them. -set(CMAKE_INSTALL_MESSAGE NEVER CACHE STRING "") - -include(ExternalProject) -include(FetchContent) -include(ProcessorCount) - -# Check out and patch eld -include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/fetch_eld.cmake) - -# Check out and patch picolibc or musl-embedded if required. -# -# If you'd rather check out and patch manually then run cmake with -# -DFETCHCONTENT_SOURCE_DIR_PICOLIBC=/path/to/picolibc -# -DFETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED=/path/to/musl-embedded -# -# By default check out will be silent but this can be changed by running -# cmake with -DFETCHCONTENT_QUIET=OFF -# -# If you want to stop cmake updating the repos then run -# cmake . 
-DFETCHCONTENT_FULLY_DISCONNECTED=ON -if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL picolibc) - include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/fetch_picolibc.cmake) -endif() -if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL musl-embedded OR ENABLE_LINUX_LIBRARIES) - include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/fetch_musl-embedded.cmake) -endif() -if(ENABLE_LINUX_LIBRARIES) - include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/fetch_musl.cmake) -endif() - -################################################################################################## -# We set all project properties later, this call is just to enable the -# CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT check -project(CPULLVMToolchain) -# We generally want to install to a local directory to see what the -# output will look like rather than install into the system, so change -# the default accordingly. -# See https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT.html -# Note that THIS CODE ONLY WORKS AFTER THE FIRST CALL TO PROJECT so it -# can't be moved after the add_subdirectory() command below as it will be too late - -# the llvm project will set it to the default system install directory. -if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT) - set(CMAKE_INSTALL_PREFIX - "${CMAKE_BINARY_DIR}/install" - CACHE PATH "" FORCE - ) -endif() -################################################################################################## - -if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL musl-embedded) - install( - FILES - ${CMAKE_CURRENT_SOURCE_DIR}/${LLVM_TOOLCHAIN_C_LIBRARY}.cfg - DESTINATION bin - COMPONENT llvm-toolchain-${LLVM_TOOLCHAIN_C_LIBRARY}-configs - ) -endif() - -# Include eld -set(LLVM_EXTERNAL_PROJECTS eld) -set(LLVM_EXTERNAL_ELD_SOURCE_DIR "${eld_SOURCE_DIR}") - -set(clang_lit_xfails_path "${CMAKE_CURRENT_BINARY_DIR}/clang_lit_xfails.txt") -set(CLANG_TEST_EXTRA_ARGS "@${clang_lit_xfails_path}") - -set(llvmproject_src_dir ${CMAKE_CURRENT_SOURCE_DIR}/..) 
-add_subdirectory( - ${llvmproject_src_dir}/llvm llvm -) - -add_dependencies(check-all check-eld) - -# Update the xfail/xfail-not list before running the tests. -add_custom_target( - clang-xfail-lit-args - COMMAND ${Python3_EXECUTABLE} - ${CMAKE_CURRENT_SOURCE_DIR}/embedded-runtimes/test-support/xfails.py - --project clang - --clang ${LLVM_BINARY_DIR}/bin/clang - --output-args ${clang_lit_xfails_path} - DEPENDS - clang -) -add_dependencies(check-clang clang-xfail-lit-args) -add_dependencies(check-all clang-xfail-lit-args) - -get_directory_property(LLVM_VERSION_MAJOR DIRECTORY ${llvmproject_src_dir}/llvm DEFINITION LLVM_VERSION_MAJOR) -get_directory_property(LLVM_VERSION_MINOR DIRECTORY ${llvmproject_src_dir}/llvm DEFINITION LLVM_VERSION_MINOR) -get_directory_property(LLVM_VERSION_PATCH DIRECTORY ${llvmproject_src_dir}/llvm DEFINITION LLVM_VERSION_PATCH) - -project( - CPULLVM - VERSION ${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH} - DESCRIPTION "CPULLVM Toolchain" - HOMEPAGE_URL "https://github.com/qualcomm/cpullvm-toolchain" -) - -# Set package name for shorter archive file name -set(SHORT_PACKAGE_NAME "cpullvm") - -# Set directory name containing CFI ignore file -set(TARGET_CFI_DIR - "lib/clang/${LLVM_VERSION_MAJOR}/share" CACHE STRING - "Directory containing the CFI ignore file." -) - -# Set package name and version. -if(DEFINED LLVM_TOOLCHAIN_PACKAGE_NAME) - set(PACKAGE_NAME ${LLVM_TOOLCHAIN_PACKAGE_NAME}) -else() - set(PACKAGE_NAME ${SHORT_PACKAGE_NAME}) -endif() -set(CPACK_PACKAGE_NAME ${PACKAGE_NAME}) -set(PACKAGE_VERSION "${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}") - -# Restrict which LLVM components are installed. 
-if(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - set(CPACK_COMPONENTS_ALL - llvm-toolchain-libs - llvm-toolchain-${LLVM_TOOLCHAIN_C_LIBRARY}-configs - llvm-toolchain-third-party-licenses) -else() - set(CPACK_COMPONENTS_ALL ${LLVM_TOOLCHAIN_DISTRIBUTION_COMPONENTS} ${LLVM_DISTRIBUTION_COMPONENTS}) -endif() -# Enable limiting the installed components in TGZ and ZIP packages. -set(CPACK_ARCHIVE_COMPONENT_INSTALL TRUE) -# Don't create a separate archive for each component. -set(CPACK_COMPONENTS_GROUPING ALL_COMPONENTS_IN_ONE) -# When extracting the files put them in an ArmCompiler-.../ directory. -# Exception: the overlay packages do not do this, because they have -# to be able to unpack over the top of an existing installation on all -# platforms, and each platform has a different top-level directory name. -if(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - set(CPACK_COMPONENT_INCLUDE_TOPLEVEL_DIRECTORY FALSE) -else() - set(CPACK_COMPONENT_INCLUDE_TOPLEVEL_DIRECTORY TRUE) -endif() -# Compress package in parallel. -set(CPACK_THREADS 0 CACHE STRING "") - -# set processor_name -string(TOLOWER ${CMAKE_SYSTEM_PROCESSOR} processor_name) -string(REGEX MATCH "amd64|x64|x86" x86_match ${processor_name}) -if(x86_match) - set(processor_name "x86_64") -else() - set(processor_name "AArch64") -endif() - -# If a specific system name is supplied, use that instead. 
-if (PACKAGE_SYSTEM_NAME) - set(CPACK_SYSTEM_NAME "${PACKAGE_SYSTEM_NAME}-${processor_name}") -else() - set(CPACK_SYSTEM_NAME "${CMAKE_SYSTEM_NAME}-${processor_name}") -endif() - -set(CPACK_PACKAGE_VERSION ${PACKAGE_VERSION}) - -if(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - set(PACKAGE_FILE_NAME ${PACKAGE_NAME}-${LLVM_TOOLCHAIN_C_LIBRARY}-overlay-${PACKAGE_VERSION}) -else() - if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL "picolibc") - set(PACKAGE_FILE_NAME ${PACKAGE_NAME}-${PACKAGE_VERSION}-${CPACK_SYSTEM_NAME}) - else() - set(PACKAGE_FILE_NAME ${PACKAGE_NAME}_${LLVM_TOOLCHAIN_C_LIBRARY}-${PACKAGE_VERSION}-${CPACK_SYSTEM_NAME}) - endif() -endif() -set(CPACK_PACKAGE_FILE_NAME ${PACKAGE_FILE_NAME}) - -# Including CPack again after llvm CMakeLists.txt included it -# resets CPACK_PACKAGE_VERSION to the default MAJOR.MINOR.PATCH format. -include(CPack) - -# Ensure LLVM tool symlinks are installed. -list(APPEND CMAKE_MODULE_PATH ${llvmproject_src_dir}/llvm/cmake/modules) -llvm_install_symlink(LLVM llvm-ranlib llvm-ar ALWAYS_GENERATE) -llvm_install_symlink(LLVM llvm-readelf llvm-readobj ALWAYS_GENERATE) -llvm_install_symlink(LLVM llvm-strip llvm-objcopy ALWAYS_GENERATE) - -# Generate VERSION.txt -# Use add_custom_target instead of add_custom_command so that the target -# is always considered out-of-date, ensuring that VERSION.txt will be -# updated when the git revision changes. 
-add_custom_target( - version_txt - COMMAND - "${CMAKE_COMMAND}" - -DCPULLVMToolchain_VERSION=${CPULLVMToolchain_VERSION} - -DCPULLVMToolchain_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR} - -Dllvmproject_src_dir=${llvmproject_src_dir} - -Deld_SOURCE_DIR=${eld_SOURCE_DIR} - # at most one of picolibc and musl options is needed, but easiest to - # specify both definitions - -Dpicolibc_SOURCE_DIR=${picolibc_SOURCE_DIR} - -Dpicolibc_URL=${picolibc_URL} - -Dmusl-embedded_SOURCE_DIR=${musl-embedded_SOURCE_DIR} - -Dmusl-embedded_URL=${musl-embedded_URL} - # but we do tell the script which library we're actually using - -DLLVM_TOOLCHAIN_C_LIBRARY=${LLVM_TOOLCHAIN_C_LIBRARY} - -Dmusl_SOURCE_DIR=${musl_SOURCE_DIR} - -Dmusl_URL=${musl_URL} - -DENABLE_LINUX_LIBRARIES=${ENABLE_LINUX_LIBRARIES} - -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/generate_version_txt.cmake - BYPRODUCTS ${CMAKE_CURRENT_BINARY_DIR}/VERSION.txt -) -if(NOT (LLVM_TOOLCHAIN_C_LIBRARY STREQUAL picolibc)) - install( - FILES "${CMAKE_CURRENT_BINARY_DIR}/VERSION.txt" - DESTINATION . - RENAME VERSION_${LLVM_TOOLCHAIN_C_LIBRARY}.txt - COMPONENT llvm-toolchain-third-party-licenses - ) -else() - install( - FILES "${CMAKE_CURRENT_BINARY_DIR}/VERSION.txt" - DESTINATION . - COMPONENT llvm-toolchain-third-party-licenses - ) -endif() - - -# Groups all the targets that comprise the toolchain. -add_custom_target(llvm-toolchain ALL) - -# Groups all the runtime targets -add_custom_target(llvm-toolchain-runtimes) - -add_dependencies( - llvm-toolchain - ${LLVM_DISTRIBUTION_COMPONENTS} -) - -add_dependencies( - llvm-toolchain - llvm-toolchain-runtimes - version_txt -) - -# Set LLVM_DEFAULT_EXTERNAL_LIT to the directory of clang -# which was build in previous step. 
This path is not exported -# by add_subdirectory of llvm project -set(LLVM_DEFAULT_EXTERNAL_LIT "${LLVM_BINARY_DIR}/bin/llvm-lit") - -add_custom_target(check-llvm-toolchain-runtimes) -add_custom_target(check-${LLVM_TOOLCHAIN_C_LIBRARY}) -add_custom_target(check-compiler-rt) -add_custom_target(check-cxx) -add_custom_target(check-cxxabi) -add_custom_target(check-unwind) - -if(NOT PREBUILT_TARGET_LIBRARIES) - if(LIBS_DEPEND_ON_TOOLS) - set(lib_tool_dependencies - clang - lld - ld.eld - llvm-ar - llvm-config - llvm-nm - llvm-ranlib - llvm-strip - ) - endif() - - - add_dependencies( - check-llvm-toolchain-runtimes - check-${LLVM_TOOLCHAIN_C_LIBRARY} - check-compiler-rt - check-cxx - check-cxxabi - check-unwind - ) - - if(LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - # If we're building a non-default libc with the intention of - # installing it as an overlay on the main package archive, then - # all of its includes, libraries and multilib.yaml go in a - # subdirectory of lib/clang-runtimes. Configuration files in the - # bin directory will make it easy to reset the sysroot to point at - # that subdir. - set(library_subdir "/${LLVM_TOOLCHAIN_C_LIBRARY}") - else() - set(library_subdir "") - endif() - - if(LIBS_USE_COMPILER_LAUNCHER) - if(CMAKE_C_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER}") - endif() - if(CMAKE_CXX_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}") - endif() - endif() - - # ENABLE_VARIANTS expects a semi-colon separated list. - # To prevent CMake expanding it automatically while passing it - # down, switch to comma separated. Enabling the ExternalProject - # LIST_SEPARATOR option will handle switching it back. 
- string(REPLACE ";" "," ENABLE_VARIANTS_PASSTHROUGH "${LLVM_TOOLCHAIN_LIBRARY_VARIANTS}") - - set(multilib_prefix ${CMAKE_BINARY_DIR}/multilib-builds) - - ExternalProject_Add( - multilib-${LLVM_TOOLCHAIN_C_LIBRARY} - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/embedded-multilib - STAMP_DIR ${multilib_prefix}/multilib/${LLVM_TOOLCHAIN_C_LIBRARY}-stamp - BINARY_DIR ${multilib_prefix}/multilib/${LLVM_TOOLCHAIN_C_LIBRARY}-build - DOWNLOAD_DIR ${multilib_prefix}/multilib/${LLVM_TOOLCHAIN_C_LIBRARY}-dl - TMP_DIR ${multilib_prefix}/multilib/${LLVM_TOOLCHAIN_C_LIBRARY}-tmp - INSTALL_DIR ${CMAKE_CURRENT_BINARY_DIR}/llvm/${TARGET_LIBRARIES_DIR}${library_subdir} - DEPENDS ${lib_tool_dependencies} - CMAKE_ARGS - ${compiler_launcher_cmake_args} - -DC_LIBRARY=${LLVM_TOOLCHAIN_C_LIBRARY} - -DLLVM_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}/llvm - -DMULTILIB_JSON=${LLVM_TOOLCHAIN_MULTILIB_JSON} - -DENABLE_VARIANTS=${ENABLE_VARIANTS_PASSTHROUGH} - -DENABLE_PARALLEL_LIB_CONFIG=${ENABLE_PARALLEL_LIB_CONFIG} - -DENABLE_PARALLEL_LIB_BUILD=${ENABLE_PARALLEL_LIB_BUILD} - -DPARALLEL_LIB_BUILD_LEVELS=${PARALLEL_LIB_BUILD_LEVELS} - -DENABLE_QEMU_TESTING=${ENABLE_QEMU_TESTING} - -DFETCHCONTENT_SOURCE_DIR_PICOLIBC=${FETCHCONTENT_SOURCE_DIR_PICOLIBC} - -DFETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED=${FETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED} - -DCMAKE_INSTALL_PREFIX= - -DPROJECT_PREFIX=${multilib_prefix} - -DNUMERICAL_BUILD_NAMES=${SHORT_BUILD_PATHS} - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - LIST_SEPARATOR , - CONFIGURE_HANDLED_BY_BUILD TRUE - TEST_EXCLUDE_FROM_MAIN TRUE - STEP_TARGETS build install - ) - - add_dependencies( - llvm-toolchain-runtimes - multilib-${LLVM_TOOLCHAIN_C_LIBRARY}-install - ) - - foreach(check_target check-${LLVM_TOOLCHAIN_C_LIBRARY} check-compiler-rt check-cxx check-cxxabi check-unwind) - ExternalProject_Add_Step( - multilib-${LLVM_TOOLCHAIN_C_LIBRARY} - ${check_target} - COMMAND "${CMAKE_COMMAND}" --build --target ${check_target} - USES_TERMINAL TRUE - 
EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - ) - ExternalProject_Add_StepTargets(multilib-${LLVM_TOOLCHAIN_C_LIBRARY} ${check_target}) - ExternalProject_Add_StepDependencies( - multilib-${LLVM_TOOLCHAIN_C_LIBRARY} - ${check_target} - multilib-${LLVM_TOOLCHAIN_C_LIBRARY}-install - ) - add_dependencies(${check_target} multilib-${LLVM_TOOLCHAIN_C_LIBRARY}-${check_target}) - endforeach() - - # Read the json to generate variant specific target names for convenience. - file(READ ${LLVM_TOOLCHAIN_MULTILIB_JSON} multilib_json_str) - string(JSON multilib_defs GET ${multilib_json_str} "libs") - - string(JSON lib_count LENGTH ${multilib_defs}) - math(EXPR lib_count_dec "${lib_count} - 1") - - foreach(lib_idx RANGE ${lib_count_dec}) - string(JSON lib_def GET ${multilib_defs} ${lib_idx}) - string(JSON variant GET ${lib_def} "variant") - foreach(check_target check-${LLVM_TOOLCHAIN_C_LIBRARY} check-compiler-rt check-cxx check-cxxabi check-unwind) - ExternalProject_Add_Step( - multilib-${LLVM_TOOLCHAIN_C_LIBRARY} - ${check_target}-${variant} - COMMAND "${CMAKE_COMMAND}" --build --target ${check_target}-${variant} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - ) - ExternalProject_Add_StepTargets(multilib-${LLVM_TOOLCHAIN_C_LIBRARY} ${check_target}-${variant}) - ExternalProject_Add_StepDependencies( - multilib-${LLVM_TOOLCHAIN_C_LIBRARY} - ${check_target}-${variant} - multilib-${LLVM_TOOLCHAIN_C_LIBRARY}-install - ) - add_custom_target(${check_target}-${variant}) - add_dependencies(${check_target}-${variant} multilib-${LLVM_TOOLCHAIN_C_LIBRARY}-${check_target}-${variant}) - endforeach() - endforeach() - - # Linux - if(ENABLE_LINUX_LIBRARIES) - set(linux_library_prefix ${CMAKE_BINARY_DIR}/linux-library-builds) - ExternalProject_Add( - linux-libraries - STAMP_DIR ${linux_library_prefix}/stamp - BINARY_DIR ${linux_library_prefix}/build - DOWNLOAD_DIR ${linux_library_prefix}/dl - TMP_DIR ${linux_library_prefix}/tmp - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/scripts - INSTALL_DIR 
${CMAKE_CURRENT_BINARY_DIR}/llvm/linux-libraries - DEPENDS ${lib_tool_dependencies} - CONFIGURE_COMMAND "" - BUILD_COMMAND - /build_linux_runtimes.sh - --tools-path ${CMAKE_CURRENT_BINARY_DIR}/llvm/bin - --base-build-dir - --base-install-dir - --llvm-src-dir ${llvmproject_src_dir} - --musl-src-dir ${musl_SOURCE_DIR} - --musl-emb-src-dir ${musl-embedded_SOURCE_DIR} - --download-dir - INSTALL_COMMAND "" - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - USES_TERMINAL_INSTALL TRUE - STEP_TARGETS build install - ) - - add_dependencies( - llvm-toolchain-runtimes - linux-libraries-install - ) - endif() -endif() - -FILE(WRITE "${CMAKE_BINARY_DIR}/llvm/${TARGET_CFI_DIR}/cfi_ignorelist.txt" "") -install( - FILES - ${CMAKE_BINARY_DIR}/llvm/${TARGET_CFI_DIR}/cfi_ignorelist.txt - DESTINATION ${TARGET_CFI_DIR} - COMPONENT llvm-toolchain-libs -) - -install( - DIRECTORY ${LLVM_BINARY_DIR}/${TARGET_LIBRARIES_DIR}/. - DESTINATION ${TARGET_LIBRARIES_DIR} - COMPONENT llvm-toolchain-libs -) - -if(ENABLE_LINUX_LIBRARIES) - # libc, libc++/libc++abi/libunwind only - install( - DIRECTORY ${LLVM_BINARY_DIR}/linux-libraries/. - DESTINATION . - COMPONENT llvm-toolchain-libs - REGEX "resource-dir" EXCLUDE - ) - # compiler-rt - install( - DIRECTORY ${LLVM_BINARY_DIR}/linux-libraries/resource-dir/. - DESTINATION lib/clang/${LLVM_VERSION_MAJOR} - COMPONENT llvm-toolchain-libs - ) -endif() - -install( - FILES CHANGELOG.md LICENSE.txt README.md - DESTINATION . - COMPONENT llvm-toolchain-docs -) - -configure_file(cmake/THIRD-PARTY-LICENSES.txt.in THIRD-PARTY-LICENSES.txt) - -if(NOT LLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL) - set(third_party_license_summary_install_dir .) 
- set(third_party_license_files_install_dir third-party-licenses) -else() - # If we're building an overlay archive, put all the license files - # one level down in third-party-licenses/, so that - # COPYING.NEWLIB doesn't collide with the file of the same name - # from picolibc, and the LLVM license files are also duplicated - # (in case the overlay archive is used with a non-matching version - # of the main toolchain). - set(third_party_license_summary_install_dir third-party-licenses/${LLVM_TOOLCHAIN_C_LIBRARY}) - set(third_party_license_files_install_dir third-party-licenses/${LLVM_TOOLCHAIN_C_LIBRARY}) -endif() - -install( - FILES ${CMAKE_CURRENT_BINARY_DIR}/THIRD-PARTY-LICENSES.txt - DESTINATION ${third_party_license_summary_install_dir} - COMPONENT llvm-toolchain-third-party-licenses -) - -set(third_party_license_files - ${llvmproject_src_dir}/llvm/LICENSE.TXT LLVM-LICENSE.txt - ${llvmproject_src_dir}/clang/LICENSE.TXT CLANG-LICENSE.txt - ${llvmproject_src_dir}/lld/LICENSE.TXT LLD-LICENSE.txt - ${llvmproject_src_dir}/compiler-rt/LICENSE.TXT COMPILER-RT-LICENSE.txt - ${llvmproject_src_dir}/libcxx/LICENSE.TXT LIBCXX-LICENSE.txt - ${llvmproject_src_dir}/libcxxabi/LICENSE.TXT LIBCXXABI-LICENSE.txt - ${llvmproject_src_dir}/libunwind/LICENSE.TXT LIBUNWIND-LICENSE.txt -) - -list(APPEND third_party_license_files ${eld_SOURCE_DIR}/LICENSE eld-LICENSE.txt) -list(APPEND third_party_license_files - ${CMAKE_CURRENT_LIST_DIR}/ATfE-LICENSE.txt ATfE-LICENSE.txt -) - -if(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL picolibc) - list(APPEND third_party_license_files - ${picolibc_SOURCE_DIR}/COPYING.NEWLIB COPYING.NEWLIB - ${picolibc_SOURCE_DIR}/COPYING.picolibc COPYING.picolibc - ) -elseif(LLVM_TOOLCHAIN_C_LIBRARY STREQUAL musl-embedded) - list(APPEND third_party_license_files - ${musl-embedded_SOURCE_DIR}/COPYRIGHT musl-embedded-COPYRIGHT.txt - ) -endif() -if(ENABLE_LINUX_LIBRARIES) - list(APPEND third_party_license_files - ${musl_SOURCE_DIR}/COPYRIGHT musl-COPYRIGHT.txt) - - # We added 
this above if musl-embedded was selected. - if(NOT LLVM_TOOLCHAIN_C_LIBRARY STREQUAL musl-embedded) - list(APPEND third_party_license_files - ${musl-embedded_SOURCE_DIR}/COPYRIGHT musl-embedded-COPYRIGHT.txt) - endif() -endif() - -while(third_party_license_files) - list(POP_FRONT third_party_license_files source_file destination_name) - install( - FILES ${source_file} - DESTINATION ${third_party_license_files_install_dir} - COMPONENT llvm-toolchain-third-party-licenses - RENAME ${destination_name} - ) -endwhile() - -# LLVM-style install -# To use it: -# ninja install-llvm-toolchain -add_custom_target( - install-llvm-toolchain -) -foreach(component ${LLVM_TOOLCHAIN_DISTRIBUTION_COMPONENTS}) - add_custom_target( - install-${component} - COMMAND - "${CMAKE_COMMAND}" - --install ${CMAKE_BINARY_DIR} - --component ${component} - USES_TERMINAL - ) - add_dependencies( - install-${component} - llvm-toolchain - ) - add_dependencies( - install-llvm-toolchain - install-${component} - ) -endforeach() -# Also run install-distribution to install the LLVM -# binaries. 
-add_dependencies( - install-llvm-toolchain - install-distribution -) - -# package-llvm-toolchain - target to create package -if(WIN32) - set(cpack_generator ZIP) - set(package_filename_extension ".zip") -else() - set(cpack_generator TXZ) - set(package_filename_extension ".tar.xz") -endif() -set(package_filepath ${CMAKE_BINARY_DIR}/${PACKAGE_FILE_NAME}${package_filename_extension}) -add_custom_command( - OUTPUT ${package_filepath} - COMMAND "${CMAKE_COMMAND}" -E rm -f ${package_filepath} - COMMAND cpack -G ${cpack_generator} - DEPENDS llvm-toolchain - USES_TERMINAL - WORKING_DIRECTORY ${CMAKE_BINARY_DIR} -) -add_custom_target( - package-llvm-toolchain - DEPENDS ${package_filepath} -) - -add_custom_target(check-llvm-toolchain) -add_dependencies(check-llvm-toolchain check-${LLVM_TOOLCHAIN_C_LIBRARY}) -add_dependencies(check-llvm-toolchain check-compiler-rt) -add_subdirectory(test) -add_dependencies(check-llvm-toolchain check-llvm-toolchain-lit) - -add_custom_target(check-all-llvm-toolchain) - -add_dependencies( - check-all-llvm-toolchain - check-all - check-llvm-toolchain - check-llvm-toolchain-runtimes -) diff --git a/qualcomm-software/LICENSE.txt b/qualcomm-software/LICENSE.txt deleted file mode 100644 index fa6ac5400070..000000000000 --- a/qualcomm-software/LICENSE.txt +++ /dev/null @@ -1,279 +0,0 @@ -============================================================================== -The LLVM Project is under the Apache License v2.0 with LLVM Exceptions: -============================================================================== - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - - ----- LLVM Exceptions to the Apache 2.0 License ---- - -As an exception, if, as a result of your compiling your source code, portions -of this Software are embedded into an Object form of such source code, you -may redistribute such embedded portions in such Object form without complying -with the conditions of Sections 4(a), 4(b) and 4(d) of the License. 
- -In addition, if you combine or link compiled forms of this Software with -software that is licensed under the GPLv2 ("Combined Software") and if a -court of competent jurisdiction determines that the patent provision (Section -3), the indemnity provision (Section 9) or other Section of the License -conflicts with the conditions of the GPLv2, you may retroactively and -prospectively choose to deem waived or otherwise exclude such Section(s) of -the License, but only in their entirety and only with respect to the Combined -Software. - -============================================================================== -Software from third parties included in the LLVM Project: -============================================================================== -The LLVM Project contains third party software which is under different license -terms. All such code will be identified clearly using at least one of two -mechanisms: -1) It will be in a separate directory tree with its own `LICENSE.txt` or - `LICENSE` file at the top containing the specific license and restrictions - which apply to that software, or -2) It will contain specific license and restriction terms at the top of every - file. - -============================================================================== -Legacy LLVM License (https://llvm.org/docs/DeveloperPolicy.html#legacy): -============================================================================== -University of Illinois/NCSA -Open Source License - -Copyright (c) 2003-2019 University of Illinois at Urbana-Champaign. -All rights reserved. 
- -Developed by: - - LLVM Team - - University of Illinois at Urbana-Champaign - - http://llvm.org - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal with -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the - documentation and/or other materials provided with the distribution. - - * Neither the names of the LLVM Team, University of Illinois at - Urbana-Champaign, nor the names of its contributors may be used to - endorse or promote products derived from this Software without specific - prior written permission. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE -SOFTWARE. - diff --git a/qualcomm-software/cmake/.gitattributes b/qualcomm-software/cmake/.gitattributes deleted file mode 100644 index 6ba136211d41..000000000000 --- a/qualcomm-software/cmake/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# During git archive, replace $Format:%H$ with the commit hash. 
-generate_version_txt.cmake export-subst diff --git a/qualcomm-software/cmake/THIRD-PARTY-LICENSES.txt.in b/qualcomm-software/cmake/THIRD-PARTY-LICENSES.txt.in deleted file mode 100644 index 60eb03ef8a84..000000000000 --- a/qualcomm-software/cmake/THIRD-PARTY-LICENSES.txt.in +++ /dev/null @@ -1,16 +0,0 @@ -This product embeds and uses the following pieces of software which have -additional or alternate licenses: - - LLVM: third-party-licenses/LLVM-LICENSE.txt - - Clang: third-party-licenses/CLANG-LICENSE.txt - - lld: third-party-licenses/LLD-LICENSE.txt - - compiler-rt: third-party-licenses/COMPILER-RT-LICENSE.txt - - libc++: third-party-licenses/LIBCXX-LICENSE.txt - - libc++abi: third-party-licenses/LIBCXXABI-LICENSE.txt - - libunwind: third-party-licenses/LIBUNWIND-LICENSE.txt - - Picolibc: third-party-licenses/COPYING.NEWLIB, third-party-licenses/COPYING.picolibc - - eld: third-party-licenses/eld-LICENSE.txt - - musl-embedded: third-party-licenses/musl-embedded-COPYRIGHT.txt - - musl: third-party-licenses/musl-COPYRIGHT.txt - - Arm Toolchain for Embedded: third-party-licenses/ATfE-LICENSE.txt - -Picolibc licenses refer to its source files. Sources are identified in VERSION.txt. 
diff --git a/qualcomm-software/cmake/VERSION.txt.in b/qualcomm-software/cmake/VERSION.txt.in deleted file mode 100644 index 314b3c0ec476..000000000000 --- a/qualcomm-software/cmake/VERSION.txt.in +++ /dev/null @@ -1,7 +0,0 @@ -CPULLVM Toolchain ${cpullvm_VERSION} - -Sources: -* cpullvm: https://github.com/qualcomm/cpullvm-toolchain (commit ${cpullvm_COMMIT}) -* eld: ${eld_URL} (commit ${eld_COMMIT}) -* ${LLVM_TOOLCHAIN_C_LIBRARY}: ${LLVM_TOOLCHAIN_C_LIBRARY_URL} (commit ${LLVM_TOOLCHAIN_C_LIBRARY_COMMIT}) -${musl_version_string}${musl-embedded_version_string} \ No newline at end of file diff --git a/qualcomm-software/cmake/copy_target_libraries.py b/qualcomm-software/cmake/copy_target_libraries.py deleted file mode 100755 index fcc2e7fa9375..000000000000 --- a/qualcomm-software/cmake/copy_target_libraries.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 - -""" -Script to copy target libraries into the build tree. -Building libraries can take a very long time on some platforms so -building them on another platform and copying them in can be a big -time saver. -""" - -import argparse -import glob -import os -import shutil -import tarfile -import tempfile - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--distribution-file", - required=True, - help="""Copy from this distribution tarfile. This is a glob to make - things easier on Windows.""", - ) - parser.add_argument( - "--build-dir", - required=True, - help="The build root directory to copy into", - ) - args = parser.parse_args() - - # Find the distribution. This is a glob because scripts may not - # know the version number and we can't rely on the Windows shell to - # do it. 
- for distribution_file in glob.glob(args.distribution_file): - break - else: - raise RuntimeError(f"Distribution glob '{args.distribution_file}' not found") - - lib_dir = os.path.join(args.build_dir, "llvm", "lib") - os.makedirs(lib_dir, exist_ok=True) - - destination = os.path.join(lib_dir, "clang-runtimes") - - if os.path.isdir(destination): - shutil.rmtree(destination) - - with tempfile.TemporaryDirectory( - dir=args.build_dir, - ) as tmp: - # Extract the distribution package. - with tarfile.open(distribution_file) as tf: - tf.extractall(tmp) - - # Find the clang-runtimes directory in the extracted package - # directory. - for clang_runtimes in glob.glob( - os.path.join(tmp, "*", "lib", "clang-runtimes") - ): - break - else: - raise RuntimeError("Extracted distribution directory not found") - - # Move the directory containing the target libraries into - # position. The rest of the files in the distribution folder - # will be deleted automatically when the tmp object goes out of - # scope. - shutil.move(clang_runtimes, lib_dir) - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/cmake/fetch_eld.cmake b/qualcomm-software/cmake/fetch_eld.cmake deleted file mode 100644 index 61e900eda1da..000000000000 --- a/qualcomm-software/cmake/fetch_eld.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# To avoid duplicating the FetchContent code, this file can be -# included by either the top-level toolchain cmake, or the -# embedded-runtimes sub-project. -# FETCHCONTENT_SOURCE_DIR_ELD should be passed down from the -# top level to any library builss to prevent repeated checkouts. - -include(FetchContent) -include(${CMAKE_CURRENT_LIST_DIR}/patch_repo.cmake) - -if(NOT VERSIONS_JSON) - include(${CMAKE_CURRENT_LIST_DIR}/read_versions.cmake) -endif() -read_repo_version(eld eld) -get_patch_command(${CMAKE_CURRENT_LIST_DIR}/.. 
eld eld_patch_command) - -FetchContent_Declare(eld - GIT_REPOSITORY "${eld_URL}" - GIT_TAG "${eld_TAG}" - GIT_SHALLOW "${eld_SHALLOW}" - GIT_PROGRESS TRUE - PATCH_COMMAND ${eld_patch_command} - # We only want to download the content, not configure it at this - # stage. eld will be built as part of LLVM using the sources checked - # out here. - SOURCE_SUBDIR do_not_add_eld_subdir -) -FetchContent_MakeAvailable(eld) -FetchContent_GetProperties(eld SOURCE_DIR FETCHCONTENT_SOURCE_DIR_ELD) diff --git a/qualcomm-software/cmake/fetch_musl-embedded.cmake b/qualcomm-software/cmake/fetch_musl-embedded.cmake deleted file mode 100644 index 72fa4d036c18..000000000000 --- a/qualcomm-software/cmake/fetch_musl-embedded.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# To avoid duplicating the FetchContent code, this file can be -# included by either the top-level toolchain cmake, or the -# embedded-runtimes sub-project. -# FETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED should be passed down from the -# top level to any library builds to prevent repeated checkouts. - -include(FetchContent) -include(${CMAKE_CURRENT_LIST_DIR}/patch_repo.cmake) - -if(NOT VERSIONS_JSON) - include(${CMAKE_CURRENT_LIST_DIR}/read_versions.cmake) -endif() -read_repo_version(musl-embedded musl-embedded) -get_patch_command(${CMAKE_CURRENT_LIST_DIR}/.. musl-embedded musl-embedded_patch_command) - -FetchContent_Declare(musl-embedded - GIT_REPOSITORY "${musl-embedded_URL}" - GIT_TAG "${musl-embedded_TAG}" - GIT_SHALLOW "${musl-embedded_SHALLOW}" - GIT_PROGRESS TRUE - PATCH_COMMAND ${musl-embedded_patch_command} - # We only want to download the content, not configure it at this - # stage. musl-embedded will be built in many configurations using - # ExternalProject_Add using the sources that are checked out here. 
- SOURCE_SUBDIR do_not_add_musl-embedded_subdir -) -FetchContent_MakeAvailable(musl-embedded) -FetchContent_GetProperties(musl-embedded SOURCE_DIR FETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED) diff --git a/qualcomm-software/cmake/fetch_musl.cmake b/qualcomm-software/cmake/fetch_musl.cmake deleted file mode 100644 index 38e615a75ee6..000000000000 --- a/qualcomm-software/cmake/fetch_musl.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# To avoid duplicating the FetchContent code, this file can be -# included by either the top-level toolchain cmake, or the -# runtimes sub-project. -# FETCHCONTENT_SOURCE_DIR_MUSL should be passed down from the -# top level to any library builds to prevent repeated checkouts. - -include(FetchContent) -include(${CMAKE_CURRENT_LIST_DIR}/patch_repo.cmake) - -if(NOT VERSIONS_JSON) - include(${CMAKE_CURRENT_LIST_DIR}/read_versions.cmake) -endif() -read_repo_version(musl musl) -get_patch_command(${CMAKE_CURRENT_LIST_DIR}/.. musl musl_patch_command) - -FetchContent_Declare(musl - GIT_REPOSITORY "${musl_URL}" - GIT_TAG "${musl_TAG}" - GIT_SHALLOW "${musl_SHALLOW}" - GIT_PROGRESS TRUE - PATCH_COMMAND ${musl_patch_command} - # We only want to download the content, not configure it at this - # stage. musl will be built in many configurations using - # ExternalProject_Add using the sources that are checked out here. - SOURCE_SUBDIR do_not_add_musl_subdir -) -FetchContent_MakeAvailable(musl) -FetchContent_GetProperties(musl SOURCE_DIR FETCHCONTENT_SOURCE_DIR_MUSL) diff --git a/qualcomm-software/cmake/fetch_picolibc.cmake b/qualcomm-software/cmake/fetch_picolibc.cmake deleted file mode 100644 index 0c3ab4fe439e..000000000000 --- a/qualcomm-software/cmake/fetch_picolibc.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# To avoid duplicating the FetchContent code, this file can be -# included by either the top-level toolchain cmake, or the -# embedded-runtimes sub-project. 
-# FETCHCONTENT_SOURCE_DIR_PICOLIBC should be passed down from the -# top level to any library builss to prevent repeated checkouts. - -include(FetchContent) -include(${CMAKE_CURRENT_LIST_DIR}/patch_repo.cmake) - -if(NOT VERSIONS_JSON) - include(${CMAKE_CURRENT_LIST_DIR}/read_versions.cmake) -endif() -read_repo_version(picolibc picolibc) -get_patch_command(${CMAKE_CURRENT_LIST_DIR}/.. picolibc picolibc_patch_command) - -FetchContent_Declare(picolibc - GIT_REPOSITORY "${picolibc_URL}" - GIT_TAG "${picolibc_TAG}" - GIT_SHALLOW "${picolibc_SHALLOW}" - GIT_PROGRESS TRUE - PATCH_COMMAND ${picolibc_patch_command} - # We only want to download the content, not configure it at this - # stage. picolibc will be built in many configurations using - # ExternalProject_Add using the sources that are checked out here. - SOURCE_SUBDIR do_not_add_picolibc_subdir -) -FetchContent_MakeAvailable(picolibc) -FetchContent_GetProperties(picolibc SOURCE_DIR FETCHCONTENT_SOURCE_DIR_PICOLIBC) diff --git a/qualcomm-software/cmake/generate_version_txt.cmake b/qualcomm-software/cmake/generate_version_txt.cmake deleted file mode 100644 index a7f229a23a58..000000000000 --- a/qualcomm-software/cmake/generate_version_txt.cmake +++ /dev/null @@ -1,48 +0,0 @@ -# The following line will look different depending on how you got this -# source file. If you got it from a Git repository then it will contain -# a string in the git pretty format with dollar symbols. If you got it -# from a source archive then the `git archive` command should have -# replaced the format string with the Git revision at the time the -# archive was created. This is configured in the .gitattributes file. -# In the former case, this script will run a Git command to find out the -# current revision. In the latter case the revision will be used as is. 
-set(cpullvm_COMMIT "$Format:%H$") - -function(get_commit_from_dir source_dir commit) - execute_process( - COMMAND git -C ${source_dir} rev-parse HEAD - OUTPUT_VARIABLE temp_commit - OUTPUT_STRIP_TRAILING_WHITESPACE - COMMAND_ERROR_IS_FATAL ANY - ) - set(${commit} ${temp_commit} PARENT_SCOPE) -endfunction() - -if(NOT ${cpullvm_COMMIT} MATCHES "^[a-f0-9]+$") - get_commit_from_dir("${CPULLVMToolchain_SOURCE_DIR}" cpullvm_COMMIT) -endif() - -get_commit_from_dir("${eld_SOURCE_DIR}" eld_COMMIT) - -if(ENABLE_LINUX_LIBRARIES) - get_commit_from_dir("${musl_SOURCE_DIR}" musl_COMMIT) - set(musl_version_string "* musl: ${musl_URL} (commit ${musl_COMMIT})\n") - - if(NOT LLVM_TOOLCHAIN_C_LIBRARY STREQUAL musl-embedded) - get_commit_from_dir("${musl-embedded_SOURCE_DIR}" musl-embedded_COMMIT) - set(musl-embedded_version_string "* musl-embedded: ${musl-embedded_URL} (commit ${musl-embedded_COMMIT})\n") - endif() -endif() - -# Supported libcs are all in a separate repo -set(base_library ${LLVM_TOOLCHAIN_C_LIBRARY}) - -get_commit_from_dir("${${base_library}_SOURCE_DIR}" ${base_library}_COMMIT) - -set(LLVM_TOOLCHAIN_C_LIBRARY_URL ${${base_library}_URL}) -set(LLVM_TOOLCHAIN_C_LIBRARY_COMMIT ${${base_library}_COMMIT}) - -configure_file( - ${CMAKE_CURRENT_LIST_DIR}/VERSION.txt.in - ${CMAKE_CURRENT_BINARY_DIR}/VERSION.txt -) diff --git a/qualcomm-software/cmake/get_canonical_riscv_march.cmake b/qualcomm-software/cmake/get_canonical_riscv_march.cmake deleted file mode 100644 index 3413a5b450fa..000000000000 --- a/qualcomm-software/cmake/get_canonical_riscv_march.cmake +++ /dev/null @@ -1,25 +0,0 @@ -# Retrieve the canonical `-march` string given a set of valid compiler flags. -# -# We require the following arguments: -# - `compiler_path` is expected to contain the path to the compiler to use. -# - `build_args` is expected to be a CMake list (';' separated) of the compiler -# commands to use (typically `--target=`, `-march=`, etc.). 
These arguments -# are expected to all be valid and be sufficient to determine the correct set -# of extensions. -# - `march_out` should contain the variable to be used to return the -# canonicalized arch string. -function(get_canonical_riscv_march compiler_path build_args march_out) - set(command_args ${build_args} "--print-enabled-extensions") - execute_process( - COMMAND ${compiler_path} - ${command_args} - RESULT_VARIABLE return_val - OUTPUT_VARIABLE extension_output - ) - if(NOT return_val EQUAL 0) - message(FATAL_ERROR "Unable to execute `--print-enabled-extensions` to retreive canonical `-march` string") - endif() - string(REGEX MATCH - "ISA String: ([A-Za-z0-9_]+)" out_var "${extension_output}") - set(${march_out} ${CMAKE_MATCH_1} PARENT_SCOPE) -endfunction() diff --git a/qualcomm-software/cmake/patch_repo.cmake b/qualcomm-software/cmake/patch_repo.cmake deleted file mode 100644 index c81a76bd9948..000000000000 --- a/qualcomm-software/cmake/patch_repo.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -# Function to generate a PATCH_COMMAND, calling the -# patch_repo.py script using a target set of patches. - -function(get_patch_command toolchain_root patch_dir patch_command_out) - set(patch_script ${toolchain_root}/cmake/patch_repo.py) - list(APPEND patch_script_args ${Python3_EXECUTABLE} ${patch_script}) - if(GIT_PATCH_METHOD STREQUAL "am") - list(APPEND patch_script_args "--method" "am") - elseif(GIT_PATCH_METHOD STREQUAL "apply") - list(APPEND patch_script_args "--method" "apply") - endif() - list(APPEND patch_script_args ${toolchain_root}/patches/${patch_dir}) - - set(${patch_command_out} ${patch_script_args} PARENT_SCOPE) -endfunction() diff --git a/qualcomm-software/cmake/patch_repo.py b/qualcomm-software/cmake/patch_repo.py deleted file mode 100644 index 035d8173e83c..000000000000 --- a/qualcomm-software/cmake/patch_repo.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python3 - -""" -Script to apply a set of patches to a git repository. 
-""" - -import argparse -import os -import pathlib -import subprocess -import sys - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "patchdir", - help="Set of patches to apply. This should be a directory containing one or more ordered *.patch files.", - ) - parser.add_argument( - "--repo_dir", - help="Directory of the git checkout, if not the current directory.", - ) - parser.add_argument( - "--method", - choices=["am", "apply"], - default="apply", - help="Git command to use. git am will add each patch as a commit, whereas git apply will leave patched changes staged.", - ) - parser.add_argument( - "--reset", - help="Clean and hard reset the repo to a specified commit before patching.", - ) - parser.add_argument( - "--restore_on_fail", - action="store_true", - help="If a patch in a series cannot be applied, restore the original state instead of leaving patches missing. Return code will be 2 instead of 1.", - ) - parser.add_argument( - "--3way", - action="store_true", - dest="three_way", - help="If the patch does not apply cleanly, fall back on 3-way merge.", - ) - args = parser.parse_args() - - # If the patch is valid but contain conflicts, using --3way --apply can apply - # the patch but leave conflict markers in the source for the user to resolve. - # This doesn't return an error code, making it compatible with this script's - # --restore_on_fail option, which relies on the error code from running --check. 
- if args.method == "apply" and args.restore_on_fail and args.three_way: - print("--restore_on_fail is incompatible with --3way using apply") - exit(1) - - if args.repo_dir: - git_cmd = ["git", "-C", args.repo_dir] - else: - git_cmd = ["git"] - - if args.reset: - reset_args = git_cmd + ["reset", "--quiet", "--hard", args.reset] - subprocess.check_output(reset_args) - clean_args = git_cmd + ["clean", "--quiet", "--force", "-dx", args.reset] - subprocess.check_output(clean_args) - - abs_patch_dir = os.path.abspath(args.patchdir) - patch_list = list(pathlib.Path(abs_patch_dir).glob("*.patch")) - patch_list.sort() - - if len(patch_list) == 0: - print(f"Found no patches to apply.") - else: - print(f"Found {len(patch_list)} patches to apply:") - print("\n".join(p.name for p in patch_list)) - - if args.method == "am": - merge_args = git_cmd + ["am", "-k", "--ignore-whitespace"] - if args.three_way: - merge_args.append("--3way") - for patch in patch_list: - merge_args.append(str(patch)) - p = subprocess.run(merge_args, capture_output=True, text=True) - print(p.stdout) - print(p.stderr) - - if p.returncode == 0: - print(f"All patches applied.") - sys.exit(0) - if args.restore_on_fail: - # Check that the operation can be aborted. - # git am doesn't give any specific return codes, - # so check for unresolved working files. - rebase_apply_path = os.path.join(".git", "rebase-apply") - if args.repo_dir: - rebase_apply_path = os.path.join(args.repo_dir, rebase_apply_path) - if os.path.isdir(rebase_apply_path): - print("Aborting git am...") - subprocess.run(git_cmd + ["am", "--abort"], check=True) - print(f"Abort successful.") - sys.exit(2) - else: - print("Unable to abort.") - sys.exit(1) - else: - applied_patches = [] - for current_patch in patch_list: - print(f"Checking {current_patch.name}...") - # Check that the patch applies before trying to apply it. 
- apply_check_args = git_cmd + [ - "apply", - "--ignore-whitespace", - "--check", - ] - if args.three_way: - apply_check_args.append("--3way") - apply_check_args.append(str(current_patch)) - p_check = subprocess.run(apply_check_args) - - if p_check.returncode == 0: - # Patch will apply. - print(f"Applying {current_patch.name}...") - apply_args = git_cmd + [ - "apply", - "--ignore-whitespace", - ] - if args.three_way: - apply_args.append("--3way") - apply_args.append(str(current_patch)) - p = subprocess.run(apply_args, check=True) - applied_patches.append(current_patch) - else: - # Patch won't apply. - print(f"Unable to apply {current_patch.name}") - if args.restore_on_fail: - # Remove any patches that have already been applied. - while len(applied_patches) > 0: - previous_patch = applied_patches.pop() - print(f"Reversing {previous_patch.name}...") - reverse_args = git_cmd + [ - "apply", - "--ignore-whitespace", - "--reverse", - ] - if args.three_way: - reverse_args.append("--3way") - reverse_args.append(str(previous_patch)) - p_check = subprocess.run(reverse_args, check=True) - print( - f"Rollback successful, failure occured on {current_patch.name}" - ) - sys.exit(2) - sys.exit(1) - print(f"All patches applied.") - - -main() diff --git a/qualcomm-software/cmake/read_versions.cmake b/qualcomm-software/cmake/read_versions.cmake deleted file mode 100644 index c100b112e3be..000000000000 --- a/qualcomm-software/cmake/read_versions.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# Read which revisions of the repos to use. -file(READ ${CMAKE_CURRENT_LIST_DIR}/../versions.json VERSIONS_JSON) -function(read_repo_version output_variable_prefix repo) - string(JSON url GET ${VERSIONS_JSON} "repos" "${repo}" "url") - string(JSON tag GET ${VERSIONS_JSON} "repos" "${repo}" "tag") - string(JSON tagType GET ${VERSIONS_JSON} "repos" "${repo}" "tagType") - if(tagType STREQUAL "commithash") - # GIT_SHALLOW doesn't work with commit hashes. 
- set(shallow OFF) - elseif(tagType STREQUAL "branch") - set(shallow ON) - # CMake docs recommend that "branch names and tags should - # generally be specified as remote names" - set(tag "origin/${tag}") - elseif(tagType STREQUAL "tag") - set(shallow ON) - else() - message(FATAL_ERROR "Unrecognised tagType ${tagType}") - endif() - - set(${output_variable_prefix}_URL "${url}" PARENT_SCOPE) - set(${output_variable_prefix}_TAG "${tag}" PARENT_SCOPE) - set(${output_variable_prefix}_SHALLOW "${shallow}" PARENT_SCOPE) -endfunction() diff --git a/qualcomm-software/embedded-multilib/CMakeLists.txt b/qualcomm-software/embedded-multilib/CMakeLists.txt deleted file mode 100644 index a33b242b8554..000000000000 --- a/qualcomm-software/embedded-multilib/CMakeLists.txt +++ /dev/null @@ -1,420 +0,0 @@ -# -# Copyright (c) 2024-2025, Arm Limited and affiliates. -# -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# - -# CMake build for a multilib layout of library variants, with each -# variant in a subdirectory and a multilib.yaml file to map flags to -# a variant. - -cmake_minimum_required(VERSION 3.20) - -project(embedded-multilib) - -# Root directory of the CMake scripts. -set(TOOLCHAIN_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/..) -# Root directory of the llvm-project source. -set(llvmproject_src_dir ${TOOLCHAIN_SOURCE_DIR}/..) - -# Cache variables to be set by user -set(MULTILIB_JSON "" CACHE STRING "JSON file to load library definitions from.") -set(ENABLE_VARIANTS "all" CACHE STRING "Semicolon separated list of variants to build, or \"all\". 
Must match entries in the json.") -set(C_LIBRARY "picolibc" CACHE STRING "Which C library to use.") -set_property(CACHE C_LIBRARY PROPERTY STRINGS picolibc musl-embedded) -set(PROJECT_PREFIX "${CMAKE_BINARY_DIR}/lib-builds" CACHE STRING "Directory to build subprojects in.") -option( - NUMERICAL_BUILD_NAMES - "Instead of using the full variant name to label build directories, use an index number. This may help shorten paths." - OFF -) -set(LLVM_BINARY_DIR "" CACHE PATH "Path to LLVM toolchain build or install root.") -option( - ENABLE_QEMU_TESTING - "Enable tests that use QEMU. This option is ON by default." - ON -) -option( - ENABLE_PARALLEL_LIB_CONFIG - "Run the library variant configuration steps in parallel." - ON -) -option( - ENABLE_PARALLEL_LIB_BUILD - "Run the library variant build steps in parallel." - OFF -) -set(PARALLEL_LIB_BUILD_LEVELS - "1" CACHE STRING - "If ENABLE_PARALLEL_LIB_BUILD is ON, this number of processes will be assigned to each variant built." -) -if(NOT CMAKE_GENERATOR MATCHES "Ninja") - if (ENABLE_PARALLEL_LIB_CONFIG OR ENABLE_PARALLEL_LIB_BUILD) - message(WARNING "Library build parallelization should only be enabled with the Ninja generator.") - endif() -endif() - -# If a compiler launcher such as ccache has been set, it should be -# passed down to each subproject build. -set(compiler_launcher_cmake_args "") -if(CMAKE_C_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER}") -endif() -if(CMAKE_CXX_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}") -endif() - -# Arguments to pass down to the library projects. 
-foreach(arg - LLVM_BINARY_DIR -) - if(${arg}) - list(APPEND passthrough_dirs "-D${arg}=${${arg}}") - endif() -endforeach() - -find_package(Python3 REQUIRED COMPONENTS Interpreter) # needed by fetch_*.cmake - -include(ExternalProject) -if(C_LIBRARY STREQUAL picolibc) - include(${TOOLCHAIN_SOURCE_DIR}/cmake/fetch_picolibc.cmake) - list(APPEND passthrough_dirs "-DFETCHCONTENT_SOURCE_DIR_PICOLIBC=${FETCHCONTENT_SOURCE_DIR_PICOLIBC}") -elseif(C_LIBRARY STREQUAL musl-embedded) - include(${TOOLCHAIN_SOURCE_DIR}/cmake/fetch_musl-embedded.cmake) - list(APPEND passthrough_dirs "-DFETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED=${FETCHCONTENT_SOURCE_DIR_MUSL-EMBEDDED}") -endif() - -# Target for any dependencies to build the runtimes project. -add_custom_target(runtimes-depends) - -# Create one target to run all the tests. -add_custom_target(check-${C_LIBRARY}) -add_custom_target(check-compiler-rt) -add_custom_target(check-cxx) -add_custom_target(check-cxxabi) -add_custom_target(check-unwind) - -add_custom_target(check-all) -add_dependencies( - check-all - check-${C_LIBRARY} - check-compiler-rt - check-cxx - check-cxxabi - check-unwind -) - -if(ENABLE_PARALLEL_LIB_CONFIG OR ENABLE_PARALLEL_LIB_BUILD) - # Additional targets to build the variant subprojects in parallel. - # The build steps can use multible jobs to compile in parallel, but - # the configuration steps are largely single threaded. This creates a - # bottleneck if each variant is built in series. - # It is significantly faster to run all the subproject configuration - # steps in parallel, run the build steps, then run the next set of - # configuration steps in parallel, etc. 
- set( - subtargets - compiler_rt-configure - compiler_rt-build - clib-configure - clib-build - cxxlibs-configure - cxxlibs-build - ) - set(subtarget_deps none ${subtargets}) - list(REMOVE_AT subtarget_deps 6) - - foreach(subtarget subtarget_dep IN ZIP_LISTS subtargets subtarget_deps) - add_custom_target(${subtarget}-all) - if(NOT subtarget_dep STREQUAL "none") - add_dependencies(${subtarget}-all ${subtarget_dep}-all) - endif() - endforeach() -endif() - -# Read the JSON file to load a multilib configuration. -file(READ ${MULTILIB_JSON} multilib_json_str) -string(JSON multilib_defs GET ${multilib_json_str} "libs") - -string(JSON lib_count LENGTH ${multilib_defs}) -math(EXPR lib_count_dec "${lib_count} - 1") - -include(${TOOLCHAIN_SOURCE_DIR}/cmake/get_canonical_riscv_march.cmake) - -foreach(lib_idx RANGE ${lib_count_dec}) - string(JSON lib_def GET ${multilib_defs} ${lib_idx}) - string(JSON variant GET ${lib_def} "variant") - set(additional_cmake_args "") - - # If a variant doesn't support all possible C library - # options, check if it should be skipped. - string(JSON variant_support ERROR_VARIABLE json_error GET ${lib_def} "libraries_supported") - if(NOT variant_support STREQUAL "libraries_supported-NOTFOUND") - # Replace colons with semi-colons so CMake comprehends the list. - string(REPLACE "," ";" variant_support ${variant_support}) - if(NOT C_LIBRARY IN_LIST variant_support) - continue() - endif() - endif() - - if(variant IN_LIST ENABLE_VARIANTS OR ENABLE_VARIANTS STREQUAL "all") - list(APPEND enabled_variants ${variant}) - string(JSON variant_multilib_flags GET ${lib_def} "flags") - # Placeholder libraries won't have a json, so store the error in - # a variable so a fatal error isn't generated. 
- string(JSON variant_json ERROR_VARIABLE json_error GET ${lib_def} "json") - - if(NOT variant_json STREQUAL "json-NOTFOUND") - # Sort by target triple - if(variant MATCHES "^aarch64") - set(parent_dir_name aarch64-none-elf) - elseif(variant MATCHES "^riscv32") - set(parent_dir_name riscv32-unknown-elf) - elseif(variant MATCHES "^riscv64") - set(parent_dir_name riscv64-unknown-elf) - else() - set(parent_dir_name arm-none-eabi) - endif() - set(destination_directory "${CMAKE_CURRENT_BINARY_DIR}/multilib/${parent_dir_name}/${variant}") - install( - DIRECTORY ${destination_directory} - DESTINATION ${parent_dir_name} - ) - set(variant_json_file ${CMAKE_CURRENT_SOURCE_DIR}/json/variants/${variant_json}) - - # Read info from the variant specific json. - file(READ ${variant_json_file} variant_json_str) - string(JSON test_executor GET ${variant_json_str} "args" "common" "TEST_EXECUTOR") - - # The multilib project can be configured to disable QEMU - # testing, which will need to override the settings from the json. - if(test_executor STREQUAL "qemu" AND NOT ${ENABLE_QEMU_TESTING}) - list(APPEND additional_cmake_args "-DENABLE_LIBC_TESTS=OFF" "-DENABLE_COMPILER_RT_TESTS=OFF" "-DENABLE_LIBCXX_TESTS=OFF") - set(read_ENABLE_LIBC_TESTS "OFF") - set(read_ENABLE_COMPILER_RT_TESTS "OFF") - set(read_ENABLE_LIBCXX_TESTS "OFF") - else() - # From the json, check which tests are enabled. 
- foreach(test_enable_var - ENABLE_LIBC_TESTS - ENABLE_COMPILER_RT_TESTS - ENABLE_LIBCXX_TESTS - ) - string(JSON read_${test_enable_var} ERROR_VARIABLE json_error GET ${variant_json_str} "args" ${C_LIBRARY} ${test_enable_var}) - if(read_${test_enable_var} STREQUAL "json-NOTFOUND") - string(JSON read_${test_enable_var} ERROR_VARIABLE json_error GET ${variant_json_str} "args" "common" ${test_enable_var}) - if(read_${test_enable_var} STREQUAL "json-NOTFOUND") - set(read_${test_enable_var} "OFF") - endif() - endif() - endforeach() - endif() - - if(NUMERICAL_BUILD_NAMES) - set(runtimes_id ${lib_idx}) - list(APPEND additional_cmake_args "-DVARIANT_BUILD_ID=${lib_idx}") - else() - set(runtimes_id ${variant}) - endif() - - ExternalProject_Add( - runtimes-${variant} - STAMP_DIR ${PROJECT_PREFIX}/runtimes/${runtimes_id}/stamp - BINARY_DIR ${PROJECT_PREFIX}/runtimes/${runtimes_id}/build - DOWNLOAD_DIR ${PROJECT_PREFIX}/runtimes/${runtimes_id}/dl - TMP_DIR ${PROJECT_PREFIX}/runtimes/${runtimes_id}/tmp - SOURCE_DIR ${TOOLCHAIN_SOURCE_DIR}/embedded-runtimes - INSTALL_DIR ${destination_directory} - DEPENDS runtimes-depends - CMAKE_ARGS - ${compiler_launcher_cmake_args} - ${passthrough_dirs} - ${additional_cmake_args} - -DVARIANT_JSON=${variant_json_file} - -DC_LIBRARY=${C_LIBRARY} - -DCMAKE_INSTALL_PREFIX= - -DPROJECT_PREFIX=${PROJECT_PREFIX} - STEP_TARGETS build install - USES_TERMINAL_CONFIGURE FALSE - USES_TERMINAL_BUILD TRUE - LIST_SEPARATOR , - CONFIGURE_HANDLED_BY_BUILD TRUE - TEST_EXCLUDE_FROM_MAIN TRUE - ) - - list(APPEND all_runtime_targets runtimes-${variant}) - - if(ENABLE_PARALLEL_LIB_CONFIG OR ENABLE_PARALLEL_LIB_BUILD) - # Create additional steps to configure/build the subprojects. - # These are collected to be run together, so that all the - # configuration steps can be run in parallel. - # Each step should depend on the previous, with the first depending on the pre-defined - # 'configure' step, and the pre-defined 'build' step depending on the last. 
- set(subtarget_deps configure ${subtargets} build) - list(SUBLIST subtarget_deps 0 6 subtarget_dependees) - list(SUBLIST subtarget_deps 2 6 subtarget_dependers) - - # First loop to add the steps and targets. - foreach(subtarget subtarget_dependee IN ZIP_LISTS subtargets subtarget_dependees) - # Enabling USES_TERMINAL puts the step in Ninja's "console" job pool, which - # prevents the steps from being run in parallel since each must be given - # exclusive access to the terminal. When disabled, the console won't be updated - # with any output from the step until it completes. - set(step_uses_terminal ON) - set(step_extra_env "") - if(${subtarget} MATCHES "-configure$" AND ENABLE_PARALLEL_LIB_CONFIG) - set(step_uses_terminal OFF) - elseif(${subtarget} MATCHES "-build$" AND ENABLE_PARALLEL_LIB_BUILD) - set(step_uses_terminal OFF) - set(step_extra_env ${CMAKE_COMMAND} -E env CMAKE_BUILD_PARALLEL_LEVEL=${PARALLEL_LIB_BUILD_LEVELS}) - endif() - ExternalProject_Add_Step( - runtimes-${variant} - ${subtarget} - COMMAND ${step_extra_env} ${CMAKE_COMMAND} --build --target ${subtarget} - DEPENDEES ${subtarget_dependee} - DEPENDERS build - USES_TERMINAL ${step_uses_terminal} - ) - ExternalProject_Add_StepTargets(runtimes-${variant} ${subtarget}) - add_dependencies(${subtarget}-all runtimes-${variant}-${subtarget}) - endforeach() - - # Second loop to set the steps that will depend on the new targets. - foreach(subtarget subtarget_depender IN ZIP_LISTS subtargets subtarget_dependers) - ExternalProject_Add_StepDependencies( - runtimes-${variant} - ${subtarget_depender} - ${subtarget}-all - ) - endforeach() - endif() - - # Add custom check targets. 
- set(check_targets "") - if(read_ENABLE_LIBC_TESTS) - list(APPEND check_targets check-${C_LIBRARY}) - endif() - if(read_ENABLE_COMPILER_RT_TESTS) - list(APPEND check_targets check-compiler-rt) - endif() - if(read_ENABLE_LIBCXX_TESTS) - list(APPEND check_targets check-cxx) - list(APPEND check_targets check-cxxabi) - list(APPEND check_targets check-unwind) - endif() - foreach(check_target ${check_targets}) - ExternalProject_Add_Step( - runtimes-${variant} - ${check_target} - COMMAND "${CMAKE_COMMAND}" --build --target ${check_target} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - ) - ExternalProject_Add_StepTargets(runtimes-${variant} ${check_target}) - ExternalProject_Add_StepDependencies( - runtimes-${variant} - ${check_target} - runtimes-${variant}-build - ) - add_custom_target(${check_target}-${variant}) - add_dependencies(${check_target} runtimes-${variant}-${check_target}) - add_dependencies(${check_target}-${variant} runtimes-${variant}-${check_target}) - endforeach() - - # Add the variant to the multilib yaml - string(APPEND multilib_yaml_content "- Dir: ${parent_dir_name}/${variant}\n") - string(APPEND multilib_yaml_content " Flags:\n") - string(REPLACE " " ";" multilib_flags_list ${variant_multilib_flags}) - foreach(flag ${multilib_flags_list}) - if(variant MATCHES "^riscv") - if(flag MATCHES "-march=") - get_canonical_riscv_march( - "${LLVM_BINARY_DIR}/bin/clang${CMAKE_EXECUTABLE_SUFFIX}" - "${multilib_flags_list}" - canonical_riscv_march - ) - set(flag "-march=${canonical_riscv_march}") - - # This forgoes any special handling of extension versions, but that seems - # fine as we don't support multiple versions of the same extensions, and we - # generally expect our compiler, libraries, and multilib file) to be coupled. 
- string(REPLACE "_" "_.*" expanded_riscv_march_match ${flag}) - string(APPEND riscv_march_mappings "- Match: ${expanded_riscv_march_match}.*\n") - string(APPEND riscv_march_mappings " Flags:\n") - string(APPEND riscv_march_mappings " - ${flag}\n") - endif() - endif() - - string(APPEND multilib_yaml_content " - ${flag}\n") - endforeach() - string(APPEND multilib_yaml_content " Group: stdlibs\n") - else() - # In place of a json, an error message is expected. - string(JSON variant_error_msg GET ${lib_def} "error") - - string(APPEND multilib_yaml_content "- Error: \"${variant_error_msg}\"\n") - string(APPEND multilib_yaml_content " Flags:\n") - string(REPLACE " " ";" multilib_flags_list ${variant_multilib_flags}) - foreach(flag ${multilib_flags_list}) - string(APPEND multilib_yaml_content " - ${flag}\n") - endforeach() - string(APPEND multilib_yaml_content " Group: stdlibs\n") - endif() - endif() - -endforeach() - -# Check that all variants that were configured to be enabled -# were actually enabled. -if(NOT ENABLE_VARIANTS STREQUAL "all") - foreach(expected_variant ${ENABLE_VARIANTS}) - if(NOT expected_variant IN_LIST enabled_variants) - message(FATAL_ERROR "Variant name ${expected_variant} not found in ${MULTILIB_JSON}") - endif() - endforeach() -endif() - -# Multilib file is generated in two parts. -# 1. Template is filled with multilib flags from json -configure_file( - ${CMAKE_CURRENT_SOURCE_DIR}/multilib.yaml.in - ${CMAKE_CURRENT_BINARY_DIR}/multilib-without-fpus.yaml - @ONLY -) - -# 2. multilib-generate.py maps compiler command line options to flags -add_custom_command( - OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/multilib-fpus.yaml - COMMAND ${Python3_EXECUTABLE} "${CMAKE_CURRENT_SOURCE_DIR}/multilib-generate.py" - "--clang=${LLVM_BINARY_DIR}/bin/clang${CMAKE_EXECUTABLE_SUFFIX}" - "--llvm-source=${llvmproject_src_dir}" - >> ${CMAKE_CURRENT_BINARY_DIR}/multilib-fpus.yaml -) - -# Combine the two parts. 
-add_custom_command( - OUTPUT - ${CMAKE_CURRENT_BINARY_DIR}/multilib/multilib.yaml - COMMAND - ${CMAKE_COMMAND} -E cat - ${CMAKE_CURRENT_BINARY_DIR}/multilib-without-fpus.yaml - ${CMAKE_CURRENT_BINARY_DIR}/multilib-fpus.yaml - > ${CMAKE_CURRENT_BINARY_DIR}/multilib/multilib.yaml - DEPENDS - ${CMAKE_CURRENT_BINARY_DIR}/multilib-without-fpus.yaml - ${CMAKE_CURRENT_BINARY_DIR}/multilib-fpus.yaml -) - -add_custom_target(multilib-yaml ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/multilib/multilib.yaml) -install( - FILES ${CMAKE_CURRENT_BINARY_DIR}/multilib/multilib.yaml - DESTINATION . -) diff --git a/qualcomm-software/embedded-multilib/json/multilib.json b/qualcomm-software/embedded-multilib/json/multilib.json deleted file mode 100644 index ef4f3bc10c09..000000000000 --- a/qualcomm-software/embedded-multilib/json/multilib.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "libs": [ - { - "variant": "aarch64a", - "json": "aarch64a.json", - "flags": "--target=aarch64-unknown-none-elf -fno-exceptions" - }, - { - "variant": "aarch64a_soft_nofp", - "json": "aarch64a_soft_nofp.json", - "flags": "--target=aarch64-unknown-none-elf -march=armvX+nofp -march=armvX+nosimd -mabi=aapcs-soft -fno-exceptions" - }, - { - "variant": "aarch64a_soft_nofp_pacret_bti", - "json": "aarch64a_soft_nofp_pacret_bti.json", - "flags": "--target=aarch64-unknown-none-elf -march=armv8.3-a -march=armvX+nofp -march=armvX+nosimd -mbranch-protection=pac-ret+leaf+bti -mabi=aapcs-soft -fno-exceptions" - }, - { - "variant": "aarch64a_pacret_bkey_bti", - "json": "aarch64a_pacret_bkey_bti.json", - "flags": "--target=aarch64-unknown-none-elf -march=armv8.5-a -mbranch-protection=pac-ret+leaf+b-key+bti -fno-exceptions" - }, - { - "variant": "armv7a_soft_neon", - "json": "armv7a_soft_neon.json", - "flags": "--target=thumbv7-unknown-none-eabi -mfpu=neon -fno-exceptions" - }, - { - "variant": "riscv32imac_ilp32_nopic", - "json": "riscv32imac_ilp32_nopic.json", - "flags": "--target=riscv32-unknown-unknown-elf -march=rv32imac 
-mabi=ilp32 -fno-pic -fno-exceptions", - "libraries_supported": "picolibc" - }, - { - "variant": "riscv32imac_zba_zbb_ilp32", - "json": "riscv32imac_zba_zbb_ilp32.json", - "flags": "--target=riscv32-unknown-unknown-elf -march=rv32imac_zba_zbb -mabi=ilp32 -fno-exceptions", - "libraries_supported": "picolibc" - }, - { - "variant": "riscv32imac_zba_zbb_ilp32_nopic", - "json": "riscv32imac_zba_zbb_ilp32_nopic.json", - "flags": "--target=riscv32-unknown-unknown-elf -march=rv32imac_zba_zbb -mabi=ilp32 -fno-pic -fno-exceptions", - "libraries_supported": "picolibc" - }, - { - "variant": "riscv64imac_lp64_nopic", - "json": "riscv64imac_lp64_nopic.json", - "flags": "--target=riscv64-unknown-unknown-elf -march=rv64imac -mabi=lp64 -fno-pic -fno-exceptions", - "libraries_supported": "picolibc" - } - ] -} diff --git a/qualcomm-software/embedded-multilib/json/variants/aarch64a.json b/qualcomm-software/embedded-multilib/json/variants/aarch64a.json deleted file mode 100644 index b1a09b357b38..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/aarch64a.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "aarch64a", - "VARIANT": "aarch64a", - "COMPILE_FLAGS": "-march=armv8-a -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "cortex-a57", - "FLASH_ADDRESS": "0x40000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x40400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - }, - "musl-embedded": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF", - "EXTRA_MUSL-EMBEDDED_CFLAGS": "-mstrict-align -D__QUIC_ENABLE_FLT_FOR_PRINT" - } - } -} diff --git 
a/qualcomm-software/embedded-multilib/json/variants/aarch64a_pacret_bkey_bti.json b/qualcomm-software/embedded-multilib/json/variants/aarch64a_pacret_bkey_bti.json deleted file mode 100644 index 13dd82bbf641..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/aarch64a_pacret_bkey_bti.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "aarch64a", - "VARIANT": "aarch64a_pacret_bkey_bti", - "COMPILE_FLAGS": "-march=armv8.5a -mbranch-protection=pac-ret+leaf+b-key+bti -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "cortex-a57", - "FLASH_ADDRESS": "0x40000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x40400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - }, - "musl-embedded": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF", - "EXTRA_MUSL-EMBEDDED_CFLAGS": "-mstrict-align -D__QUIC_ENABLE_FLT_FOR_PRINT" - } - } -} diff --git a/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp.json b/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp.json deleted file mode 100644 index c1c98396cbf8..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "aarch64a", - "VARIANT": "aarch64a_soft_nofp", - "COMPILE_FLAGS": "-march=armv8-a+nofp+nosimd -mabi=aapcs-soft -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "cortex-a57", - "FLASH_ADDRESS": "0x40000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x40400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": 
"minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "OFF", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - }, - "musl-embedded": { - "ENABLE_CXX_LIBS": "OFF", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF", - "EXTRA_MUSL-EMBEDDED_CONFIG_FLAGS": "--quic-aarch64-nofp", - "EXTRA_MUSL-EMBEDDED_CFLAGS": "-mstrict-align" - } - } -} diff --git a/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp_pacret_bti.json b/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp_pacret_bti.json deleted file mode 100644 index 965bfbb3f3be..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/aarch64a_soft_nofp_pacret_bti.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "aarch64a", - "VARIANT": "aarch64a_soft_nofp_pacret_bti", - "COMPILE_FLAGS": "-march=armv8.3a+nofp+nosimd -mbranch-protection=pac-ret+leaf+bti -mabi=aapcs-soft -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "cortex-a57", - "FLASH_ADDRESS": "0x40000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x40400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "OFF", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - }, - "musl-embedded": { - "ENABLE_CXX_LIBS": "OFF", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF", - "EXTRA_MUSL-EMBEDDED_CONFIG_FLAGS": "--quic-aarch64-nofp", - "EXTRA_MUSL-EMBEDDED_CFLAGS": "-mstrict-align" - } - } -} diff --git a/qualcomm-software/embedded-multilib/json/variants/armv7a_soft_neon.json b/qualcomm-software/embedded-multilib/json/variants/armv7a_soft_neon.json deleted file mode 100644 index 90934d1f92a8..000000000000 --- 
a/qualcomm-software/embedded-multilib/json/variants/armv7a_soft_neon.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "armv7a", - "VARIANT": "armv7a_soft_neon", - "COMPILE_FLAGS": "-mfloat-abi=softfp -march=armv7a -mthumb -mfpu=neon -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "none", - "QEMU_CPU": "cortex-a7", - "QEMU_PARAMS": "-m 1G", - "FLASH_ADDRESS": "0x00000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x20000000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - }, - "musl-embedded": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "OFF", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF", - "EXTRA_MUSL-EMBEDDED_CFLAGS": "-mno-unaligned-access" - } - } -} diff --git a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_ilp32_nopic.json b/qualcomm-software/embedded-multilib/json/variants/riscv32imac_ilp32_nopic.json deleted file mode 100644 index 204576a96809..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_ilp32_nopic.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "riscv32", - "VARIANT": "riscv32imac_ilp32_nopic", - "COMPILE_FLAGS": "-march=rv32imac -mabi=ilp32", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "rv32", - "QEMU_PARAMS": "-bios none", - "FLASH_ADDRESS": "0x80000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x80400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - } - } -} diff --git 
a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32.json b/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32.json deleted file mode 100644 index 82931eaab88e..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "riscv32", - "VARIANT": "riscv32imac_zba_zbb_ilp32_fpic", - "COMPILE_FLAGS": "-march=rv32imac_zba_zbb -mabi=ilp32 -fPIC", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "rv32", - "QEMU_PARAMS": "-bios none", - "FLASH_ADDRESS": "0x80000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x80400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - } - } -} diff --git a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32_nopic.json b/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32_nopic.json deleted file mode 100644 index ecc63f9b1823..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/riscv32imac_zba_zbb_ilp32_nopic.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "riscv32", - "VARIANT": "riscv32imac_zba_zbb_ilp32_nopic", - "COMPILE_FLAGS": "-march=rv32imac_zba_zbb -mabi=ilp32", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "rv32", - "QEMU_PARAMS": "-bios none", - "FLASH_ADDRESS": "0x80000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x80400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - } - } -} 
diff --git a/qualcomm-software/embedded-multilib/json/variants/riscv64imac_lp64_nopic.json b/qualcomm-software/embedded-multilib/json/variants/riscv64imac_lp64_nopic.json deleted file mode 100644 index c8e434bc8e84..000000000000 --- a/qualcomm-software/embedded-multilib/json/variants/riscv64imac_lp64_nopic.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "args": { - "common": { - "TARGET_ARCH": "riscv64", - "VARIANT": "riscv64imac_lp64", - "COMPILE_FLAGS": "-march=rv64imac -mabi=lp64 -mcmodel=medany", - "ENABLE_EXCEPTIONS": "OFF", - "ENABLE_RTTI": "ON", - "TEST_EXECUTOR": "qemu", - "QEMU_MACHINE": "virt", - "QEMU_CPU": "rv64", - "QEMU_PARAMS": "-bios none", - "FLASH_ADDRESS": "0x80000000", - "FLASH_SIZE": "0x00400000", - "RAM_ADDRESS": "0x80400000", - "RAM_SIZE": "0x00200000", - "LIBRARY_BUILD_TYPE": "minsizerelease" - }, - "picolibc": { - "ENABLE_CXX_LIBS": "ON", - "ENABLE_LIBC_TESTS": "ON", - "ENABLE_COMPILER_RT_TESTS": "OFF", - "ENABLE_LIBCXX_TESTS": "OFF" - } - } -} diff --git a/qualcomm-software/embedded-multilib/multilib-generate.py b/qualcomm-software/embedded-multilib/multilib-generate.py deleted file mode 100755 index 436e389b304b..000000000000 --- a/qualcomm-software/embedded-multilib/multilib-generate.py +++ /dev/null @@ -1,425 +0,0 @@ -#!/usr/bin/env python3 - -"""Auto-generate implications between command-line options options for -multilib.yaml.in. - -Each FPU name that clang knows about is mapped to all the FPU names -that clang considers to be a subset of it (determined by extracting -the cc1 command from `clang -###` for each one and looking at the set -of -target-feature options). - -An exception is that we don't consider any hardware FP configuration -to be compatible with -mfpu=none. It would work in most cases to -cross-call between code compiled for an FPU or no FPU, if you were -using the soft float ABI. 
But it wouldn't work in all cases: setjmp -needs to know whether to save FP registers in the jmp_buf, so a -non-FPU-aware setjmp would not behave correctly if linked into an -otherwise FPU-using application. Similarly for exception unwinding. So -we don't permit selecting an -mfpu=none library as a fallback for any -hard-FP library. - -However, it's fine for ABI purposes to mix code compiled for 16 and 32 -d-registers, because the extra 16 d-registers are caller-saved, so -setjmp and exceptions need not preserve them. Interrupt handlers would -have to preserve them, but our libraries don't define any. - -For architecture extension modifiers on the -march option, we expand these into -options of the form -march=armvX+[no]feature, for each feature which is listed -as enabled or disabled in the input options. Each of these new options has -exactly one feature, so the multilib file can mark a library as depending on -any set of by matching multiple options. The "armvX" architecture version isn't -a valid option, but that doesn't matter to multilib, and means that we don't -need to repeat the matching for every minor version. - -For architecture versions, we expand -march=armvX.Y-a+features to include every -lower or equal architecture version, so that if, for example, a library -requires armv8.3-a, then a link command targeting any later version will be -able to select it. These generated options don't include the feature modifiers, -which can be matched separately if a library requires them. -""" - -import argparse -import json -import os -import re -import shlex -import subprocess -import unittest -from dataclasses import dataclass - - -def get_fpu_list(args): - """Extract the list of FPUs from ARMTargetParser.def. - - Strategy: the intended use of ARMTargetParser.def in the actual - LLVM build is to run it through the C preprocessor with whatever - #defines will generate the output you want. So the most reliable - way to get what _we_ want is to do exactly the same. 
- - The output format I've chosen is actually JSON, because that's - close enough to C-like syntax that you can generate it easily - using cpp features like stringification, and also convenient for - Python to consume afterwards. - """ - - command = [ - args.clang, - # For this purpose we're only interested in the calls to the - # ARM_FPU macro in the input file, and want the first and - # third argument of each of those. - # - # The first argument will be a string literal giving the FPU - # name, which we copy into the JSON output still as a string - # literal. - # - # The third argument indicates the general FPU category, which - # we need so as to exclude FPUVersion::NONE. That is not - # already a string literal, so we stringify it in the - # preprocessor to make it legal JSON. - "-DARM_FPU(name,kind,version,...)=[name,#version],", - "-E", # preprocess - "-P", # don't output linemarkers - "-xc", # treat input as C, even though no .c filename extension - os.path.join( - args.llvm_source, - "llvm", - "include", - "llvm", - "TargetParser", - "ARMTargetParser.def", - ), - ] - - raw_output = subprocess.check_output(command) - - # The output of the above is a collection of JSON arrays each - # containing two strings, and each followed by a comma. Turn it - # into a single legal JSON declaration by deleting the final comma - # and wrapping it in array brackets. - json_output = b"[" + raw_output.strip().rstrip(b",") + b"]" - - # Filter the list of 2-tuples to exclude the FPU names that aren't - # FPUs. - for name, fputype in json.loads(json_output): - assert fputype.startswith("FPUVersion::"), ( - f"FPU type value {fputype} not of the expected form!\n" - "Has ARMTargetParser.def been refactored?" - ) - - if fputype != "FPUVersion::NONE": - yield name - - -def get_target_features(args, fpu): - """Return the set of feature names for a given FPU. 
- - Strategy: run a clang compile command with that FPU, including - the -### argument to print all the subsidiary command lines, and - extract the list of "-target-feature" "+foo" options from the - clang -cc1 command line in the output. This shows what low-level - LLVM feature names are enabled by that FPU. - - It will also include the feature names enabled by the CPU or - architecture we specified. But since we only care about which - FPUs are subsets of which other ones, that doesn't affect the - output, as long as the architecture is the same for all the FPUs - we do this with. - """ - - command = [ - args.clang, - "--target=arm-none-eabi", - "-march=armv7a", - "-mfpu=" + fpu, - "-S", # tell clang to do as little as possible - "-xc", # interpret input as C - "-", # read from standard input (not that we'll get that far) - "-###", # print all the command lines rather than actually doing it - ] - - output = subprocess.check_output(command, stderr=subprocess.STDOUT).decode() - - # Find the clang -cc1 command, and parse it into an argv list. - for line in output.splitlines(): - try: - words = shlex.split(line) - except ValueError: - # We expect that some of the output lines won't parse as - # valid shell syntax, because -### doesn't output *only* - # command lines. So this is fine; any line that doesn't - # parse is not the one we were looking for anyway. - continue - - if len(words) > 1 and words[1] == "-cc1": - # We've found the cc1 command. - break - else: - assert False, "no cc1 command found in output of: " + " ".join( - map(shlex.quote, command) - ) - - # Now we've found the clang command, go through it for - # -target-feature options. We only care about the ones that add - # rather than removing features, i.e. "-target-feature +foo" - # rather than "-target-feature -bar". 
- it = iter(words) - features = set() - for word in it: - if word == "-target-feature": - arg = next(it) - if arg.startswith("+"): - features.add(arg[1:]) - - assert len(features) > 0, ( - "This cc1 command contained no argument pairs of the form" - " '-target-feature +something':\n" - f"{line}\n" - "Has the clang -cc1 command-line syntax changed?" - ) - - return features - - -def generate_fpus(args): - # Collect all the data: make the list of FPU names, and the set of - # features that LLVM maps each one to. - fpu_features = { - fpuname: get_target_features(args, fpuname) for fpuname in get_fpu_list(args) - } - - # Now, for each FPU, find all the FPUs that are subsets of it - # (excluding itself). - sorted_fpus = list(sorted(fpu_features)) - for super_fpu in sorted_fpus: - subsets = [ - sub_fpu - for sub_fpu in sorted_fpus - if sub_fpu != super_fpu - and fpu_features[sub_fpu].issubset(fpu_features[super_fpu]) - ] - - # If this FPU has any subsets at all, write a multilib.yaml - # snippet that adds all the subset FPU flags if it sees the - # superset flag. - # - # The YAML is trivial enough that it's easier to do this by - # hand than to rely on everyone having python3-yaml available. - if len(subsets) > 0: - print("- Match: -mfpu=" + super_fpu) - print(" Flags:") - for sub_fpu in subsets: - print(" - -mfpu=" + sub_fpu) - print() - - -def get_extension_list(clang, triple): - """Extract the list of architecture extension flags from clang, by running - it with the --print-supported-extensions option.""" - - command = [ - clang, - "--target=" + triple, - "--print-supported-extensions", - ] - output = subprocess.check_output(command, stderr=subprocess.STDOUT).decode() - return get_extension_list_from_clang_output(output, command) - - -def get_extension_list_from_clang_output(output, command): - it = iter(output.splitlines()) - - # Read and discard a version dump, terminated by the expected - # title line of the table. 
- for line in it: - if line.startswith("All available -march extensions for"): - break - else: - assert False, ( - "Did not find expected header line in output of command:\n{}\n" - "Has clang --print-supported-extensions changed its output format?".format( - shlex.join(command) - ) - ) - - # Now expect a blank line, followed by a line containing the - # table's column headings. - assert next(it) == "", ( - "Did not find blank line after header in output of command:\n{}\n" - "Has clang --print-supported-extensions changed its output format?".format( - shlex.join(command) - ) - ) - - # Read the table heading line and remember what column position - # each heading starts at. - headers = next(it) - headers_split = re.split(r"( +)", headers) # split at 2 or more spaces - column = 0 - header_start = {} - header_end = {} - last_header_name = None - for substring in headers_split: - if substring.rstrip(" ") != "": - assert substring in { - # List of header names we recognize, so that if the - # spelling changes in some trivial way we find out - # rather than silently beginning to ignore a header - "Name", - "Architecture Feature(s)", - "Description", - }, ( - "Unrecognized table heading {!r} in output of command:\n{}\n" - "Has clang --print-supported-extensions changed its output format?".format( - substring, shlex.join(command) - ) - ) - - if last_header_name is not None: - header_end[last_header_name] = column - header_start[substring] = column - last_header_name = substring - column += len(substring) - - # Now read the table rows. - for line in it: - row = {} - for header_name, startcol in header_start.items(): - endcol = header_end.get(header_name) - text = line[startcol:] if endcol is None else line[startcol:endcol] - row[header_name] = text.rstrip(" ") - - # Mostly, we just return every extension name. An exception is - # that in AArch64, some extensions are shorthands for - # combinations of others. 
We can tell this because their - # 'Architecture Feature(s)' column is empty. - if "Architecture Feature(s)" in row and row["Architecture Feature(s)"] == "": - continue - - yield row["Name"] - - -class TestExtensionList(unittest.TestCase): - def testTwoColumn(self): - output = """\ -clang version information -which should be ignored -All available -march extensions for ARM - - Name Description - crc Enable support for CRC instructions - crypto Enable support for Cryptography extensions - sha2 Enable SHA1 and SHA256 support -""" - self.assertEqual( - list(get_extension_list_from_clang_output(output, ["dummy"])), - ["crc", "crypto", "sha2"], - ) - - def testThreeColumn(self): - output = """\ -clang version information -which should be ignored -All available -march extensions for AArch64 - - Name Architecture Feature(s) Description - aes FEAT_AES, FEAT_PMULL Enable AES support - sve2 FEAT_SVE2 Enable Scalable Vector Extension 2 (SVE2) instructions - sve2-aes Shorthand for +sve2+sve-aes - tme FEAT_TME Enable Transactional Memory Extension -""" - # Expect sve2-aes to be ignored, because it has no associated - # architecture features - self.assertEqual( - list(get_extension_list_from_clang_output(output, ["dummy"])), - ["aes", "sve2", "tme"], - ) - - -def generate_extensions(args): - aarch64_features = get_extension_list(args.clang, "aarch64-none-eabi") - aarch32_features = get_extension_list(args.clang, "arm-none-eabi") - all_features = list(aarch64_features) - # Combine the aarch64 and aarch32 lists without duplication. - # Casting to sets and merging would be simpler, but creates - # non-deterministic output. - all_features.extend( - feat for feat in list(aarch32_features) if feat not in all_features - ) - - print("# Expand -march=...+[no]feature... into individual options we can match") - print("# on. We use 'armvX' to represent a feature applied to any architecture, so") - print("# that these don't need to be repeated for every version. 
Libraries which") - print( - "# require a particular architecture version or profile should also match on the" - ) - print("# original option to check that.") - - for feature in all_features: - print(f"- Match: -march=armv.*\\+{feature}($|\\+.*)") - print(f" Flags:") - print(f" - -march=armvX+{feature}") - print(f"- Match: -march=armv.*\\+no{feature}($|\\+.*)") - print(f" Flags:") - print(f" - -march=armvX+no{feature}") - print() - - -@dataclass -class Version: - major: int - minor: int - profile: int - - def __str__(self): - if self.minor == 0: - return f"armv{self.major}-{self.profile}" - else: - return f"armv{self.major}.{self.minor}-{self.profile}" - - @property - def all_compatible(self): - yield self - for compat_minor in range(self.minor): - yield Version(self.major, compat_minor, self.profile) - if self.major == 9: - for compat_minor in range(self.minor + 5 + 1): - yield Version(self.major - 1, compat_minor, self.profile) - - -def generate_versions(args): - """Generate match blocks which allow selecting a library build for a - lower-version architecture, for the v8.x-A and v9.x-A minor versions.""" - versions = ( - [Version(8, minor, "a") for minor in range(10)] - + [Version(9, minor, "a") for minor in range(6)] - + [Version(8, minor, "r") for minor in range(1)] - ) - - for match_ver in versions: - print(f"- Match: -march={match_ver}.*") - print(f" Flags:") - for compat_ver in match_ver.all_compatible: - print(f" - -march={compat_ver}") - print() - - -def main(): - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("--clang", required=True, help="Path to clang executable.") - parser.add_argument( - "--llvm-source", - required=True, - help="Path to root of llvm-project source tree.", - ) - args = parser.parse_args() - - generate_fpus(args) - generate_extensions(args) - generate_versions(args) - - -if __name__ == "__main__": - main() diff --git 
a/qualcomm-software/embedded-multilib/multilib.yaml.in b/qualcomm-software/embedded-multilib/multilib.yaml.in deleted file mode 100644 index 881d708420df..000000000000 --- a/qualcomm-software/embedded-multilib/multilib.yaml.in +++ /dev/null @@ -1,214 +0,0 @@ -# -# Copyright (c) 2023-2025, Arm Limited and affiliates. -# -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# If you're reading this file under the name 'multilib.yaml.in' in the -# qualcomm-software/ source tree, then it's not valid -# YAML in its own right: it's a template that CMakeLists.txt will -# expand into a real 'multilib.yaml' containing a list of library -# variants and the flags that will select them. -# -# If you're reading it under the name 'multilib.yaml' in the build or -# install directory, then that substitution has been done. -# -# Comments in this file mostly make more sense from the -# multilib.yaml.in point of view. - -MultilibVersion: '1.0' - -# Make an exclusive group for library variants, to make sure we don't -# accidentally include two or more variants at once. -# -# Even if they all match the command-line options, putting two sets of -# include directories on the include path can cause build failure, -# because of the #include_next used in the libc++ headers. The first -# libc++ stdio.h (for example) will #include_next , which -# will find the second libc++ version. That won't do anything at all, -# because it has the same include-guard macro as the first, and so -# nothing will ever include the _libc_ stdio.h, which was what the -# #include_next was really looking for. 
-Groups: -- Name: stdlibs - Type: Exclusive - -# The list of library variants is substituted in by CMakeLists.txt, so -# that it can respect the LLVM_TOOLCHAIN_LIBRARY_VARIANTS setting and -# only include the set of libraries actually included in this build. - -Variants: -@multilib_yaml_content@ - -# Append an error case to the Variants list, matching anything that -# enables the RWPI option. We don't build any library variants using -# the RWPI style of access to writable static data, so if a user -# compiles with -frwpi, we don't have any compatible library. -- Error: RWPI library variants are not provided - Flags: - - -frwpi - Group: stdlibs - -Mappings: - -# Map extended sets of extensions back to the variants provided -# for RISC-V targets. -@riscv_march_mappings@ - -# Map higher architecture versions to subsets of them, so that a -# compatible library can be found even for architectures we don't have -# specific variants for. - -# v8-M Baseline is a superset of v6-M -- Match: --target=thumbv8m\.base-unknown-none-eabi - Flags: - - --target=thumbv6m-unknown-none-eabi - -# v8.2-M Mainline is a superset of v8.1-M Mainline, in both hard and -# soft float variants. -# -# Also, v8.1-M Mainline is also a superset of v8-M Mainline, which in -# turn is a superset of v7E-M, and then of plain v7-M. We have -# libraries for all those architecture versions, but not for every -# combination of them with FPUs, so in some cases it might be -# necessary to fall back to a lower architecture in order to provide -# the needed FPU support. 
-- Match: --target=thumbv8\.[2-9]m\.main-unknown-none-eabi - Flags: - - --target=thumbv8.1m.main-unknown-none-eabi - - --target=thumbv8m.main-unknown-none-eabi - - --target=thumbv7em-unknown-none-eabi - - --target=thumbv7m-unknown-none-eabi -- Match: --target=thumbv8\.[2-9]m\.main-unknown-none-eabihf - Flags: - - --target=thumbv8.1m.main-unknown-none-eabihf - - --target=thumbv8m.main-unknown-none-eabihf - - --target=thumbv7em-unknown-none-eabihf - - --target=thumbv7m-unknown-none-eabihf -- Match: --target=thumbv8\.1m\.main-unknown-none-eabi - Flags: - - --target=thumbv8m.main-unknown-none-eabi - - --target=thumbv7em-unknown-none-eabi - - --target=thumbv7m-unknown-none-eabi -- Match: --target=thumbv8\.1m\.main-unknown-none-eabihf - Flags: - - --target=thumbv8m.main-unknown-none-eabihf - - --target=thumbv7em-unknown-none-eabihf - - --target=thumbv7m-unknown-none-eabihf -- Match: --target=thumbv8m\.main-unknown-none-eabi - Flags: - - --target=thumbv7em-unknown-none-eabi - - --target=thumbv7m-unknown-none-eabi -- Match: --target=thumbv8m\.main-unknown-none-eabihf - Flags: - - --target=thumbv7em-unknown-none-eabihf - - --target=thumbv7m-unknown-none-eabihf -- Match: --target=thumbv7em-unknown-none-eabi - Flags: - - --target=thumbv7m-unknown-none-eabi -- Match: --target=thumbv7em-unknown-none-eabihf - Flags: - - --target=thumbv7m-unknown-none-eabihf -- Match: --target=thumbebv7em-unknown-none-eabi - Flags: - - --target=thumbebv7m-unknown-none-eabi -- Match: --target=thumbebv7em-unknown-none-eabihf - Flags: - - --target=thumbebv7m-unknown-none-eabihf - -# v7-A and v7-R include the ISA in the triple, but that doesn't matter for -# library selection, so canonicalise Thumb triples to ARM ones. 
-- Match: --target=thumbv7r-unknown-none-eabi - Flags: - - --target=armv7r-unknown-none-eabi -- Match: --target=thumbv7r-unknown-none-eabihf - Flags: - - --target=armv7r-unknown-none-eabihf -- Match: --target=thumbv7-unknown-none-eabi - Flags: - - --target=armv7-unknown-none-eabi -- Match: --target=thumbebv7-unknown-none-eabi - Flags: - - --target=armebv7-unknown-none-eabi -- Match: --target=thumbv7-unknown-none-eabihf - Flags: - - --target=armv7-unknown-none-eabihf -- Match: --target=thumbebv7-unknown-none-eabihf - Flags: - - --target=armebv7-unknown-none-eabihf -- Match: --target=thumbv4t-unknown-none-eabi - Flags: - - --target=armv4t-unknown-none-eabi -- Match: --target=thumbv4t-unknown-none-eabihf - Flags: - - --target=armv4t-unknown-none-eabihf -- Match: --target=thumbv5e-unknown-none-eabi - Flags: - - --target=armv5e-unknown-none-eabi -- Match: --target=thumbv5e-unknown-none-eabihf - Flags: - - --target=armv5e-unknown-none-eabihf - -# armv7ve is a GCC name for v7-A with the virtualisation extension, for library -# selection we treat it the same as v7-A. -- Match: --target=(arm|thumb)v7ve-unknown-none-eabi - Flags: - - --target=armv7-unknown-none-eabi -- Match: --target=(arm|thumb)ebv7ve-unknown-none-eabi - Flags: - - --target=armebv7-unknown-none-eabi -- Match: --target=(arm|thumb)v7ve-unknown-none-eabihf - Flags: - - --target=armv7-unknown-none-eabihf -- Match: --target=(arm|thumb)ebv7ve-unknown-none-eabihf - Flags: - - --target=armebv7-unknown-none-eabihf - -# Higher versions of the architecture such as v8-A and v9-A are a superset of -# v7-A. 
-- Match: --target=(arm|thumb)v(8|8\.[1-9]|9|9\.[1-9])a-unknown-none-eabi - Flags: - - --target=armv7-unknown-none-eabi -- Match: --target=(arm|thumb)ebv(8|8\.[1-9]|9|9\.[1-9])a-unknown-none-eabi - Flags: - - --target=armebv7-unknown-none-eabi -- Match: --target=(arm|thumb)v(8|8\.[1-9]|9|9\.[1-9])a-unknown-none-eabihf - Flags: - - --target=armv7-unknown-none-eabihf -- Match: --target=(arm|thumb)ebv(8|8\.[1-9]|9|9\.[1-9])a-unknown-none-eabihf - Flags: - - --target=armebv7-unknown-none-eabihf -- Match: --target=(arm|thumb)v(8|8\.[1-9]|9|9\.[1-9])r-unknown-none-eabi - Flags: - - --target=armv7r-unknown-none-eabi -- Match: --target=(arm|thumb)v(8|8\.[1-9]|9|9\.[1-9])r-unknown-none-eabihf - Flags: - - --target=armv7r-unknown-none-eabihf - -# -march extensions -- Match: -march=thumbv8\.[1-9]m\.main(\+[^\+]+)*\+fp16(\+[^\+]+)* - Flags: - - -march=thumbv8.1m.main+fp16 -- Match: -march=thumbv8\.[1-9]m\.main(\+[^\+]+)*\+mve(\+[^\+]+)* - Flags: - - -march=thumbv8.1m.main+mve -- Match: -march=thumbv8\.[1-9]m\.main(\+[^\+]+)*\+mve\.fp(\+[^\+]+)* - Flags: - - -march=thumbv8.1m.main+mve.fp -- Match: -march=thumbv8\.[1-9]m\.main(\+[^\+]+)*\+lob(\+[^\+]+)* - Flags: - - -march=thumbv8.1m.main+lob - -# -mbranch-protection options -- Match: -mbranch-protection=(standard|pac-ret(\+leaf)?(\+bti)?|bti(\+pac-ret(\+leaf)?)?) - Flags: - - -mbranch-protection=pac-ret+bti - -- Match: -Oz - Flags: - - -Os diff --git a/qualcomm-software/embedded-runtimes/CMakeLists.txt b/qualcomm-software/embedded-runtimes/CMakeLists.txt deleted file mode 100644 index ea8053db996c..000000000000 --- a/qualcomm-software/embedded-runtimes/CMakeLists.txt +++ /dev/null @@ -1,770 +0,0 @@ -# -# Copyright (c) 2024-2025, Arm Limited and affiliates. -# -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. 
are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# - -# CMake build for a library variant, combining a chosen C library -# with builtins from compiler-rt and libcx/libcxxabi/libunwind - -cmake_minimum_required(VERSION 3.20) - -project(embedded-runtimes) - -# Root directory of the CMake scrpts. -set(TOOLCHAIN_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/..) -# Root directory of the llvm-project source. -set(llvmproject_src_dir ${TOOLCHAIN_SOURCE_DIR}/..) - -# CMake arguments are loaded from the JSON file depending on which C -# library is used, so this must be set before the JSON is processed. -set(C_LIBRARY "picolibc" CACHE STRING "Which C library to use.") -set_property(CACHE C_LIBRARY PROPERTY STRINGS picolibc musl-embedded) - -set(VARIANT_JSON "" CACHE STRING "JSON file to load args from.") -if(VARIANT_JSON) - file(READ ${VARIANT_JSON} variant_json_read) - # Load arguments common to all libraries. - string(JSON json_args GET ${variant_json_read} "args" "common") - string(JSON json_args_len LENGTH ${json_args}) - math(EXPR json_args_len_dec "${json_args_len} - 1") - foreach(json_idx RANGE ${json_args_len_dec}) - string(JSON json_param MEMBER ${json_args} ${json_idx}) - string(JSON json_val GET ${json_args} ${json_param}) - string(JSON json_val_type TYPE ${json_args} ${json_param}) - set(${json_param}_def ${json_val}) - endforeach() - # Load arguments specific to the chosen library, overwriting any existing values. 
- string(JSON json_args GET ${variant_json_read} "args" ${C_LIBRARY}) - string(JSON json_args_len LENGTH ${json_args}) - math(EXPR json_args_len_dec "${json_args_len} - 1") - foreach(json_idx RANGE ${json_args_len_dec}) - string(JSON json_param MEMBER ${json_args} ${json_idx}) - string(JSON json_val GET ${json_args} ${json_param}) - string(JSON json_val_type TYPE ${json_args} ${json_param}) - set(${json_param}_def ${json_val}) - endforeach() -endif() - -# Default values will be populated by the json above. -# Any user specified options will override the default. -set(TARGET_ARCH ${TARGET_ARCH_def} CACHE STRING "Architecture being targetted.") -set(VARIANT ${VARIANT_def} CACHE STRING "Name for the variant, usually architecture + suffix.") -set(COMPILE_FLAGS ${COMPILE_FLAGS_def} CACHE STRING "Flags required to build the variant.") -set(TEST_EXECUTOR ${TEST_EXECUTOR_def} CACHE STRING "Program used to run tests.") -set_property(CACHE TEST_EXECUTOR PROPERTY STRINGS qemu) - -set(QEMU_MACHINE ${QEMU_MACHINE_def} CACHE STRING "Machine for QEMU to emulate.") -set(QEMU_CPU ${QEMU_CPU_def} CACHE STRING "CPU for QEMU to emulate.") -set(QEMU_PARAMS ${QEMU_PARAMS_def} CACHE STRING "Any additional parameters to pass to QEMU.") - -set(FLASH_ADDRESS ${FLASH_ADDRESS_def} CACHE STRING "") -set(FLASH_SIZE ${FLASH_SIZE_def} CACHE STRING "") -set(RAM_ADDRESS ${RAM_ADDRESS_def} CACHE STRING "") -set(RAM_SIZE ${RAM_SIZE_def} CACHE STRING "") - -set(ENABLE_EXCEPTIONS ${ENABLE_EXCEPTIONS_def} CACHE BOOL "Enable C++ exceptions.") -set(ENABLE_RTTI ${ENABLE_RTTI_def} CACHE BOOL "Enable C++ exceptions.") - -set(SUPPORTED_LIBRARY_BUILD_TYPES minsizerelease release releasewithdebuginfo) -if(NOT LIBRARY_BUILD_TYPE_def IN_LIST SUPPORTED_LIBRARY_BUILD_TYPES) - string(REPLACE ";" ", " SUPPORTED_LIBRARY_BUILD_TYPES_STR "${SUPPORTED_LIBRARY_BUILD_TYPES}") - message(FATAL_ERROR "Unsupported LIBRARY_BUILD_TYPE value. 
Supported values are: ${SUPPORTED_LIBRARY_BUILD_TYPES_STR}") -endif() -set(LIBRARY_BUILD_TYPE ${LIBRARY_BUILD_TYPE_def} CACHE STRING "Build type to use in library builds.") -set_property(CACHE LIBRARY_BUILD_TYPE PROPERTY STRINGS ${SUPPORTED_LIBRARY_BUILD_TYPES}) - -set(ENABLE_CXX_LIBS ${ENABLE_CXX_LIBS_def} CACHE BOOL "Build CXX libs") -set(ENABLE_LIBC_TESTS ${ENABLE_LIBC_TESTS_def} CACHE BOOL "Enable libc tests (picolibc only).") -set(ENABLE_COMPILER_RT_TESTS ${ENABLE_COMPILER_RT_TESTS_def} CACHE BOOL "Enable compiler-rt tests.") -set(ENABLE_LIBCXX_TESTS ${ENABLE_LIBCXX_TESTS_def} CACHE BOOL "Enable libcxx tests.") -set(LLVM_BINARY_DIR "" CACHE PATH "Path to LLVM toolchain root to build libraries with") - -# Set up a few arbitrary pass-through flags for musl-embedded. This probably -# isn't the best idea, but historically, our flag usage is such that it is a -# bit unclear how to do this in a more princicpled way. This should be -# revisited and probably cleaned up, but for now just mirror what we used to do. -set(EXTRA_MUSL-EMBEDDED_CONFIG_FLAGS ${EXTRA_MUSL-EMBEDDED_CONFIG_FLAGS_def} CACHE BOOL "Extra configure options for musl-embedded.") -set(EXTRA_MUSL-EMBEDDED_CFLAGS ${EXTRA_MUSL-EMBEDDED_CFLAGS_def} CACHE BOOL "Extra flags to be used when compiling musl-embedded.") - -set(PROJECT_PREFIX "subproj" CACHE STRING "Directory to build subprojects in.") -set(VARIANT_BUILD_ID "${VARIANT}" CACHE STRING "Name to use to identify build directories." ) - -# A '.' character is used in junit xml to split classes/groups. -# Variants such as armv8m.main need to be renamed. -string(REPLACE "." "_" variant_xml_name ${VARIANT}) - -# Temporary location to collect the libraries as they are built. -set(TEMP_LIB_DIR "${CMAKE_CURRENT_BINARY_DIR}/tmp_install") - -find_package(Python3 REQUIRED COMPONENTS Interpreter) - -include(ExternalProject) - -# If a compiler launcher such as ccache has been set, it should be -# passed down to each subproject build. 
-set(compiler_launcher_cmake_args "") -if(CMAKE_C_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_C_COMPILER_LAUNCHER=${CMAKE_C_COMPILER_LAUNCHER}") -endif() -if(CMAKE_CXX_COMPILER_LAUNCHER) - list(APPEND compiler_launcher_cmake_args "-DCMAKE_CXX_COMPILER_LAUNCHER=${CMAKE_CXX_COMPILER_LAUNCHER}") -endif() - -if(TARGET_ARCH MATCHES "^aarch64") - set(target_triple "aarch64-none-elf") - set(cpu_family aarch64) -elseif(TARGET_ARCH MATCHES "^riscv32") - set(target_triple "riscv32-unknown-elf") - set(cpu_family riscv32) -elseif(TARGET_ARCH MATCHES "^riscv64") - set(target_triple "riscv64-unknown-elf") - set(cpu_family riscv64) -else() - # Choose the target triple so that compiler-rt will do the - # right thing. We can't always put the exact target - # architecture in the triple, because compiler-rt's cmake - # system doesn't recognize every possible Arm architecture - # version. So mostly we just say 'arm' and control the arch - # version via -march=armv7m (or whatever). - # Exceptions are architectures pre-armv7, which compiler-rt expects to - # see in the triple because that's where it looks to decide whether to - # use specific assembly sources. - if(TARGET_ARCH MATCHES "^armv[4-6]") - set(target_triple "${TARGET_ARCH}-none-eabi") - else() - set(target_triple "arm-none-eabi") - endif() - if(COMPILE_FLAGS MATCHES "-mfloat-abi=hard") - # Also, compiler-rt looks in the ABI component of the - # triple to decide whether to use the hard float ABI. - set(target_triple "${target_triple}hf") - endif() - set(cpu_family arm) -endif() - -# Create a single target for all testing. If no testing is enabled, this -# will simply do nothing. -add_custom_target(check-all) - -# If any testing is enabled, prepare test executor settings. -if(ENABLE_LIBC_TESTS OR ENABLE_COMPILER_RT_TESTS OR ENABLE_LIBCXX_TESTS) - # Flags required to link tests. 
- if(C_LIBRARY STREQUAL picolibc) - if(TEST_EXECUTOR STREQUAL qemu) - set(picocrt "crt0-semihost") - endif() - set(test_link_flags "-nostartfiles -l${picocrt} -lsemihost -T picolibcpp.ld") - else() - message(FATAL_ERROR "Tests can only be enabled using picolibc.") - endif() - - set(external_lit_path "${LLVM_BINARY_DIR}/bin/llvm-lit") - set(default_lit_args "-sv --show-xfail --show-unsupported --show-excluded") - if (MSVC OR XCODE) - set(default_lit_args "${default_lit_args} --no-progress-bar") - endif() - set(LLVM_LIT_ARGS "${default_lit_args}" CACHE STRING "Default options for lit") - - if(TEST_EXECUTOR STREQUAL qemu) - if(TARGET_ARCH MATCHES "^aarch64") - find_program(QEMU_EXECUTABLE qemu-system-aarch64 REQUIRED) - elseif(TARGET_ARCH MATCHES "^riscv32") - find_program(QEMU_EXECUTABLE qemu-system-riscv32 REQUIRED) - elseif(TARGET_ARCH MATCHES "^riscv64") - find_program(QEMU_EXECUTABLE qemu-system-riscv64 REQUIRED) - else() - find_program(QEMU_EXECUTABLE qemu-system-arm REQUIRED) - endif() - - # Use colon as a separator because comma and semicolon are used for - # other purposes in CMake. - string(REPLACE " " ":" qemu_params_list "${QEMU_PARAMS}") - - set(test_executor_params --qemu-command ${QEMU_EXECUTABLE} --qemu-machine ${QEMU_MACHINE}) - if(QEMU_CPU) - list(APPEND test_executor_params --qemu-cpu ${QEMU_CPU}) - endif() - if(qemu_params_list) - list(APPEND test_executor_params "--qemu-params=${qemu_params_list}") - endif() - set( - lit_test_executor - ${CMAKE_CURRENT_SOURCE_DIR}/test-support/lit-exec-qemu.py - ${test_executor_params} - ) - endif() - list(JOIN lit_test_executor " " lit_test_executor) -endif() - -set(compile_arch_flags "--target=${target_triple} ${COMPILE_FLAGS} --sysroot ${TEMP_LIB_DIR}") -# Compiling the libraries benefits from some extra optimization -# flags, and requires a sysroot. -set(lib_compile_flags "${compile_arch_flags} -ffunction-sections -fdata-sections") - -# Generic target names for the C library. 
-# Declare these now, since compiler-rt requires the 'install' dependency. -add_custom_target(clib-configure) -add_custom_target(clib-build) -add_custom_target(clib-install) - -# At this point it's been asserted that LIBRARY_BUILD_TYPE value is valid. -if(LIBRARY_BUILD_TYPE STREQUAL minsizerelease) - set(LIBRARY_CMAKE_BUILD_TYPE MinSizeRel) - set(LIBRARY_MESON_BUILD_TYPE minsize) -elseif(LIBRARY_BUILD_TYPE STREQUAL release) - set(LIBRARY_CMAKE_BUILD_TYPE Release) - set(LIBRARY_MESON_BUILD_TYPE release) -elseif(LIBRARY_BUILD_TYPE STREQUAL releasewithdebuginfo) - set(LIBRARY_CMAKE_BUILD_TYPE RelWithDebInfo) - set(LIBRARY_MESON_BUILD_TYPE debugoptimized) -endif() - -############################################################################### -# compiler-rt -############################################################################### - -# We can't always put the exact target -# architecture in the triple, because compiler-rt's cmake -# system doesn't recognize every possible Arm architecture -# version. So mostly we just say 'arm' and control the arch -# version via -march=armv7m (or whatever). -# Exceptions are architectures pre-armv7, which compiler-rt expects to -# see in the triple because that's where it looks to decide whether to -# use specific assembly sources. -if(NOT target_triple MATCHES "^(aarch64-none-elf|arm-none-eabi|armv[4-6]|riscv32-unknown-elf|riscv64-unknown-elf)") - message(FATAL_ERROR "\ -Target triple name \"${target_triple}\" not compatible with compiler-rt. -Use -march to specify the architecture.") -endif() -# Also, compiler-rt looks in the ABI component of the -# triple to decide whether to use the hard float ABI. 
-if(flags MATCHES "-mfloat-abi=hard" AND NOT target_triple MATCHES "-eabihf$") - message(FATAL_ERROR "\ -Hard-float library with target triple \"${target_triple}\" must end \"-eabihf\"") -endif() -if(target_triple MATCHES "^riscv") - string(REPLACE "-unknown-" "-unknown-unknown-" normalized_target_triple ${target_triple}) -else() - string(REPLACE "-none-" "-unknown-none-" normalized_target_triple ${target_triple}) -endif() - -if(ENABLE_COMPILER_RT_TESTS) - # Generate xfails for compiler-rt. - set(compiler_rt_lit_xfails_path "${CMAKE_CURRENT_BINARY_DIR}/${VARIANT}_compiler-rt_lit_xfails.txt") - set(compiler_rt_lit_args "--path=${LLVM_BINARY_DIR}/bin @${compiler_rt_lit_xfails_path} ${LLVM_LIT_ARGS}") - add_custom_target( - compiler-rt-xfail-lit-args - COMMAND ${Python3_EXECUTABLE} - ${CMAKE_CURRENT_SOURCE_DIR}/test-support/xfails.py - --variant ${VARIANT} - --project compiler-rt - --libc ${C_LIBRARY} - --output-args ${compiler_rt_lit_xfails_path} - ) - - set(compiler_rt_test_flags "${lib_compile_flags} ${test_link_flags}") - set( - compiler_rt_test_cmake_args - -DCOMPILER_RT_INCLUDE_TESTS=ON - -DCOMPILER_RT_EMULATOR=${lit_test_executor} - -DCOMPILER_RT_TEST_COMPILER=${LLVM_BINARY_DIR}/bin/clang - -DCOMPILER_RT_TEST_COMPILER_CFLAGS=${compiler_rt_test_flags} - -DLLVM_INCLUDE_TESTS=ON - -DLLVM_LIT_ARGS=${compiler_rt_lit_args} - -DLLVM_EXTERNAL_LIT=${external_lit_path} - ) -endif() - -ExternalProject_Add( - compiler_rt - STAMP_DIR ${PROJECT_PREFIX}/compiler_rt/${VARIANT_BUILD_ID}/stamp - BINARY_DIR ${PROJECT_PREFIX}/compiler_rt/${VARIANT_BUILD_ID}/build - DOWNLOAD_DIR ${PROJECT_PREFIX}/compiler_rt/${VARIANT_BUILD_ID}/dl - TMP_DIR ${PROJECT_PREFIX}/compiler_rt/${VARIANT_BUILD_ID}/tmp - SOURCE_DIR ${llvmproject_src_dir}/runtimes - INSTALL_DIR compiler-rt/install - CMAKE_ARGS - ${compiler_launcher_cmake_args} - -DCMAKE_AR=${LLVM_BINARY_DIR}/bin/llvm-ar${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_ASM_COMPILER_TARGET=${target_triple} - -DCMAKE_ASM_FLAGS=${lib_compile_flags} - 
-DCMAKE_BUILD_TYPE=${LIBRARY_CMAKE_BUILD_TYPE} - -DCMAKE_CXX_COMPILER=${LLVM_BINARY_DIR}/bin/clang++${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_CXX_COMPILER_TARGET=${target_triple} - -DCMAKE_CXX_FLAGS=${lib_compile_flags} - -DCMAKE_C_COMPILER=${LLVM_BINARY_DIR}/bin/clang${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_C_COMPILER_TARGET=${target_triple} - -DCMAKE_C_FLAGS=${lib_compile_flags} - -DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE} - -DCMAKE_INSTALL_PREFIX= - -DCMAKE_NM=${LLVM_BINARY_DIR}/bin/llvm-nm${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_RANLIB=${LLVM_BINARY_DIR}/bin/llvm-ranlib${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_SYSTEM_NAME=Generic - -DCMAKE_TRY_COMPILE_TARGET_TYPE=STATIC_LIBRARY - -DCOMPILER_RT_BAREMETAL_BUILD=ON - -DCOMPILER_RT_BUILD_LIBFUZZER=OFF - -DCOMPILER_RT_BUILD_PROFILE=OFF - -DCOMPILER_RT_BUILD_SANITIZERS=OFF - -DCOMPILER_RT_BUILD_XRAY=OFF - -DCOMPILER_RT_DEFAULT_TARGET_ONLY=ON - -DLLVM_ENABLE_RUNTIMES=compiler-rt - -DRUNTIME_VARIANT_NAME=${VARIANT} - -DLLVM_ENABLE_PER_TARGET_RUNTIME_DIR=ON - ${compiler_rt_test_cmake_args} - STEP_TARGETS configure build install - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - USES_TERMINAL_INSTALL TRUE - LIST_SEPARATOR , - CONFIGURE_HANDLED_BY_BUILD TRUE - INSTALL_COMMAND ${CMAKE_COMMAND} --install . - # Copy compiler-rt lib directory, moving libraries out of their - # target-specific subdirectory. 
- COMMAND - ${CMAKE_COMMAND} - -E copy_directory - /lib/${normalized_target_triple} - "${TEMP_LIB_DIR}/lib" -) - -add_custom_target(check-compiler-rt) -add_dependencies(check-all check-compiler-rt) -if(ENABLE_COMPILER_RT_TESTS) - ExternalProject_Add_Step( - compiler_rt - check-compiler-rt - COMMAND "${CMAKE_COMMAND}" --build --target check-compiler-rt - COMMAND ${Python3_EXECUTABLE} - ${CMAKE_CURRENT_SOURCE_DIR}/test-support/modify-compiler-rt-xml.py - --dir - --variant ${variant_xml_name} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - ) - ExternalProject_Add_StepTargets(compiler_rt check-compiler-rt) - ExternalProject_Add_StepDependencies( - compiler_rt - check-compiler-rt - compiler_rt-build - clib-install - compiler-rt-xfail-lit-args - ) - add_dependencies(check-compiler-rt compiler_rt-check-compiler-rt) -endif() - -############################################################################### -# picolibc -############################################################################### - -if(C_LIBRARY STREQUAL picolibc) - include(${TOOLCHAIN_SOURCE_DIR}/cmake/fetch_picolibc.cmake) - include(${CMAKE_CURRENT_SOURCE_DIR}/to_meson_list.cmake) - - # For building picolibc use Meson. - # Although picolibc has support for building with CMake, the Meson code - # is more mature and works better with LLVM. - find_program(MESON_EXECUTABLE meson REQUIRED) - - if(CMAKE_INSTALL_MESSAGE STREQUAL NEVER) - set(MESON_INSTALL_QUIET "--quiet") - endif() - - if(LIBRARY_BUILD_TYPE MATCHES "minsizerelease") - # In the past we've built our size-optimized libraries at -Os but Meson - # will set -Oz when the `minsize` buildtype is used. Override that. - set(picolibc_opt_flags "-Os") - endif() - - # Set meson_c_args to a comma-separated list of the clang path - # and flags e.g. 
'path/to/clang', '--target=armv6m-none-eabi', - # '-march=armv6m' - set(picolibc_flags "${LLVM_BINARY_DIR}/bin/clang${CMAKE_EXECUTABLE_SUFFIX} ${lib_compile_flags}") - set(picolibcpp_flags "${LLVM_BINARY_DIR}/bin/clang++${CMAKE_EXECUTABLE_SUFFIX} ${lib_compile_flags}") - if(CMAKE_C_COMPILER_LAUNCHER) - set(picolibc_flags "${CMAKE_C_COMPILER_LAUNCHER} ${picolibc_flags}") - endif() - separate_arguments(picolibc_flags) - separate_arguments(picolibcpp_flags) - to_meson_list("${picolibc_flags}" picolibc_meson_flags) - to_meson_list("${picolibcpp_flags}" picolibcpp_meson_flags) - - if(ENABLE_LIBC_TESTS) - set(picolibc_test_executor_bin ${CMAKE_CURRENT_SOURCE_DIR}/test-support/picolibc-test-wrapper.py) - to_meson_list("${test_executor_params}" meson_test_executor_params) - endif() - - configure_file(${CMAKE_CURRENT_SOURCE_DIR}/meson-cross-build.txt.in ${CMAKE_CURRENT_BINARY_DIR}/meson-cross-build.txt @ONLY) - - ExternalProject_Add( - picolibc - STAMP_DIR ${PROJECT_PREFIX}/picolibc/${VARIANT_BUILD_ID}/stamp - BINARY_DIR ${PROJECT_PREFIX}/picolibc/${VARIANT_BUILD_ID}/build - DOWNLOAD_DIR ${PROJECT_PREFIX}/picolibc/${VARIANT_BUILD_ID}/dl - TMP_DIR ${PROJECT_PREFIX}/picolibc/${VARIANT_BUILD_ID}/tmp - SOURCE_DIR ${picolibc_SOURCE_DIR} - INSTALL_DIR ${TEMP_LIB_DIR} - DEPENDS compiler_rt-install - CONFIGURE_COMMAND - ${MESON_EXECUTABLE} - setup - -Dincludedir=include - -Dlibdir=lib - -Dspecsdir=none - -Dmultilib=false - -Ddebug=false - # Pass opt flags through c/cpp_args rather than in the cross file as - # we need them to override what Meson will set automatically. 
- -Dc_args=${picolibc_opt_flags} - -Dcpp_args=${picolibc_opt_flags} - -Dtests-enable-stack-protector=false - -Dtest-machine=${TEST_EXECUTOR} - -Dtests=false - --prefix - --cross-file ${CMAKE_CURRENT_BINARY_DIR}/meson-cross-build.txt - --buildtype=${LIBRARY_MESON_BUILD_TYPE} - - BUILD_COMMAND ${MESON_EXECUTABLE} compile - INSTALL_COMMAND ${MESON_EXECUTABLE} install ${MESON_INSTALL_QUIET} - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - USES_TERMINAL_INSTALL TRUE - LIST_SEPARATOR , - CONFIGURE_HANDLED_BY_BUILD TRUE - TEST_EXCLUDE_FROM_MAIN TRUE - STEP_TARGETS configure build install - ) - - add_custom_target(check-picolibc) - add_dependencies(check-all check-picolibc) - if(ENABLE_LIBC_TESTS) - # meson builds the tests at the same time as the library. - # So reconfigure to enable tests at a later point. - ExternalProject_Add_Step( - picolibc - compile-tests - COMMAND ${MESON_EXECUTABLE} setup -Dtests=true --reconfigure - COMMAND ${MESON_EXECUTABLE} compile -C - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ) - ExternalProject_Add_StepTargets(picolibc compile-tests) - ExternalProject_Add_StepDependencies( - picolibc - compile-tests - picolibc-build - compiler_rt-install - ) - - # Step target to run the picolibc test via meson. - ExternalProject_Add_Step( - picolibc - check-meson - COMMAND ${MESON_EXECUTABLE} test -C --no-rebuild - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - DEPENDEES compile-tests - ) - ExternalProject_Add_StepTargets(picolibc check-meson) - - # Generate lit wrappers for the tests. 
- set(picolibc_lit_dir ${CMAKE_CURRENT_BINARY_DIR}/picolibc_lit) - ExternalProject_Add_Step( - picolibc - gen-lit-tests - COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test-support/gen-picolibc-lit-tests.py - --meson ${MESON_EXECUTABLE} - --build - --output ${picolibc_lit_dir} - --name picolibc-${variant_xml_name} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - DEPENDEES compile-tests - ) - ExternalProject_Add_StepTargets(picolibc gen-lit-tests) - - # Generate xfails for picolibc. - set(picolibc_lit_xfails_path "${picolibc_lit_dir}/${VARIANT}_picolibc_lit_xfails.txt") - add_custom_target( - picolibc-xfail-lit-args - COMMAND ${Python3_EXECUTABLE} - ${CMAKE_CURRENT_SOURCE_DIR}/test-support/xfails.py - --variant ${VARIANT} - --project picolibc - --libc ${C_LIBRARY} - --output-args ${picolibc_lit_xfails_path} - ) - - set(check_lit_cmd "${Python3_EXECUTABLE} ${external_lit_path} ${picolibc_lit_dir} ${LLVM_LIT_ARGS} @${picolibc_lit_xfails_path}") - # LLVM_LIT_ARGS has space separated flags, but the - # step COMMAND needs a list of arguments. - separate_arguments(check_lit_cmd) - - # Step target to run the picolibc test via lit. 
- ExternalProject_Add_Step( - picolibc - check-lit - COMMAND ${check_lit_cmd} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - DEPENDEES gen-lit-tests - ) - ExternalProject_Add_StepTargets(picolibc check-lit) - ExternalProject_Add_StepDependencies( - picolibc - check-lit - picolibc-xfail-lit-args - ) - add_dependencies(check-picolibc picolibc-check-lit) - add_dependencies(clib-build ${C_LIBRARY}-compile-tests) - endif() - -endif() - -############################################################################### -# musl-embedded -############################################################################### -if(C_LIBRARY STREQUAL musl-embedded) - if(ENABLE_LIBC_TESTS) - message(FATAL_ERROR "Tests cannot be enabled using musl-embedded.") - endif() - - if(WIN32) - message(FATAL_ERROR "Building musl-embedded is not supported on Windows hosts") - endif() - - if(TARGET_ARCH MATCHES "^riscv") - message(FATAL_ERROR "Only Arm and AArch64 targets are supported for musl-embedded") - endif() - - include(${TOOLCHAIN_SOURCE_DIR}/cmake/fetch_musl-embedded.cmake) - - include(ProcessorCount) - set(make_flags) - ProcessorCount(nproc) - if(NOT nproc EQUAL 0) - set(make_flags -j${nproc}) - endif() - - if(LIBRARY_BUILD_TYPE STREQUAL minsizerelease) - set(musl-embedded_opt_flags "-Os") - elseif((LIBRARY_BUILD_TYPE STREQUAL release) OR (LIBRARY_BUILD_TYPE STREQUAL releasewithdebuginfo)) - set(musl-embedded_opt_flags "-O3") - endif() - - # FIXME: if we need the extra `uselocks`/`standalone` variants, we - # can add a passthrough here and make them separate variants. 
- set(musl_embedded_flags - --disable-wrapper - --quic-arm-baremetal - --disable-visibility - ${EXTRA_MUSL-EMBEDDED_CONFIG_FLAGS} - "CROSS_COMPILE=${LLVM_BINARY_DIR}/bin/llvm-" - "CC=${LLVM_BINARY_DIR}/bin/clang --target=${target_triple} -fuse-ld=eld" - "CFLAGS=${lib_compile_flags} ${musl-embedded_opt_flags} -fPIC -fvisibility=hidden -DVISIBILITY_HIDDEN -fno-rounding-math ${EXTRA_MUSL-EMBEDDED_CFLAGS}" - "LIBCC=${TEMP_LIB_DIR}/libclang_rt.builtins.a") - - ExternalProject_Add( - ${C_LIBRARY} - STAMP_DIR ${PROJECT_PREFIX}/${C_LIBRARY}/${VARIANT_BUILD_ID}/stamp - BINARY_DIR ${PROJECT_PREFIX}/${C_LIBRARY}/${VARIANT_BUILD_ID}/build - DOWNLOAD_DIR ${PROJECT_PREFIX}/${C_LIBRARY}/${VARIANT_BUILD_ID}/dl - TMP_DIR ${PROJECT_PREFIX}/${C_LIBRARY}/${VARIANT_BUILD_ID}/tmp - SOURCE_DIR ${musl-embedded_SOURCE_DIR} - INSTALL_DIR ${TEMP_LIB_DIR} - DEPENDS compiler_rt-install - CONFIGURE_COMMAND - /configure - --prefix=${TEMP_LIB_DIR} - ${musl_embedded_flags} - BUILD_COMMAND - make ${make_flags} - INSTALL_COMMAND - make install - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - USES_TERMINAL_INSTALL TRUE - # Always run the build command so that incremental builds are correct. 
- CONFIGURE_HANDLED_BY_BUILD TRUE - TEST_EXCLUDE_FROM_MAIN TRUE - STEP_TARGETS configure build install - ) -endif() - - -add_dependencies(clib-configure ${C_LIBRARY}-configure) -add_dependencies(clib-build ${C_LIBRARY}-build) -add_dependencies(clib-install ${C_LIBRARY}-install) - -############################################################################### -# runtimes (libcxx, libcxxabi, libunwind) -############################################################################### - -if(ENABLE_CXX_LIBS) - if(C_LIBRARY STREQUAL picolibc) - set(cxxlibs_extra_cmake_options - -DLIBCXXABI_ENABLE_THREADS=OFF - -DLIBCXX_ENABLE_MONOTONIC_CLOCK=OFF - -DLIBCXX_ENABLE_RANDOM_DEVICE=OFF - -DLIBCXX_ENABLE_THREADS=OFF - -DLIBCXX_ENABLE_WIDE_CHARACTERS=OFF - -DLIBUNWIND_ENABLE_THREADS=OFF - -DLIBCXXABI_ENABLE_EXCEPTIONS=${ENABLE_EXCEPTIONS} - -DLIBCXXABI_ENABLE_STATIC_UNWINDER=${ENABLE_EXCEPTIONS} - -DLIBCXX_ENABLE_EXCEPTIONS=${ENABLE_EXCEPTIONS} - -DLIBCXX_ENABLE_RTTI=${ENABLE_RTTI} - -DRUNTIMES_USE_LIBC=picolibc - ) - if(ENABLE_LIBCXX_TESTS) - # Generate xfails for libxx. 
- set(libcxx_lit_xfails_path "${CMAKE_CURRENT_BINARY_DIR}/${VARIANT}_libcxx_lit_xfails.txt") - set(libcxx_lit_args "@${libcxx_lit_xfails_path} ${LLVM_LIT_ARGS}" - ) - add_custom_target( - libcxx-xfail-lit-args - COMMAND ${Python3_EXECUTABLE} - ${CMAKE_CURRENT_SOURCE_DIR}/test-support/xfails.py - --variant ${VARIANT} - --project libcxx - --libc ${C_LIBRARY} - --output-arg ${libcxx_lit_xfails_path} - ) - set(cxxlibs_test_cmake_options - -DLIBCXX_TEST_CONFIG=${CMAKE_CURRENT_SOURCE_DIR}/test-support/llvm-libc++-picolibc.cfg.in - -DLIBCXX_TEST_PARAMS=executor=${lit_test_executor} - -DLIBCXXABI_TEST_CONFIG=${CMAKE_CURRENT_SOURCE_DIR}/test-support/llvm-libc++abi-picolibc.cfg.in - -DLIBCXXABI_TEST_PARAMS=executor=${lit_test_executor} - -DLIBUNWIND_TEST_CONFIG=${CMAKE_CURRENT_SOURCE_DIR}/test-support/llvm-libunwind-picolibc.cfg.in - -DLIBUNWIND_TEST_PARAMS=executor=${lit_test_executor} - -DRUNTIME_TEST_ARCH_FLAGS=${compile_arch_flags} - -DRUNTIME_TEST_LINK_FLAGS=${test_link_flags} - -DLLVM_LIT_ARGS=${libcxx_lit_args} - -DLLVM_EXTERNAL_LIT=${external_lit_path} - ) - endif() - elseif(C_LIBRARY STREQUAL musl-embedded) - set(cxxlibs_extra_cmake_options - -DLIBCXX_HAS_MUSL_LIBC=ON - -DLIBCXXABI_ENABLE_THREADS=OFF - -DLIBCXX_ENABLE_RANDOM_DEVICE=OFF - -DLIBCXX_ENABLE_THREADS=OFF - -DLIBCXX_ENABLE_FILESYSTEM=OFF - -DLIBCXX_ENABLE_LOCALIZATION=OFF - -DLIBUNWIND_ENABLE_THREADS=OFF - -DLIBCXXABI_ENABLE_EXCEPTIONS=${ENABLE_EXCEPTIONS} - -DLIBCXXABI_ENABLE_STATIC_UNWINDER=${ENABLE_EXCEPTIONS} - -DLIBCXX_ENABLE_EXCEPTIONS=${ENABLE_EXCEPTIONS} - -DLIBCXX_ENABLE_RTTI=${ENABLE_RTTI} - ) - set(lib_compile_flags "${lib_compile_flags} -fno-unroll-loops -fno-optimize-sibling-calls -D_GNU_SOURCE") - endif() - - ExternalProject_Add( - cxxlibs - STAMP_DIR ${PROJECT_PREFIX}/cxxlibs/${VARIANT_BUILD_ID}/stamp - BINARY_DIR ${PROJECT_PREFIX}/cxxlibs/${VARIANT_BUILD_ID}/build - DOWNLOAD_DIR ${PROJECT_PREFIX}/cxxlibs/${VARIANT_BUILD_ID}/dl - TMP_DIR ${PROJECT_PREFIX}/cxxlibs/${VARIANT_BUILD_ID}/tmp - 
SOURCE_DIR ${llvmproject_src_dir}/runtimes - INSTALL_DIR ${TEMP_LIB_DIR} - DEPENDS compiler_rt-install clib-install - CMAKE_ARGS - ${compiler_launcher_cmake_args} - -DCMAKE_AR=${LLVM_BINARY_DIR}/bin/llvm-ar${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_ASM_FLAGS=${lib_compile_flags} - -DCMAKE_BUILD_TYPE=${LIBRARY_CMAKE_BUILD_TYPE} - -DCMAKE_CXX_COMPILER=${LLVM_BINARY_DIR}/bin/clang++${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_CXX_COMPILER_TARGET=${target_triple} - -DCMAKE_CXX_FLAGS=${lib_compile_flags} - -DCMAKE_C_COMPILER=${LLVM_BINARY_DIR}/bin/clang${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_C_COMPILER_TARGET=${target_triple} - -DCMAKE_C_FLAGS=${lib_compile_flags} - -DCMAKE_INSTALL_MESSAGE=${CMAKE_INSTALL_MESSAGE} - -DCMAKE_INSTALL_PREFIX=${TEMP_LIB_DIR} - -DCMAKE_NM=${LLVM_BINARY_DIR}/bin/llvm-nm${CMAKE_EXECUTABLE_SUFFIX} - -DCMAKE_RANLIB=${LLVM_BINARY_DIR}/bin/llvm-ranlib${CMAKE_EXECUTABLE_SUFFIX} - # Let CMake know we're cross-compiling - -DCMAKE_SYSTEM_NAME=Generic - -DCMAKE_TRY_COMPILE_TARGET_TYPE=STATIC_LIBRARY - -DLIBCXXABI_BAREMETAL=ON - -DLIBCXXABI_ENABLE_ASSERTIONS=OFF - -DLIBCXXABI_ENABLE_SHARED=OFF - -DLIBCXXABI_ENABLE_STATIC=ON - -DLIBCXXABI_USE_COMPILER_RT=ON - -DLIBCXXABI_USE_LLVM_UNWINDER=ON - -DLIBCXXABI_SHARED_OUTPUT_NAME="c++abi-shared" - -DLIBCXX_CXX_ABI=libcxxabi - -DLIBCXX_ENABLE_FILESYSTEM=OFF - -DLIBCXX_ENABLE_SHARED=OFF - -DLIBCXX_ENABLE_STATIC=ON - -DLIBCXX_INCLUDE_BENCHMARKS=OFF - -DLIBCXX_SHARED_OUTPUT_NAME="c++-shared" - -DLIBUNWIND_ENABLE_ASSERTIONS=OFF - -DLIBUNWIND_ENABLE_SHARED=OFF - -DLIBUNWIND_ENABLE_STATIC=ON - -DLIBUNWIND_IS_BAREMETAL=ON - -DLIBUNWIND_REMEMBER_HEAP_ALLOC=ON - -DLIBUNWIND_USE_COMPILER_RT=ON - -DLIBUNWIND_SHARED_OUTPUT_NAME="unwind-shared" - -DLLVM_LIT_ARGS=${LLVM_LIT_ARGS} - -DLLVM_ENABLE_RUNTIMES=libcxxabi,libcxx,libunwind - -DRUNTIME_VARIANT_NAME=${VARIANT} - ${cxxlibs_extra_cmake_options} - ${cxxlibs_test_cmake_options} - STEP_TARGETS configure build install - USES_TERMINAL_CONFIGURE TRUE - USES_TERMINAL_BUILD TRUE - 
USES_TERMINAL_INSTALL TRUE - LIST_SEPARATOR , - CONFIGURE_HANDLED_BY_BUILD TRUE - ) - add_custom_target(check-cxx) - add_dependencies(check-all check-cxx) - add_custom_target(check-cxxabi) - add_dependencies(check-all check-cxxabi) - add_custom_target(check-unwind) - add_dependencies(check-all check-unwind) - if(ENABLE_LIBCXX_TESTS) - foreach(check_target check-cxx check-cxxabi check-unwind) - ExternalProject_Add_Step( - cxxlibs - ${check_target} - COMMAND "${CMAKE_COMMAND}" --build --target ${check_target} - USES_TERMINAL TRUE - EXCLUDE_FROM_MAIN TRUE - ALWAYS TRUE - ) - ExternalProject_Add_StepTargets(cxxlibs ${check_target}) - ExternalProject_Add_StepDependencies( - cxxlibs - ${check_target} - cxxlibs-install - libcxx-xfail-lit-args - ) - add_dependencies(${check_target} cxxlibs-${check_target}) - endforeach() - endif() - -else() # if not ENABLE_CXX_LIBS - - # The parent embedded-multilib cmake script will still want to invoke - # build targets like 'cxxlibs-configure', whether we actually have - # C++ libraries or not. So we should define them, even if they - # don't do anything. - add_custom_target(cxxlibs-configure) - add_custom_target(cxxlibs-build) -endif() - -install( - DIRECTORY ${TEMP_LIB_DIR}/ - DESTINATION . 
-) diff --git a/qualcomm-software/embedded-runtimes/meson-cross-build.txt.in b/qualcomm-software/embedded-runtimes/meson-cross-build.txt.in deleted file mode 100644 index bd7397f23b43..000000000000 --- a/qualcomm-software/embedded-runtimes/meson-cross-build.txt.in +++ /dev/null @@ -1,31 +0,0 @@ -[binaries] -c = [@picolibc_meson_flags@, '-nostdlib'] -cpp = [@picolibcpp_meson_flags@, '-nostdlib'] -c_ld = 'eld' -cpp_ld = 'eld' -ar = '@LLVM_BINARY_DIR@/bin/llvm-ar@CMAKE_EXECUTABLE_SUFFIX@' -strip = '@LLVM_BINARY_DIR@/bin/llvm-strip@CMAKE_EXECUTABLE_SUFFIX@' -# only needed to run tests -# setting stdin to /dev/null prevents qemu from fiddling with the echo bit of -# the parent terminal -exe_wrapper = [ - 'sh', - '-c', - 'test -z "$PICOLIBC_TEST" || @Python3_EXECUTABLE@ @picolibc_test_executor_bin@ "$@" < /dev/null', - '@picolibc_test_executor_bin@', - @meson_test_executor_params@] - -[host_machine] -system = 'none' -cpu_family = '@cpu_family@' -cpu = '@cpu_family@' -endian = 'little' - -[properties] -libgcc ='-lclang_rt.builtins' -skip_sanity_check = true -has_link_defsym = true -default_flash_addr = '@FLASH_ADDRESS@' -default_flash_size = '@FLASH_SIZE@' -default_ram_addr = '@RAM_ADDRESS@' -default_ram_size = '@RAM_SIZE@' diff --git a/qualcomm-software/embedded-runtimes/test-support/gen-picolibc-lit-tests.py b/qualcomm-software/embedded-runtimes/test-support/gen-picolibc-lit-tests.py deleted file mode 100644 index 3ac4b14bd312..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/gen-picolibc-lit-tests.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python3 - -# SPDX-FileCopyrightText: Copyright 2025 Arm Limited and/or its affiliates - -"""Helper script to generate a lit test suite from a meson project's tests. 
- -The lit testing infratructure has a number of features that it would -be useful to apply to meson test suite, such as filtering tests, -setting particular tests to be expected to fail, and controlling -whether or not testing should continue on a failure. -Since meson can run tests individually and provide a list of all -tests, it is possible to run the same tests through lit, with each -individual test simply invoking meson. -This allows a project built using meson to share the same test -infrastructure as the other LLVM projects.""" - -import argparse -import os -import subprocess - - -def main(): - arg_parser = argparse.ArgumentParser( - prog="meson_to_lit", - description="A script that generates a set of lit tests for a meson test suite.", - ) - arg_parser.add_argument( - "--meson", - required=True, - help="Path to meson.", - ) - arg_parser.add_argument( - "--build", - required=True, - help="Path to meson build directory.", - ) - arg_parser.add_argument( - "--output", - required=True, - help="Path to write tests to.", - ) - arg_parser.add_argument( - "--name", - required=True, - help="Name to give the lit suite.", - ) - arg_parser.add_argument( - "--timeout-multiplier", type=float, help="Timeout multiplier (float)." - ) - - args = arg_parser.parse_args() - - # Ensure the output location exists. - os.makedirs(os.path.join(args.output, "tests"), exist_ok=True) - - # Get the test list from meson. - p = subprocess.run( - [args.meson, "test", "--list"], - cwd=args.build, - capture_output=True, - check=True, - text=True, - ) - - for line in p.stdout.splitlines(): - # Meson lists tests in the format of - # [SUBPROJECT]:[PATH] / [TESTNAME] - # e.g. picolibc:semihost / semihost-argv - # The testnames should be unique, so only the name is needed. 
- subproj, full_testname = line.split(":", maxsplit=1) - testname = full_testname.split(" / ")[-1] - with open( - os.path.join(args.output, "tests", testname + ".test"), - "w", - encoding="utf-8", - ) as f: - # Invoke meson to run the test. - # Set --logbase so that each has a unique log name. - cmd = f"# RUN: {args.meson} test -C {args.build} {testname} --logbase {testname} --no-rebuild" - if args.timeout_multiplier and args.timeout_multiplier != 1: - cmd += f" -t {args.timeout_multiplier}" - f.write(f"{cmd}\n") - - # Simple lit config to run the tests. - cfg_txt = """import lit.formats -import lit.llvm -import os - -lit.llvm.initialize(lit_config, config) - -config.name = "%CONFIG_NAME%" -config.suffixes = [".test"] -config.test_format = lit.formats.ShTest(not lit.llvm.llvm_config.use_lit_shell) -config.test_source_root = os.path.join(os.path.dirname(__file__), "tests") -""" - with open(os.path.join(args.output, "lit.cfg.py"), "w", encoding="utf-8") as f: - f.write(cfg_txt.replace("%CONFIG_NAME%", args.name)) - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/embedded-runtimes/test-support/lit-exec-qemu.py b/qualcomm-software/embedded-runtimes/test-support/lit-exec-qemu.py deleted file mode 100755 index b8f8a5b9d21b..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/lit-exec-qemu.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2023, Arm Limited and affiliates. - -# This script is a bridge between lit-based tests of LLVM C++ runtime libraries -# (libc++abi, libunwind, libc++) and QEMU. It must handle the same command-line -# arguments as llvm-project/libcxx/utils/run.py. 
- -from run_qemu import run_qemu -import argparse -import pathlib -import sys - - -def main(): - parser = argparse.ArgumentParser(description="Run a single test using qemu") - parser.add_argument("--qemu-command", required=True, help="qemu-system- path") - parser.add_argument( - "--qemu-machine", - required=True, - help="name of the machine to pass to QEMU", - ) - parser.add_argument( - "--qemu-cpu", required=False, help="name of the cpu to pass to QEMU" - ) - parser.add_argument( - "--qemu-params", - required=False, - help='list of arguments to pass to qemu, separated with ":"', - ) - parser.add_argument( - "--timeout", - type=int, - default=900, - help="timeout, in seconds (default: 900)", - ) - parser.add_argument( - "--execdir", - type=pathlib.Path, - default=pathlib.Path.cwd(), - help="directory to run the program from", - ) - parser.add_argument( - "--codesign_identity", - type=str, - help="ignored, used for compatibility with libc++ tests", - ) - parser.add_argument( - "--env", - type=str, - nargs="*", - help="ignored, used for compatibility with libc++ tests", - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Print verbose output. 
This may affect test result, as the output " - "will be added to the output of the test.", - ) - parser.add_argument( - "--trace", - type=str, - default=None, - help="File to write execution trace to (slows execution significantly)", - ) - parser.add_argument("image", help="image file to execute") - parser.add_argument( - "arguments", - nargs=argparse.REMAINDER, - default=[], - help="optional arguments for the image", - ) - args = parser.parse_args() - ret_code = run_qemu( - args.qemu_command, - args.qemu_machine, - args.qemu_cpu, - args.qemu_params.split(":") if args.qemu_params else [], - args.image, - [args.image] + args.arguments, - args.timeout, - args.execdir, - args.verbose, - args.trace, - ) - sys.exit(ret_code) - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/embedded-runtimes/test-support/llvm-libc++-picolibc.cfg.in b/qualcomm-software/embedded-runtimes/test-support/llvm-libc++-picolibc.cfg.in deleted file mode 100644 index 6737667803d1..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/llvm-libc++-picolibc.cfg.in +++ /dev/null @@ -1,49 +0,0 @@ -# This testing configuration handles running the test suite against LLVM's libc++ -# using a static library. 
- -lit_config.load_config(config, '@CMAKE_CURRENT_BINARY_DIR@/cmake-bridge.cfg') - -config.name = 'libc++-@RUNTIME_VARIANT_NAME@' - -config.substitutions.append(('%{libc-include}', '@CMAKE_INSTALL_PREFIX@/include')) -config.substitutions.append(('%{libc-lib}', '@CMAKE_INSTALL_PREFIX@/lib')) -config.substitutions.append(('%{libc-extra-link-flags}', '@RUNTIME_TEST_LINK_FLAGS@')) - -config.substitutions.append(('%{flags}', '@RUNTIME_TEST_ARCH_FLAGS@')) -config.substitutions.append(('%{compile_flags}', - '-nostdinc++ -I %{include-dir} -I %{target-include-dir} -I %{libcxx-dir}/test/support' - ' -isystem %{libc-include}' - - # Disable warnings in cxx_atomic_impl.h: - # "large atomic operation may incur significant performance penalty; the - # access size (4 bytes) exceeds the max lock-free size (0 bytes)" - ' -Wno-atomic-alignment' - - # Various libc++ headers check for the definition of _NEWLIB_VERSION - # which for picolibc is defined in picolibc.h. - ' -include picolibc.h' -)) -config.substitutions.append(('%{link_flags}', - ' -nostdlib++ -L %{lib-dir}' - ' -lc++ -lc++abi' - ' -nostdlib -L %{libc-lib}' - ' -lc -lm -lclang_rt.builtins' - ' %{libc-extra-link-flags}' -)) -config.substitutions.append(('%{exec}', - '%{executor} --execdir %{temp} -- ' -)) - -# Long tests are prohibitively slow when run via emulation. 
-config.long_tests = False -config.large_tests = False - -import os, site -site.addsitedir(os.path.join('@LIBCXX_SOURCE_DIR@', 'utils')) -import libcxx.test.params, libcxx.test.config -libcxx.test.config.configure( - libcxx.test.params.DEFAULT_PARAMETERS, - libcxx.test.features.DEFAULT_FEATURES, - config, - lit_config -) diff --git a/qualcomm-software/embedded-runtimes/test-support/llvm-libc++abi-picolibc.cfg.in b/qualcomm-software/embedded-runtimes/test-support/llvm-libc++abi-picolibc.cfg.in deleted file mode 100644 index b2a18dfe1baa..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/llvm-libc++abi-picolibc.cfg.in +++ /dev/null @@ -1,37 +0,0 @@ -# This testing configuration handles running the test suite against LLVM's libc++abi -# using a static library. - -lit_config.load_config(config, '@CMAKE_CURRENT_BINARY_DIR@/cmake-bridge.cfg') - -config.name = 'libc++abi-@RUNTIME_VARIANT_NAME@' - -config.substitutions.append(('%{libc-include}', '@CMAKE_INSTALL_PREFIX@/include')) -config.substitutions.append(('%{libc-lib}', '@CMAKE_INSTALL_PREFIX@/lib')) -config.substitutions.append(('%{libc-extra-link-flags}', '@RUNTIME_TEST_LINK_FLAGS@')) - -config.substitutions.append(('%{flags}', '@RUNTIME_TEST_ARCH_FLAGS@')) -config.substitutions.append(('%{compile_flags}', - '-nostdinc++ -I %{include} -I %{cxx-include} -I %{cxx-target-include} %{maybe-include-libunwind} ' - ' -I %{libcxx}/test/support -I %{libcxx}/src -D_LIBCPP_ENABLE_CXX17_REMOVED_UNEXPECTED_FUNCTIONS' - ' -isystem %{libc-include}' -)) -config.substitutions.append(('%{link_flags}', - ' -nostdlib++ -L %{lib}' - ' -lc++ -lc++abi' - ' -nostdlib -L %{libc-lib}' - ' -lc -lm -lclang_rt.builtins' - ' %{libc-extra-link-flags}' -)) -config.substitutions.append(('%{exec}', - '%{executor} --execdir %{temp} -- ' -)) - -import os, site -site.addsitedir(os.path.join('@LIBCXXABI_LIBCXX_PATH@', 'utils')) -import libcxx.test.params, libcxx.test.config -libcxx.test.config.configure( - 
libcxx.test.params.DEFAULT_PARAMETERS, - libcxx.test.features.DEFAULT_FEATURES, - config, - lit_config -) diff --git a/qualcomm-software/embedded-runtimes/test-support/llvm-libunwind-picolibc.cfg.in b/qualcomm-software/embedded-runtimes/test-support/llvm-libunwind-picolibc.cfg.in deleted file mode 100644 index b9cc04ba99b6..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/llvm-libunwind-picolibc.cfg.in +++ /dev/null @@ -1,50 +0,0 @@ -# -# Configuration file for running the libunwind tests against the static library. -# - -lit_config.load_config(config, '@CMAKE_CURRENT_BINARY_DIR@/cmake-bridge.cfg') - -config.name = 'libunwind-@RUNTIME_VARIANT_NAME@' - -config.substitutions.append(('%{libc-include}', '@CMAKE_INSTALL_PREFIX@/include')) -config.substitutions.append(('%{libc-lib}', '@CMAKE_INSTALL_PREFIX@/lib')) -config.substitutions.append(('%{libc-extra-link-flags}', '@RUNTIME_TEST_LINK_FLAGS@')) - -compile_flags = [] - -if @LIBUNWIND_ENABLE_CET@: - compile_flags.append('-fcf-protection=full') - -# Stack unwinding tests need unwinding tables and these are not generated by default on all targets. 
-compile_flags.append('-funwind-tables') - -local_sysroot = '@CMAKE_OSX_SYSROOT@' or '@CMAKE_SYSROOT@' -config.substitutions.append(('%{flags}', - '@RUNTIME_TEST_ARCH_FLAGS@' + - (' -isysroot {}'.format(local_sysroot) if local_sysroot else '') -)) -config.substitutions.append(('%{compile_flags}', - '-nostdinc++ -I %{include} ' - ' -isystem %{libc-include} ' - + ' '.join(compile_flags) -)) -config.substitutions.append(('%{link_flags}', - ' -nostdlib++ -L %{lib}' - ' -lc++ -lc++abi -lunwind' - ' -nostdlib -L %{libc-lib}' - ' -lc -lm -lclang_rt.builtins' - ' %{libc-extra-link-flags}' -)) -config.substitutions.append(('%{exec}', - '%{executor} --execdir %{temp} -- ' -)) - -import os, site -site.addsitedir(os.path.join('@LIBUNWIND_LIBCXX_PATH@', 'utils')) -import libcxx.test.params, libcxx.test.config -libcxx.test.config.configure( - libcxx.test.params.DEFAULT_PARAMETERS, - libcxx.test.features.DEFAULT_FEATURES, - config, - lit_config -) diff --git a/qualcomm-software/embedded-runtimes/test-support/modify-compiler-rt-xml.py b/qualcomm-software/embedded-runtimes/test-support/modify-compiler-rt-xml.py deleted file mode 100644 index d14259958d4d..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/modify-compiler-rt-xml.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python3 - -# Helper script to modify the xml results from compiler-rt. - -# compiler-rt always puts all the test results into the "compiler-rt" -# testsuite in the junit xml file. We have multiple variants of -# compiler-rt, so the xml is modified to group the tests by variant. 
- -import argparse -import os -import re -from xml.etree import ElementTree - - -def main(): - parser = argparse.ArgumentParser(description="Reformat compiler-rt xml results") - parser.add_argument( - "--dir", - required=True, - help="Path to compiler-rt build directory", - ) - parser.add_argument( - "--variant", - required=True, - help="Name of the variant under test", - ) - args = parser.parse_args() - - xml_file = None - # The xml file path can be set by lit's --xunit-xml-output option. - # Since we do not set this directly, it will likely be found - # in the LIT_OPTS environment variable, which lit will read - # options from. - if "LIT_OPTS" in os.environ: - lit_opts = os.environ["LIT_OPTS"] - m = re.search("--xunit-xml-output=([^ ]+)", lit_opts) - if m is not None: - results_path = m.group(1) - # Path may be absolute or relative. - if os.path.isabs(results_path): - xml_file = results_path - else: - # If not absolute, the path will be relative to compiler-rt/test - # in the build directory, not this script. - xml_file = os.path.join(args.dir, "compiler-rt", "test", results_path) - if xml_file is None: - print(f"No xml results generated to modify.") - return - - tree = ElementTree.parse(xml_file) - root = tree.getroot() - - # The compiler-rt Builtins tests runs two testsuites: TestCases and Unit - # TestCases are recorded in the "Builtins" suite. - # But the Unit tests are recorded in "Builtins-arm-generic" or similar. 
- # For readability, combine them all under compiler-rt-{variant}-Builtins - for testsuite in root.iter("testsuite"): - old_suitename = testsuite.get("name") - new_suitename = f"compiler-rt-{args.variant}-Builtins" - testsuite.set("name", new_suitename) - for testcase in testsuite.iter("testcase"): - old_classname = testcase.get("classname") - new_classname = old_classname.replace(old_suitename, new_suitename) - testcase.set("classname", new_classname) - - tree.write(xml_file) - print(f"Results written to {xml_file}") - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/embedded-runtimes/test-support/picolibc-test-wrapper.py b/qualcomm-software/embedded-runtimes/test-support/picolibc-test-wrapper.py deleted file mode 100755 index db0a9b3aeb42..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/picolibc-test-wrapper.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 - -# SPDX-FileCopyrightText: Copyright 2023-2024 Arm Limited and/or its affiliates -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# This is a wrapper script to run picolibc tests with QEMU. - -from run_qemu import run_qemu -import argparse -import pathlib -import sys - - -def run(args): - # Some picolibc tests expect argv[0] to be literally "program-name", not - # the actual program name. 
- argv = ["program-name"] + args.arguments - if args.qemu_command: - return run_qemu( - args.qemu_command, - args.qemu_machine, - args.qemu_cpu, - args.qemu_params.split(":") if args.qemu_params else [], - args.image, - argv, - None, - pathlib.Path.cwd(), - args.verbose, - args.trace, - ) - - -def main(): - parser = argparse.ArgumentParser( - description="Run a single test using either qemu" - ) - main_arg_group = parser.add_mutually_exclusive_group(required=True) - main_arg_group.add_argument("--qemu-command", help="qemu-system- path") - parser.add_argument( - "--qemu-machine", - help="name of the machine to pass to QEMU", - ) - parser.add_argument( - "--qemu-cpu", required=False, help="name of the cpu to pass to QEMU" - ) - parser.add_argument( - "--qemu-params", - help='list of arguments to pass to qemu, separated with ":"', - ) - parser.add_argument( - "--trace", - type=str, - default=None, - help="File to write execution trace to (QEMU only)", - ) - parser.add_argument( - "--verbose", - action="store_true", - help="Print verbose output. This may affect test result, as the output " - "will be added to the output of the test.", - ) - parser.add_argument("image", help="image file to execute") - parser.add_argument( - "arguments", - nargs=argparse.REMAINDER, - default=[], - help="optional arguments for the image", - ) - args = parser.parse_args() - ret_code = run(args) - sys.exit(ret_code) - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/embedded-runtimes/test-support/run_qemu.py b/qualcomm-software/embedded-runtimes/test-support/run_qemu.py deleted file mode 100755 index 2d96a3fb2574..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/run_qemu.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2023, Arm Limited and affiliates. 
- -import subprocess -import sys -import re - - -def get_qemu_major_version(qemu_command): - output = subprocess.check_output([qemu_command, "--version"], text=True) - version_match = re.search(r"version (\d+)\.", output) - if version_match: - return int(version_match.group(1)) - else: - raise Exception("Cannot get version of " + qemu_command) - - -def run_qemu( - qemu_command, - qemu_machine, - qemu_cpu, - qemu_extra_params, - image, - arguments, - timeout, - working_directory, - verbose, - trace, -): - """Execute the program using QEMU and return the subprocess return code.""" - qemu_params = ["-M", qemu_machine] - if qemu_cpu: - qemu_params += ["-cpu", qemu_cpu] - qemu_params += qemu_extra_params - - # Setup semihosting with chardev bound to stdio. - # This is needed to test semihosting functionality in picolibc. - qemu_params += ["-chardev", "stdio,mux=on,id=stdio0"] - semihosting_config = ["enable=on", "chardev=stdio0"] + [ - "arg=" + arg.replace(",", ",,") for arg in arguments - ] - qemu_params += ["-semihosting-config", ",".join(semihosting_config)] - - # Disable features we don't need and which could slow down the test or - # interfere with semihosting. - qemu_params += ["-monitor", "none", "-serial", "none", "-nographic"] - - # Load the image to machine's memory and set the PC. - # "virt" machine cannot be used with load, as QEMU will try to put - # device tree blob at start of RAM conflicting with our code - # https://www.qemu.org/docs/master/system/arm/virt.html#hardware-configuration-information-for-bare-metal-programming - if qemu_machine == "virt": - qemu_params += ["-kernel", image] - else: - qemu_params += ["-device", f"loader,file={image},cpu-num=0"] - - # Enable tracing: disassembly, CPU state, interrupts and guest errors like - # invalid instructions. 
- if trace: - qemu_params += ["-d", "in_asm,nochain,cpu,int,guest_errors"] - qemu_params += ["-D", trace] - # Enable per instruction tracing depending on EQMU version - if get_qemu_major_version(qemu_command) >= 9: - qemu_params += ["-accel", "tcg,one-insn-per-tb=on"] - else: - qemu_params += ["-singlestep"] - - command = [qemu_command] + qemu_params - - if verbose: - print("running: {}".format(" ".join(command))) - - result = subprocess.run( - command, - stdout=subprocess.PIPE, - stderr=sys.stderr, - timeout=timeout, - cwd=working_directory, - check=False, - ) - sys.stdout.buffer.write(result.stdout) - return result.returncode diff --git a/qualcomm-software/embedded-runtimes/test-support/xfails.py b/qualcomm-software/embedded-runtimes/test-support/xfails.py deleted file mode 100644 index 6863fa72a0ad..000000000000 --- a/qualcomm-software/embedded-runtimes/test-support/xfails.py +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env python3 - -# SPDX-FileCopyrightText: Copyright 2024-2025 Arm Limited and/or its affiliates -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -"""This script will generate a list of tests where the expected result in the -source files needs to be overridden via the lit command line or environment -variables. -It can also be used to track where downstream testing diverges from -upstream, and why.""" - -import argparse -import os -import re -import subprocess - -from enum import Enum -from typing import Callable, NamedTuple, List - - -class NewResult(Enum): - """Enum storing the potential new result a test.""" - - XFAILED = "FAILED" # Replace a failure with an expected failure. - PASSED = "PASSED" # Replace an unexpected pass with a pass. - EXCLUDE = "EXCLUDE" # Exclude a test, so that it is not run at all. 
- - -class XFail(NamedTuple): - """Class to collect information about an xfail.""" - - name: str # Name to identify the xfail. - testnames: List[str] # The tests to include. - result: NewResult # The expected result. - project: str # Affected project. - variants: List[str] = None # Affected library variants, if applicable. - conditional: Callable = None # A function that will test whether an xfail applies. - issue_link: str = None # Optional link to a GitHub issue. - description: str = None # Optional field for notes. - - -def main(): - arg_parser = argparse.ArgumentParser( - prog="xfailgen", - description="A script that generates lit environment variables to xfail or filter tests.", - ) - arg_parser.add_argument( - "--variant", - help="For library specific projects, the variant being tested.", - ) - arg_parser.add_argument( - "--libc", - help="For library specific projects, the C library that was used.", - ) - arg_parser.add_argument( - "--clang", - help="Path to clang for conditional testing.", - ) - arg_parser.add_argument( - "--project", - required=True, - help="Project to generate xfails for.", - ) - arg_parser.add_argument( - "--output-args", - help="Write the test lists to a file with --xfail and --xfail-not" - "parameters, which can be read directly by lit by prefixing with @.", - ) - args = arg_parser.parse_args() - - # Test whether there is a multilib error from -frwpi - def check_frwpi_error(): - test_args = [ - args.clang, - "--print-multi-directory", - "-target", - "arm-none-eabi", - "-frwpi", - ] - p = subprocess.run(test_args, capture_output=True, check=False) - return p.returncode != 0 - - # Test whether there is a multilib warning from -mcpu=cortex-r52 - def check_r52_warning(): - test_args = [ - args.clang, - "--print-multi-directory", - "-target", - "arm-none-eabi", - "-mcpu=cortex-r52", - "-Werror", - ] - p = subprocess.run(test_args, capture_output=True, check=False) - return p.returncode != 0 - - xfails = [ - XFail( - name="no frwpi", - 
testnames=[ - "Driver/ropi-rwpi.c", - "Preprocessor/arm-pic-predefines.c", - ], - result=NewResult.XFAILED, - conditional=check_frwpi_error, - project="clang", - description="The multilib built by ATfE will generate a configuration error if -frwpi is used. Will pass if run before the multilib is installed.", - ), - XFail( - name="no r52", - testnames=[ - "Driver/arm-fpu-selection.s", - ], - result=NewResult.XFAILED, - conditional=check_r52_warning, - project="clang", - description="If the installed default multilib does not have a library available for -mcpu=cortex-r52, this test will fail.", - ), - ] - - tests_to_xfail = [] - tests_to_upass = [] - tests_to_exclude = [] - - for xfail in xfails: - if args.project != xfail.project: - continue - if xfail.variants is not None: - if args.variant is None: - raise ValueError( - f"--variant must be specified for project {args.project}" - ) - if args.variant not in xfail.variants: - continue - if xfail.conditional is not None: - if not xfail.conditional(): - continue - if xfail.result == NewResult.XFAILED: - tests_to_xfail.extend(xfail.testnames) - elif xfail.result == NewResult.PASSED: - tests_to_upass.extend(xfail.testnames) - elif xfail.result == NewResult.EXCLUDE: - tests_to_exclude.extend(xfail.testnames) - - tests_to_xfail.sort() - tests_to_upass.sort() - tests_to_exclude.sort() - - if args.output_args: - os.makedirs(os.path.dirname(args.output_args), exist_ok=True) - with open(args.output_args, "w", encoding="utf-8") as f: - if len(tests_to_xfail) > 0: - # --xfail and --xfail-not expect a comma separated list of test names. - f.write("--xfail=") - f.write(";".join(tests_to_xfail)) - f.write("\n") - if len(tests_to_upass) > 0: - f.write("--xfail-not=") - f.write(";".join(tests_to_upass)) - f.write("\n") - if len(tests_to_exclude) > 0: - # --filter-out expects a regular expression to match any test names. 
- escaped_testnames = [ - re.escape(testname) for testname in tests_to_exclude - ] - f.write("--filter-out=") - f.write("|".join(escaped_testnames)) - f.write("\n") - print(f"xfail list written to {args.output_args}") - else: - if len(tests_to_xfail) > 0: - print("xfailed tests:") - for testname in tests_to_xfail: - print(testname) - if len(tests_to_upass) > 0: - print("xfail removed from tests:") - for testname in tests_to_upass: - print(testname) - if len(tests_to_exclude) > 0: - print("excluded tests:") - for testname in tests_to_exclude: - print(testname) - - -if __name__ == "__main__": - main() diff --git a/qualcomm-software/embedded-runtimes/to_meson_list.cmake b/qualcomm-software/embedded-runtimes/to_meson_list.cmake deleted file mode 100644 index 8a42db85fdd4..000000000000 --- a/qualcomm-software/embedded-runtimes/to_meson_list.cmake +++ /dev/null @@ -1,9 +0,0 @@ -# Converts a cmake list to a string, which can be interpreted as list content in -# meson configuration file. -# The delimiting brackets are not included. 
-# Example output: "'foo', 'bar', 'baz'" - -function(to_meson_list input_list out_var) - list(JOIN input_list "', '" input_list) - set(${out_var} "'${input_list}'" PARENT_SCOPE) -endfunction() diff --git a/qualcomm-software/README.md b/qualcomm-software/embedded/README.md similarity index 97% rename from qualcomm-software/README.md rename to qualcomm-software/embedded/README.md index 9855530c58c9..e33f02282c55 100644 --- a/qualcomm-software/README.md +++ b/qualcomm-software/embedded/README.md @@ -87,7 +87,7 @@ CPULLVM Toolchain for Embedded is built and tested on #### Navigate to the scripts directory - cd cpullvm-toolchain/qualcomm-software/scripts + cd cpullvm-toolchain/qualcomm-software/embedded/scripts #### Run the script diff --git a/qualcomm-software/embedded/patches/eld/0001-Explicitly-set-aarch64-unknown-none-elf-triple-for-a.patch b/qualcomm-software/embedded/patches/eld/0001-Explicitly-set-aarch64-unknown-none-elf-triple-for-a.patch new file mode 100644 index 000000000000..8b3ff1e835ef --- /dev/null +++ b/qualcomm-software/embedded/patches/eld/0001-Explicitly-set-aarch64-unknown-none-elf-triple-for-a.patch @@ -0,0 +1,43 @@ +From 915493d714518dc799a5f5b25822ade9fb3ce4d8 Mon Sep 17 00:00:00 2001 +From: nshanmug +Date: Thu, 18 Sep 2025 10:34:41 -0700 +Subject: [PATCH] [PATCH] Explicitly set aarch64-unknown-none-elf triple for + aarch64 tests + +ELD takes LLVM_DEFAULT_TARGET_TRIPLE but tests are expecting none-linux output. +Configuring LLVM during build to targeting aarch64-unknown-linux-gnueabi by +default causes several test failures. + +Those tests are passing when LLVM is configured for aarch64-linux-gnueabi, which +should behavior identically as aarch64-unknown-linux-gnueabi. ELD is relying on +buggy behavior of LLVM's triple passing. + +We'll open a bug with upstream ELD. 
+ +Change-Id: I8f8ce66fa847c15150466cc65da18b36ef85f671 +--- + test/lit.cfg | 7 ++++--- + 1 file changed, 4 insertions(+), 3 deletions(-) + +diff --git a/test/lit.cfg b/test/lit.cfg +index 10d2e5c..0a4459a 100644 +--- a/test/lit.cfg ++++ b/test/lit.cfg +@@ -355,10 +355,11 @@ if config.test_target == 'AArch64': + 'aarch64-linux' in config.available_features): + config.available_features.add('aarch64') + xlen = 8 +- link = 'ld.eld' + config.emulation = '-m aarch64elf' +- clang ='clang -target aarch64' +- clangxx ='clang++ -target aarch64' ++ link = 'ld.eld -m aarch64elf -mtriple aarch64-unknown-none-elf' ++ clang = 'clang -target aarch64-unknown-none-elf' ++ clangxx = 'clang++ -target aarch64-unknown-none-elf' ++ + clangas = 'clang' + linkopts = '--thread-count 4 --threads' + if hasattr(config,'eld_option') and config.eld_option != 'default': +-- +2.17.1 + diff --git a/qualcomm-software/embedded/patches/llvm-project/0001-ARM-compiler-rt-Add-a-few-more-variants-for-builtins.patch b/qualcomm-software/embedded/patches/llvm-project/0001-ARM-compiler-rt-Add-a-few-more-variants-for-builtins.patch new file mode 100644 index 000000000000..a3d3533a2560 --- /dev/null +++ b/qualcomm-software/embedded/patches/llvm-project/0001-ARM-compiler-rt-Add-a-few-more-variants-for-builtins.patch @@ -0,0 +1,51 @@ +From ee4f89120d294e0ef196b8187f4bbfbd0964d1f4 Mon Sep 17 00:00:00 2001 +From: Jonathon Penix +Date: Tue, 20 Jan 2026 19:06:31 -0800 +Subject: [PATCH] [ARM][compiler-rt] Add a few more variants for builtins + +Adding following variants: pacret, pacret-bti and pacret-b-key-bti. They are +needed by corresponding musl variants. + +These should build for baremetal only. 
+--- + compiler-rt/lib/builtins/CMakeLists.txt | 24 +++++++++++++++++++++++- + 1 file changed, 23 insertions(+), 1 deletion(-) + +diff --git a/compiler-rt/lib/builtins/CMakeLists.txt b/compiler-rt/lib/builtins/CMakeLists.txt +index c3dbd65998f1..78735ff6ea99 100644 +--- a/compiler-rt/lib/builtins/CMakeLists.txt ++++ b/compiler-rt/lib/builtins/CMakeLists.txt +@@ -1029,7 +1029,29 @@ else () + C_STANDARD 11 + CXX_STANDARD 17 + PARENT_TARGET builtins) +- ++ if(arch MATCHES "aarch64" AND COMPILER_RT_BAREMETAL_BUILD) ++ add_compiler_rt_runtime(clang_rt.builtins-pacret-b-key-bti ++ STATIC ++ ARCHS ${arch} ++ SOURCES ${${arch}_SOURCES} ++ DEFS "${BUILTIN_DEFS}" ++ CFLAGS ${BUILTIN_CFLAGS_${arch}} -march=armv8.5-a -mbranch-protection=pac-ret+leaf+b-key+bti ++ PARENT_TARGET builtins) ++ add_compiler_rt_runtime(clang_rt.builtins-pacret ++ STATIC ++ ARCHS ${arch} ++ SOURCES ${${arch}_SOURCES} ++ DEFS "${BUILTIN_DEFS}" ++ CFLAGS ${BUILTIN_CFLAGS_${arch}} -march=armv8.5-a -mbranch-protection=pac-ret+leaf ++ PARENT_TARGET builtins) ++ add_compiler_rt_runtime(clang_rt.builtins-pacret-bti ++ STATIC ++ ARCHS ${arch} ++ SOURCES ${${arch}_SOURCES} ++ DEFS "${BUILTIN_DEFS}" ++ CFLAGS ${BUILTIN_CFLAGS_${arch}} -march=armv8.5-a -mbranch-protection=pac-ret+leaf+bti ++ PARENT_TARGET builtins) ++ endif () + # Write out the sources that were used to compile the builtins so that tests can be run in + # an independent compiler-rt build (see: compiler-rt/test/builtins/CMakeLists.txt) + get_compiler_rt_output_dir(${arch} BUILTIN_LIB_OUTPUT_DIR) +-- +2.43.0 + diff --git a/qualcomm-software/embedded/patches/llvm-project/0002-Allow-baremetal-builds-for-AArch64-and-ARM32-v7.patch b/qualcomm-software/embedded/patches/llvm-project/0002-Allow-baremetal-builds-for-AArch64-and-ARM32-v7.patch new file mode 100644 index 000000000000..73d115e457e5 --- /dev/null +++ b/qualcomm-software/embedded/patches/llvm-project/0002-Allow-baremetal-builds-for-AArch64-and-ARM32-v7.patch @@ -0,0 +1,33 @@ +From 
c6a5e4ed61dda760b270daf68dfea880a7fdc1c1 Mon Sep 17 00:00:00 2001 +From: Zhaoshi Zheng +Date: Tue, 30 Sep 2025 14:20:09 -0700 +Subject: [PATCH] Allow baremetal builds for AArch64 and ARM32(v7) + +--- + compiler-rt/cmake/base-config-ix.cmake | 10 ++++++++++ + 1 file changed, 10 insertions(+) + +diff --git a/compiler-rt/cmake/base-config-ix.cmake b/compiler-rt/cmake/base-config-ix.cmake +index d92bc0e71fa1..444704218393 100644 +--- a/compiler-rt/cmake/base-config-ix.cmake ++++ b/compiler-rt/cmake/base-config-ix.cmake +@@ -272,6 +272,16 @@ macro(test_targets) + elseif("${COMPILER_RT_DEFAULT_TARGET_ARCH}" MATCHES "nvptx") + test_target_arch(nvptx64 "" "--nvptx64-nvidia-cuda" "-nogpulib" "-flto" + "-fconvergent-functions" "-c") ++ # Rule for aarch64 baremetal library ++ # Match the entire triple ++ elseif ("${COMPILER_RT_DEFAULT_TARGET_TRIPLE}" MATCHES "aarch64-unknown-none-elf") ++ set (CAN_TARGET_aarch64 1) ++ list(APPEND COMPILER_RT_SUPPORTED_ARCH aarch64) ++ # Rule for armv7 baremetal library ++ # Match the entire triple ++ elseif ("${COMPILER_RT_DEFAULT_TARGET_TRIPLE}" MATCHES "arm-unknown-none-eabi") ++ set (CAN_TARGET_armv7 1) ++ list(APPEND COMPILER_RT_SUPPORTED_ARCH armv7) + elseif("${COMPILER_RT_DEFAULT_TARGET_ARCH}" MATCHES "arm") + if(WIN32) + test_target_arch(arm "" "" "") +-- +2.34.1 + diff --git a/qualcomm-software/embedded/patches/musl-embedded/0001-Remove-Linux-libraries-from-component_list.sh.patch b/qualcomm-software/embedded/patches/musl-embedded/0001-Remove-Linux-libraries-from-component_list.sh.patch new file mode 100644 index 000000000000..fd2bf033d459 --- /dev/null +++ b/qualcomm-software/embedded/patches/musl-embedded/0001-Remove-Linux-libraries-from-component_list.sh.patch @@ -0,0 +1,35 @@ +From 35ba9a719e2dd0015ddae56fc130fe0cbd941be2 Mon Sep 17 00:00:00 2001 +From: Jonathon Penix +Date: Sun, 18 Jan 2026 17:48:21 -0800 +Subject: [PATCH] Remove Linux libraries from component_list.sh + +We're temporarily porting these over to a consolidated 
build flow with +RISC-V prior to a larger refactor. So, don't build these as we'll do it +elsewhere. + +We also don't want to merge this into upstream musl-embedded--we'll be +removing usage of component_list.sh entirely soon enough. So, just keep +this as a patch for now. + +Signed-off-by: Jonathon Penix +--- + qualcomm-software/config/component_list.sh | 4 ---- + 1 file changed, 4 deletions(-) + +diff --git a/qualcomm-software/config/component_list.sh b/qualcomm-software/config/component_list.sh +index 2dd84ab1..29e4af4f 100644 +--- a/qualcomm-software/config/component_list.sh ++++ b/qualcomm-software/config/component_list.sh +@@ -1,9 +1,5 @@ + # musl_components + musl_components=( +-armv7_linux.sh,armv7-linux-gnueabi +-aarch64_linux.sh,aarch64-linux-gnu +-aarch64-pacret_linux.sh,aarch64-pacret-linux-gnu +-aarch64-pacret-bti_linux.sh,aarch64-pacret-bti-linux-gnu + armv7_baremetal.sh,armv7-none-eabi + aarch64_baremetal.sh,aarch64-none-elf + aarch64-pacret-b-key-bti_baremetal.sh,aarch64-pacret-b-key-bti-none-elf +-- +2.43.0 + diff --git a/qualcomm-software/embedded/patchsets.yml b/qualcomm-software/embedded/patchsets.yml new file mode 100644 index 000000000000..86d38a6e1cd6 --- /dev/null +++ b/qualcomm-software/embedded/patchsets.yml @@ -0,0 +1,28 @@ +{ + "version": 1, + "defaults": { + "method": "am", + "three_way": true, + "restore_on_fail": true, + "ignore_whitespace": true + }, + "patchsets": [ + { + "name": "eld", + "repo": "../../llvm/tools/eld", + "patches": "patches/eld", + "reset_to": "" + }, + { + "name": "llvm-project", + "repo": "../..", + "patches": "patches/llvm-project" + }, + { + "name": "musl-embedded", + "repo": "../../../musl-embedded", + "patches": "patches/musl-embedded" + } + ] +} + diff --git a/qualcomm-software/embedded/scripts/build.ps1 b/qualcomm-software/embedded/scripts/build.ps1 new file mode 100644 index 000000000000..4d711ec7f220 --- /dev/null +++ b/qualcomm-software/embedded/scripts/build.ps1 @@ -0,0 +1,343 @@ +# Fail fast on errors 
thrown by PowerShell cmdlets +$ErrorActionPreference = "Stop" + +# === Derive directories === +$SCRIPT_DIR = Split-Path -Parent $MyInvocation.MyCommand.Definition +$REPO_ROOT = (git -C $SCRIPT_DIR rev-parse --show-toplevel).Trim() +$WORKSPACE = (Resolve-Path "$REPO_ROOT\..").Path + +$SRC_DIR = $REPO_ROOT +$BUILD_DIR = "$WORKSPACE\build" +$INSTALL_DIR = "$WORKSPACE\install" +$ELD_DIR = "$REPO_ROOT\llvm\tools\eld" +$MUSL_EMBEDDED_DIR = "$WORKSPACE\musl-embedded" + +# === Config defaults === +if (-not $env:JOBS) { $env:JOBS = $env:NUMBER_OF_PROCESSORS } +if (-not $env:BUILD_MODE) { $env:BUILD_MODE = "Release" } +if (-not $env:ASSERTION_MODE) { $env:ASSERTION_MODE = "OFF" } + +# === Constants === +$ELD_REPO_URL = "https://github.com/qualcomm/eld.git" +$ELD_BRANCH = "release/22.x" + +$MUSL_EMBEDDED_REPO_URL = "https://github.com/qualcomm/musl-embedded.git" +$MUSL_EMBEDDED_BRANCH = "main" +$MUSL_EMBEDDED_COMMIT = "a2bc89ab37e8691e300d7a7dd96bfac4917dc884" + +Write-Host "[log] SCRIPT_DIR = $SCRIPT_DIR" +Write-Host "[log] REPO_ROOT = $REPO_ROOT" +Write-Host "[log] WORKSPACE = $WORKSPACE" +Write-Host "[log] BUILD_DIR = $BUILD_DIR" +Write-Host "[log] INSTALL_DIR = $INSTALL_DIR" +Write-Host "[log] ELD_DIR = $ELD_DIR" +Write-Host "[log] BUILD_MODE = $env:BUILD_MODE" +Write-Host "[log] ASSERTIONS = $env:ASSERTION_MODE" +Write-Host "[log] JOBS = $env:JOBS" + +# === Host architecture detection === +$hostArch = $env:PROCESSOR_ARCHITECTURE +switch -Regex ($hostArch) { + 'ARM64' { $hostArch = 'ARM64' } + 'AMD64' { $hostArch = 'x64' } + default { $hostArch = 'x64' } # safe default +} +Write-Host "[log] Host architecture detected: $hostArch" + +# Allow overriding to x64 tools on WoA via emulation (optional) +$useX64Tools = ($env:USE_X64_TOOLS -eq '1') +if ($hostArch -eq 'ARM64' -and $useX64Tools) { + Write-Warning "[warn] Forcing x64 toolchain under emulation on Windows on Arm (USE_X64_TOOLS=1). Expect slower builds." 
+} + +# === Resolve Visual Studio (vswhere) === +$vswhere = Join-Path ${env:ProgramFiles(x86)} "Microsoft Visual Studio\Installer\vswhere.exe" +if (-not (Test-Path $vswhere)) { + $vswhere = Join-Path ${env:ProgramFiles} "Microsoft Visual Studio\Installer\vswhere.exe" +} +if (-not (Test-Path $vswhere)) { + $cmd = Get-Command vswhere -ErrorAction Ignore + if ($cmd) { $vswhere = $cmd.Source } +} +if (-not (Test-Path $vswhere)) { + Write-Error "*** ERROR: vswhere.exe not found in Program Files (x86), Program Files, or PATH ***" + exit 1 +} + +# === Choose VS component and vcvars target === +$vsRequires = $null +$vcvarsTarget = $null +if ($hostArch -eq 'ARM64' -and -not $useX64Tools) { + $vsRequires = 'Microsoft.VisualStudio.Component.VC.Tools.ARM64' + $vcvarsTarget = 'arm64' + Write-Host "[log] Using native ARM64 MSVC toolset" +} else { + $vsRequires = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64' + $vcvarsTarget = 'x64' + Write-Host "[log] Using x64 MSVC toolset" +} + +# Query the latest VS with required component +$VS_INSTALL = & $vswhere -latest -products * ` + -requires $vsRequires ` + -property installationPath +if (-not $VS_INSTALL) { + Write-Error "*** ERROR: Visual Studio installation with '$vsRequires' not found via vswhere ***" + exit 1 +} + +# Get vcvarsall.bat and import the environment for selected host +$VCVARSALL = Join-Path $VS_INSTALL "VC\Auxiliary\Build\vcvarsall.bat" +if (-not (Test-Path $VCVARSALL)) { + Write-Error "*** ERROR: vcvarsall.bat not found at $VCVARSALL ***" + exit 1 +} + +Write-Host "[log] Loading VS environment: vcvarsall $vcvarsTarget" +cmd /c "call `"$VCVARSALL`" $vcvarsTarget && set" | ForEach-Object { + if ($_ -match '^(.*?)=(.*)$') { + [System.Environment]::SetEnvironmentVariable($matches[1], $matches[2], 'Process') + } +} + +# Tool sanity checks +foreach ($tool in @("git","python","cmake","ninja","clang-cl")) { + if (-not (Get-Command $tool -ErrorAction SilentlyContinue)) { + Write-Error "*** ERROR: $tool not found on PATH 
***" + exit 1 + } +} + +# === Clean === +Write-Host "[log] Cleaning $BUILD_DIR and $INSTALL_DIR" +Remove-Item -Recurse -Force $BUILD_DIR,$INSTALL_DIR -ErrorAction SilentlyContinue + +# === Prepare workspace === +Write-Host "[log] Preparing workspace at: $WORKSPACE" +New-Item -ItemType Directory -Force -Path $BUILD_DIR,$INSTALL_DIR | Out-Null + +# === Clone repos === +if (-not (Test-Path "$MUSL_EMBEDDED_DIR\.git")) { + Write-Host "[log] Cloning musl-embedded..." + git clone $MUSL_EMBEDDED_REPO_URL $MUSL_EMBEDDED_DIR + Push-Location $MUSL_EMBEDDED_DIR + git checkout $MUSL_EMBEDDED_COMMIT + Pop-Location +} + +if (-not (Test-Path "$ELD_DIR\.git")) { + Write-Host "[log] Cloning ELD..." + git clone $ELD_REPO_URL $ELD_DIR +} + +# === Apply patches === +Push-Location $SRC_DIR +python "qualcomm-software/embedded/tools/patchctl.py" apply -f "qualcomm-software/embedded/patchsets.yml" + +if ($LASTEXITCODE -ne 0) { + Write-Error "*** ERROR: Patch apply failed (exit=$LASTEXITCODE) ***" + exit $LASTEXITCODE +} + +Pop-Location + +# === Build === +Write-Host "[log] Configuring CMake..." 
+
+# Provide Python to CMake/lit if available
+$pythonExe = (Get-Command python -ErrorAction SilentlyContinue).Source
+if ($pythonExe) { Write-Host "[log] Using Python: $pythonExe" } else { Write-Host "[warn] Python not found via Get-Command; relying on PATH" }
+
+# --- Resource compiler selection (prefer llvm-rc) ---
+$llvmRcCandidates = @(
+  (Join-Path $VS_INSTALL ('VC\Tools\Llvm\{0}\bin\llvm-rc.exe' -f $vcvarsTarget)), # ...\Llvm\arm64\bin or ...\Llvm\x64\bin
+  (Join-Path $VS_INSTALL 'VC\Tools\Llvm\bin\llvm-rc.exe'), # generic bin in some layouts
+  'C:\Program Files\LLVM\bin\llvm-rc.exe' # Standalone LLVM
+) | Where-Object { Test-Path $_ }
+
+$llvmRc = $llvmRcCandidates | Select-Object -First 1
+
+$cmakeRcArg = ''
+if ($llvmRc) {
+  Write-Host "[log] Using llvm-rc: $llvmRc"
+  # Normalize to forward slashes to keep CMake from parsing backslash escapes
+  $llvmRcForCMake = $llvmRc -replace '\\','/'
+  # FILEPATH type prevents quoting glitches in the generated CMakeRCCompiler.cmake
+  $cmakeRcArg = "-DCMAKE_RC_COMPILER:FILEPATH=$llvmRcForCMake"
+  Write-Host "[diag] CMAKE_RC_COMPILER = $llvmRcForCMake"
+} else {
+  Write-Warning "[warn] llvm-rc.exe not found; falling back to Windows rc.exe (may hang)."
+} + +# --- Generation --- +cmake -G "Ninja" ` + -S "$SRC_DIR\llvm" ` + -B "$BUILD_DIR\llvm" ` + -DCMAKE_INSTALL_PREFIX="$INSTALL_DIR" ` + -DLLVM_TARGETS_TO_BUILD="ARM;AArch64;RISCV" ` + -DLLVM_EXTERNAL_PROJECTS="eld" ` + -DLLVM_EXTERNAL_ELD_SOURCE_DIR="$ELD_DIR" ` + -DLLVM_DEFAULT_TARGET_TRIPLE="aarch64-unknown-linux-gnu" ` + -DLIBCLANG_BUILD_STATIC=ON ` + -DLLVM_POLLY_LINK_INTO_TOOLS=ON ` + -DCMAKE_C_COMPILER=clang-cl ` + -DCMAKE_CXX_COMPILER=clang-cl ` + -DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDLL ` + -DLLVM_ENABLE_ASSERTIONS="$env:ASSERTION_MODE" ` + -DLLVM_ENABLE_PROJECTS="llvm;clang;polly;lld;mlir" ` + $(if ($pythonExe) { "-DPython3_EXECUTABLE=`"$pythonExe`"" } else { "" }) ` + $cmakeRcArg ` + -DCMAKE_BUILD_TYPE="$env:BUILD_MODE" + +Push-Location "$BUILD_DIR\llvm" + +# --- Build (Ninja) --- +Write-Host "[log] Building LLVM with Ninja..." +& ninja +if ($LASTEXITCODE -ne 0) { Write-Error "*** ERROR: build failed (exit=$LASTEXITCODE) ***"; exit $LASTEXITCODE } + +# --- Install (Ninja) --- +Write-Host "[log] Install target..." +& ninja install +if ($LASTEXITCODE -ne 0) { Write-Error "*** ERROR: install failed (exit=$LASTEXITCODE) ***"; exit $LASTEXITCODE } + +# === Prefer our build bin and ensure Git Unix tools are available === +$env:PATH = "$BUILD_DIR\llvm\bin;$env:PATH" +$gitUsr = Join-Path ${env:ProgramFiles} "Git\usr\bin" +if (Test-Path $gitUsr) { + $env:PATH = "$env:PATH;$gitUsr" + Write-Host "[log] Added Git Unix tools to PATH: $gitUsr" +} else { + Write-Warning "[warn] Git usr\bin not found; polly-check-format may fail (missing diff)." +} + +# === Tests === +Write-Host "[log] ===== BEGIN TEST SUITE =====" +$FAIL_COUNT = 0 +foreach ($test in @("llvm","lld","eld","clang","polly")) { + Write-Host "[log] Running $test tests..." + & ninja -v "check-$test" + if ($LASTEXITCODE -ne 0) { + Write-Host "[ERROR] $test tests failed! (exit=$LASTEXITCODE)" + $FAIL_COUNT++ + } else { + Write-Host "[log] $test tests completed." 
+ } +} + +Write-Host "[log] ===== END TEST SUITE =====" +Pop-Location + +if ($FAIL_COUNT -ne 0) { + Write-Host "[log] Build completed, but $FAIL_COUNT test suite(s) failed." + exit 1 +} +Write-Host "[log] Build and all tests completed successfully!" + +# --- Create artifact (PowerShell) --- + +# Simple local log helpers +$__w_log = { param($m) Write-Host "[log] $m" } +$__w_warn = { param($m) Write-Warning "[warn] $m" } + +# Compute identifiers +$short_sha = (git -C $SRC_DIR rev-parse --short HEAD).Trim() +$suffix = Get-Date -Format "yyyyMMdd" + +# Artifact roots and name +$archive_root = "$WORKSPACE\artifacts" +$archive_dir = $INSTALL_DIR + +# === select artifact arch label by host/toolset === +$artifactArch = if ($hostArch -eq 'ARM64' -and -not $useX64Tools) { 'arm64' } else { 'x86_64' } + +$base_name = "cpullvm-toolchain-$($ELD_BRANCH.Split('/')[-1])-Windows-$artifactArch-$short_sha-$suffix" + +& $__w_log "Applying NIGHTLY compression settings" +$COMPRESS_EXT = "txz" +$archive_name = "${base_name}_nightly.$COMPRESS_EXT" + +$env:XZ_OPT = "--threads=$JOBS" + +# Ensure output directory exists +if (-not (Test-Path $archive_root)) { New-Item -ItemType Directory -Force -Path $archive_root | Out-Null } + +$tar_file = Join-Path $archive_root $archive_name +& $__w_log "Compressing '$archive_dir' into '$tar_file'" + +function Test-TarSupportsXz { + try { + $help = & tar --help 2>&1 + if ($LASTEXITCODE -ne 0) { return $false } + return ($help -match '-J' -or $help -match 'xz') + } catch { return $false } +} + +function Get-7ZipPath { + $candidates = @( + (Get-Command 7z -ErrorAction SilentlyContinue | ForEach-Object { $_.Source }), + "C:\Program Files\7-Zip\7z.exe", + "C:\Program Files (x86)\7-Zip\7z.exe" + ) | Where-Object { $_ -and (Test-Path $_) } | Select-Object -First 1 + return $candidates +} + +$usedTool = $null + +# Preferred: tar with xz support (Git for Windows ships bsdtar) +$tarCmd = Get-Command tar -ErrorAction SilentlyContinue +if ($tarCmd) { + if 
(Test-TarSupportsXz) { + & tar -cJf "$tar_file" -C "$archive_dir" . + if ($LASTEXITCODE -ne 0) { throw "tar failed with exit code $LASTEXITCODE" } + $usedTool = "tar -cJf" + } else { + & $__w_warn "tar found but XZ (-J) not supported; falling back to 7‑Zip." + } +} else { + & $__w_warn "tar not found; attempting 7‑Zip fallback." +} + +# Fallback: 7‑Zip (create .tar, then compress to .xz -> .txz) +if (-not $usedTool) { + $sevenZip = Get-7ZipPath + if (-not $sevenZip) { + throw "No tar with xz support and no 7‑Zip found. Install Git for Windows (bsdtar) or 7‑Zip." + } + + $tempTar = [System.IO.Path]::ChangeExtension($tar_file, ".tar") + $tempXz = [System.IO.Path]::ChangeExtension($tar_file, ".xz") + if (Test-Path $tempTar) { Remove-Item -Force $tempTar } + if (Test-Path $tempXz) { Remove-Item -Force $tempXz } + + & $__w_log "7‑Zip: creating tar archive '$tempTar'" + Push-Location $archive_dir + try { + & "$sevenZip" a -bso0 -bse1 -ttar "$tempTar" "." + if ($LASTEXITCODE -ne 0) { throw "7z (create tar) failed with exit code $LASTEXITCODE" } + } finally { + Pop-Location + } + + & $__w_log "7‑Zip: compressing to XZ '$tempXz' (threads=$JOBS)" + & "$sevenZip" a -bso0 -bse1 -txz -mx=9 -mmt=$JOBS "$tempXz" "$tempTar" + if ($LASTEXITCODE -ne 0) { throw "7z (xz compress) failed with exit code $LASTEXITCODE" } + + Move-Item -Force "$tempXz" "$tar_file" + Remove-Item -Force "$tempTar" + $usedTool = "7z (tar + xz)" +} + +& $__w_log "Artifact created with: $usedTool" +& $__w_log "Artifact path: $tar_file" + +# copy to ARTIFACT_DIR +if ($env:ARTIFACT_DIR -and $env:ARTIFACT_DIR.Trim().Length -gt 0) { + $destDir = $env:ARTIFACT_DIR + if (-not (Test-Path $destDir)) { New-Item -ItemType Directory -Force -Path $destDir | Out-Null } + $destFile = Join-Path $destDir (Split-Path -Leaf $tar_file) + Copy-Item -Force "$tar_file" "$destFile" + & $__w_log "Artifact copied to $destFile" +} else { + & $__w_warn "Artifact left at $tar_file" +} diff --git 
a/qualcomm-software/embedded/scripts/build.sh b/qualcomm-software/embedded/scripts/build.sh new file mode 100755 index 000000000000..cc21cb81aaec --- /dev/null +++ b/qualcomm-software/embedded/scripts/build.sh @@ -0,0 +1,348 @@ +#!/usr/bin/env bash + +# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. +# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +# A bash script to build cpullvm toolchain +# The script automates cloning, patching, building LLVM, ELD linker and musl-embedded, +# and packaging artifacts for ARM and AArch64 targets. + +# Note: Ensure that `ELD_BRANCH` and `MUSL_EMBEDDED_BRANCH` match the current repository branch +# to maintain consistency across all dependencies. + +set -euo pipefail + +log() { echo -e "\033[1;34m[log]\033[0m $(date '+%F %T') $*"; } +warn() { echo -e "\033[1;33m[warn]\033[0m $(date '+%F %T') $*"; } +trap 'warn "Script failed at line $LINENO: \"$BASH_COMMAND\" (exit code: $?)"; exit 1' ERR + +readonly ELD_REPO_URL="https://github.com/qualcomm/eld.git" +readonly ELD_BRANCH="release/22.x" + +readonly MUSL_EMBEDDED_REPO_URL="https://github.com/qualcomm/musl-embedded.git" +readonly MUSL_EMBEDDED_BRANCH="main" + +SCRIPT_DIR="$( + cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" >/dev/null && pwd +)" +REPO_ROOT="$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel )" +WORKSPACE="${REPO_ROOT}/.." 
+SRC_DIR="${REPO_ROOT}" +BUILD_DIR="${WORKSPACE}/build" +INSTALL_DIR="${WORKSPACE}/install" +BUILD_DIR_AARCH64="${BUILD_DIR}/aarch64" +INSTALL_DIR_AARCH64="${INSTALL_DIR}/aarch64" +ARTIFACT_DIR="" +SKIP_TESTS="false" +JOBS="${JOBS:-$(nproc)}" +CLEAN="false" +NIGHTLY="false" + +usage() { + cat <<'EOF' +Usage: + build.sh [options] + +Options: + --artifact-dir Directory to copy final tarball + --skip-tests Skip LLVM test steps + --arm-sysroot Arm sysroot (default: /usr/arm-linux-gnueabi) + --aarch64-sysroot AArch64 sysroot (default: /usr/aarch64-linux-gnu) + --nightly Nightly build + --clean Delete and recreate build/install dirs + +Examples: + ./build.sh --artifact-dir /tmp/artifacts +EOF +} + +# --- Parse args --- +while [[ $# -gt 0 ]]; do + case "$1" in + --artifact-dir) ARTIFACT_DIR="$2"; shift 2 ;; + --skip-tests) SKIP_TESTS="true"; shift ;; + --nightly) NIGHTLY="true"; shift ;; + --clean) CLEAN="true"; shift ;; + -h|--help) usage; exit 0 ;; + *) warn "Unknown arg: $1"; usage; exit 1 ;; + esac +done + +# --- Set the Build flags --- +BUILD_MODE="Release" +ASSERTION_MODE="OFF" +ARM32_BM_TRIPLE="arm-none-eabi" +AARCH64_BM_TRIPLE="aarch64-none-elf" +COMPILER_RT_ARM32_BM_BUILDDIR="${WORKSPACE}/build/compiler-rt/arm32/baremetal" +COMPILER_RT_AARCH64_BM_BUILDDIR="${WORKSPACE}/build/compiler-rt/aarch64/baremetal" +COMPILER_RT_ARM32_BM_FLAGS="--target=arm-none-eabi -mcpu=cortex-a9 -ffunction-sections -fdata-sections -mfloat-abi=softfp -mfpu=neon -nostdlibinc" +COMPILER_RT_AARCH64_BM_FLAGS="--target=aarch64-none-elf -mcpu=cortex-a53 -ffunction-sections -fdata-sections -nostdlibinc" + +GCC_ROOT_AARCH64="/usr" + +# --- Prepare build/install dirs of aarch64 --- +if [[ "${CLEAN}" == "true" ]]; then + log "Cleaning ${BUILD_DIR} ${INSTALL_DIR} ${BUILD_DIR_AARCH64} and ${INSTALL_DIR_AARCH64}" + rm -rf "${BUILD_DIR}" "${INSTALL_DIR}" "${BUILD_DIR_AARCH64}" "${INSTALL_DIR_AARCH64}" +fi + +# --- Workspace prep --- +log "Preparing workspace at: ${WORKSPACE}" +mkdir -p "${BUILD_DIR}" 
"${INSTALL_DIR}" + +# --- Clone musl-embedded (if absent) --- +if [[ ! -d "${WORKSPACE}/musl-embedded/.git" ]]; then + log "Cloning musl-embedded into ${WORKSPACE}/musl-embedded" + git clone "${MUSL_EMBEDDED_REPO_URL}" "${WORKSPACE}/musl-embedded" -b "${MUSL_EMBEDDED_BRANCH}" + MUSL_PINNED_COMMIT="${MUSL_PINNED_COMMIT:-a2bc89ab37e8691e300d7a7dd96bfac4917dc884}" + pushd "${WORKSPACE}/musl-embedded" >/dev/null + git checkout "${MUSL_PINNED_COMMIT}" + popd >/dev/null +else + log "musl-embedded already present, leaving as-is" +fi + +# --- Clone ELD under llvm/tools (if absent) --- +if [[ ! -d "${REPO_ROOT}/llvm/tools/eld/.git" ]]; then + log "Cloning ELD to ${REPO_ROOT}/llvm/tools/eld" + git clone "${ELD_REPO_URL}" "${SRC_DIR}/llvm/tools/eld" -b "${ELD_BRANCH}" +else + log "ELD already present under llvm/tools, leaving as-is" +fi + +# --- Apply patches --- +log "Applying patches" +python3 "${SRC_DIR}/qualcomm-software/embedded/tools/patchctl.py" apply -f "${SRC_DIR}/qualcomm-software/embedded/patchsets.yml" + +# --- Build LLVM (native) --- +log "Configuring LLVM" +mkdir -p "${BUILD_DIR}/llvm" +pushd "${BUILD_DIR}/llvm" >/dev/null + +cmake -G Ninja -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" \ + -DLLVM_TARGETS_TO_BUILD="ARM;AArch64;RISCV;X86" \ + -DELD_TARGETS_TO_BUILD="AArch64;ARM;RISCV" \ + -DLLVM_EXTERNAL_PROJECTS="eld" \ + -DLLVM_EXTERNAL_ELD_SOURCE_DIR="llvm/tools/eld" \ + -DLLVM_DEFAULT_TARGET_TRIPLE="aarch64-unknown-linux-gnueabi" \ + -DLLVM_TARGET_ARCH="arm-linux-gnueabi" \ + -DLLVM_BUILD_RUNTIME="OFF" \ + -DLIBCLANG_BUILD_STATIC="ON" -DLLVM_POLLY_LINK_INTO_TOOLS="ON" \ + -DCMAKE_C_COMPILER="clang" -DCMAKE_CXX_COMPILER="clang++" \ + -DCMAKE_BUILD_TYPE="${BUILD_MODE}" \ + -DLLVM_ENABLE_ASSERTIONS:BOOL="${ASSERTION_MODE}" \ + -DLLVM_ENABLE_PROJECTS="llvm;clang;polly;lld;mlir" \ + "${SRC_DIR}/llvm" + +log "Building LLVM" +ninja +log "Installing LLVM" +ninja install +popd >/dev/null + +if [[ "${SKIP_TESTS}" != "true" ]]; then + log "Running LLVM tests" + (cd 
"${BUILD_DIR}/llvm" && ninja check-llvm check-lld check-polly check-eld check-clang) +else + warn "Skipping tests" +fi + +# --- Compute clang resource dir --- +RESOURCE_DIR="$("${INSTALL_DIR}/bin/clang" -print-resource-dir)" +log "RESOURCE_DIR=${RESOURCE_DIR}" + +# --- Build compiler-rt for ARM baremetal --- +log "Building compiler-rt for ARM baremetal" +mkdir -p "${BUILD_DIR}/compiler-rt/arm32/baremetal" +pushd "${BUILD_DIR}/compiler-rt/arm32/baremetal" >/dev/null +cmake -G Ninja \ + -DCMAKE_INSTALL_PREFIX="${RESOURCE_DIR}" \ + -DCMAKE_TRY_COMPILE_TARGET_TYPE="STATIC_LIBRARY" \ + -DCMAKE_ASM_COMPILER_TARGET="${ARM32_BM_TRIPLE}" \ + -DCMAKE_C_COMPILER_TARGET="${ARM32_BM_TRIPLE}" \ + -DCMAKE_CXX_COMPILER_TARGET="${ARM32_BM_TRIPLE}" \ + -DCMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \ + -DCMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \ + -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" \ + -DCMAKE_C_FLAGS="${COMPILER_RT_ARM32_BM_FLAGS}" \ + -DCMAKE_CXX_FLAGS="${COMPILER_RT_ARM32_BM_FLAGS}" \ + -DCMAKE_ASM_FLAGS="${COMPILER_RT_ARM32_BM_FLAGS}" \ + -DCMAKE_SYSTEM_NAME="Generic" \ + -DCOMPILER_RT_BAREMETAL_BUILD="ON" \ + -DCOMPILER_RT_BUILD_BUILTINS="ON" \ + -DCOMPILER_RT_BUILD_LIBFUZZER="OFF" \ + -DCOMPILER_RT_BUILD_PROFILE="OFF" \ + -DCOMPILER_RT_BUILD_SANITIZERS="OFF" \ + -DCOMPILER_RT_BUILD_XRAY="OFF" \ + -DCOMPILER_RT_DEFAULT_TARGET_TRIPLE="${ARM32_BM_TRIPLE}" \ + -DCOMPILER_RT_OS_DIR="baremetal" \ + -DCOMPILER_RT_TEST_TARGET_TRIPLE="${ARM32_BM_TRIPLE}" \ + -DCOMPILER_RT_TEST_COMPILER="${INSTALL_DIR}/bin/clang" \ + -DCOMPILER_RT_TEST_COMPILER_CFLAGS="${COMPILER_RT_ARM32_BM_FLAGS}" \ + -DCMAKE_BUILD_TYPE="${BUILD_MODE}" \ + -DLLVM_ENABLE_ASSERTIONS:BOOL="${ASSERTION_MODE}" \ + -DCXX_SUPPORTS_UNWINDLIB_NONE_FLAG:BOOL="OFF" \ + "${SRC_DIR}/compiler-rt" +ninja install +popd >/dev/null + +# --- Build compiler-rt for AArch64 baremetal --- +log "Building compiler-rt for AArch64 baremetal" +mkdir -p "${BUILD_DIR}/compiler-rt/aarch64/baremetal" +pushd 
"${BUILD_DIR}/compiler-rt/aarch64/baremetal" >/dev/null +cmake -G Ninja \ + -DCMAKE_INSTALL_PREFIX="${RESOURCE_DIR}" \ + -DCMAKE_TRY_COMPILE_TARGET_TYPE="STATIC_LIBRARY" \ + -DCMAKE_C_COMPILER="${INSTALL_DIR}/bin/clang" \ + -DCMAKE_CXX_COMPILER="${INSTALL_DIR}/bin/clang++" \ + -DCMAKE_PREFIX_PATH="${INSTALL_DIR}" \ + -DCMAKE_C_FLAGS="${COMPILER_RT_AARCH64_BM_FLAGS}" \ + -DCMAKE_CXX_FLAGS="${COMPILER_RT_AARCH64_BM_FLAGS}" \ + -DCMAKE_ASM_FLAGS="${COMPILER_RT_AARCH64_BM_FLAGS}" \ + -DCMAKE_SYSTEM_NAME="Generic" \ + -DCOMPILER_RT_BAREMETAL_BUILD="ON" \ + -DCOMPILER_RT_BUILD_BUILTINS="ON" \ + -DCOMPILER_RT_BUILD_LIBFUZZER="OFF" \ + -DCOMPILER_RT_BUILD_PROFILE="OFF" \ + -DCOMPILER_RT_BUILD_SANITIZERS="OFF" \ + -DCOMPILER_RT_BUILD_XRAY="OFF" \ + -DCOMPILER_RT_DEFAULT_TARGET_TRIPLE="${AARCH64_BM_TRIPLE}" \ + -DCOMPILER_RT_OS_DIR="baremetal" \ + -DCOMPILER_RT_TEST_TARGET_TRIPLE="${AARCH64_BM_TRIPLE}" \ + -DCOMPILER_RT_TEST_COMPILER="${INSTALL_DIR}/bin/clang" \ + -DCOMPILER_RT_TEST_COMPILER_CFLAGS="${COMPILER_RT_AARCH64_BM_FLAGS}" \ + -DCMAKE_BUILD_TYPE="${BUILD_MODE}" \ + -DLLVM_ENABLE_ASSERTIONS:BOOL="${ASSERTION_MODE}" \ + "${SRC_DIR}/compiler-rt" +ninja +ninja install +popd >/dev/null + +# --- Build musl-embedded --- +export PATH="${INSTALL_DIR}/bin:${PATH}" +log "Building musl-embedded" +MUSL_BUILDDIR="${WORKSPACE}/musl-embedded" +source "${MUSL_BUILDDIR}/qualcomm-software/config/component_list.sh" +for lib in "${musl_components[@]}"; do + libName="$(echo "${lib}" | awk -F".sh," '{print $1}')" + dirName="$(echo "${lib}" | awk -F"," '{print $2}')" + pushd "${MUSL_BUILDDIR}" >/dev/null + make distclean + bash -x "${MUSL_BUILDDIR}/qualcomm-software/config/linux/arm/${libName}.sh" --prefix="${INSTALL_DIR}/${dirName}/libc" + make -j"${JOBS}" + make install + popd >/dev/null +done + +# --- c++ libs --- +log "Build c++ libs ..." 
+ +declare -A Triples +Triples["aarch64-none-elf"]="aarch64-none-elf" +Triples["aarch64-pacret-b-key-bti-none-elf"]="aarch64-none-elf" +Triples["armv7-none-eabi"]="armv7-none-eabi" +declare -A CFLAGS +CFLAGS["aarch64-none-elf"]="-mcpu=cortex-a53 -nostartfiles" +CFLAGS["aarch64-pacret-b-key-bti-none-elf"]="-mcpu=cortex-a53 -nostartfiles -march=armv8.5-a -mbranch-protection=pac-ret+leaf+b-key+bti" +CFLAGS["armv7-none-eabi"]="-mcpu=cortex-a9 -mthumb -specs=nosys.specs" +CFLAGS_RELEASE="-Os -DNDEBUG" +for VARIANT in "aarch64-none-elf" "aarch64-pacret-b-key-bti-none-elf" "armv7-none-eabi"; do + TRIPLE="${Triples[$VARIANT]}" + MUSL_INC="${INSTALL_DIR}/${TRIPLE}/libc/include" + CMAKE_CFLAGS="-target ${TRIPLE} -nostdinc -isystem ${MUSL_INC} -ccc-gcc-name ${TRIPLE}-g++ -fno-unroll-loops -fno-optimize-sibling-calls -ffunction-sections -fdata-sections -fno-exceptions -D_GNU_SOURCE ${CFLAGS[$VARIANT]}" + mkdir -p "${BUILD_DIR}/${VARIANT}" + pushd "${BUILD_DIR}/${VARIANT}" >/dev/null + cmake -G Ninja -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}/${VARIANT}" -DCMAKE_BUILD_TYPE="Release" -DCMAKE_C_COMPILER="clang" -DCMAKE_CXX_COMPILER="clang++" \ + -DHAVE_LIBCXXABI="True" -DCMAKE_SYSTEM_NAME="Generic" \ + -DCMAKE_C_FLAGS_RELEASE="${CFLAGS_RELEASE}" \ + -DCMAKE_CXX_FLAGS_RELEASE="${CFLAGS_RELEASE}" \ + -DCMAKE_C_FLAGS="${CMAKE_CFLAGS}" \ + -DCMAKE_CXX_FLAGS="${CMAKE_CFLAGS}" \ + -DCMAKE_ASM_FLAGS="${CMAKE_CFLAGS}" \ + -DCMAKE_TRY_COMPILE_TARGET_TYPE="STATIC_LIBRARY" \ + -DLIBCXX_ENABLE_SHARED="False" \ + -DLIBCXX_SHARED_OUTPUT_NAME="c++-shared" \ + -DLIBCXX_ENABLE_EXCEPTIONS="False" \ + -DLIBCXX_HAS_MUSL_LIBC="True" \ + -DLIBCXX_ENABLE_ABI_LINKER_SCRIPT="False" \ + -DLIBCXX_ENABLE_THREADS="False" \ + -DLIBCXX_ENABLE_FILESYSTEM="False" \ + -DLIBCXX_ENABLE_RANDOM_DEVICE="False" \ + -DLIBCXX_ENABLE_LOCALIZATION="False" \ + -DLIBCXX_SUPPORTS_STD_EQ_CXX11_FLAG="ON" \ + -DLIBCXX_SUPPORTS_STD_EQ_CXX14_FLAG="ON" \ + -DLIBCXX_SUPPORTS_STD_EQ_CXX17_FLAG="ON" \ + -DLIBCXX_QUIC_BAREMETAL="ON" \ + 
-DLIBCXXABI_USE_LLVM_UNWINDER="True" \ + -DLIBCXXABI_BAREMETAL="True" \ + -DLIBCXXABI_ENABLE_SHARED="False" \ + -DLIBCXXABI_SHARED_OUTPUT_NAME="c++abi-shared" \ + -DLIBCXXABI_ENABLE_WERROR="True" \ + -DLIBCXXABI_ENABLE_THREADS="False" \ + -DLIBCXXABI_ENABLE_ASSERTIONS="False" \ + -DLIBCXXABI_ENABLE_EXCEPTIONS="False" \ + -DLIBUNWIND_IS_BAREMETAL="True" \ + -DLIBUNWIND_ENABLE_SHARED="False" \ + -DLIBUNWIND_SHARED_OUTPUT_NAME="unwind-shared" \ + -DUNIX="True" \ + -S "${SRC_DIR}/runtimes" "-DLLVM_ENABLE_RUNTIMES=libcxx;libcxxabi;libunwind" + ninja + ninja install + popd >/dev/null + log "c++ libs install ..." +done + +# Linux libraries +log "Install RISC-V Linux libraries" +"${SCRIPT_DIR}"/build_linux_runtimes.sh \ + --base-build-dir "${BUILD_DIR}" \ + --base-install-dir "${INSTALL_DIR}" \ + --resource-dir "${RESOURCE_DIR}" \ + --llvm-src-dir "${SRC_DIR}" \ + --musl-emb-src-dir "${MUSL_BUILDDIR}" \ + --download-dir "${WORKSPACE}" + +log "Build and installation complete." + +# --- Create artifact --- +log "Creating artifact tarball" + +short_sha="$(git -C "${SRC_DIR}" rev-parse --short HEAD)" +suffix="$(date +%Y%m%d)" +archive_root="${BUILD_DIR}" +archive_dir="${INSTALL_DIR}" +COMPRESS_EXT="tgz" +COMPRESS_FLAG="-czvf" + +# Name the artifact according to host arch (native build) +ARCH_TAG="$(uname -m)" +case "${ARCH_TAG}" in + x86_64) ARCH_TAG="x86_64" ;; + aarch64|arm64) ARCH_TAG="AArch64" ;; + *) ARCH_TAG="${ARCH_TAG}" ;; +esac + +archive_name="cpullvm-toolchain-${ELD_BRANCH##*/}-Linux-${ARCH_TAG}-${short_sha}-${suffix}.${COMPRESS_EXT}" + +if [[ "${NIGHTLY}" == "true" ]]; then + log "Applying NIGHTLY compression settings" + COMPRESS_EXT="txz" + COMPRESS_FLAG="-cJvf" + archive_name="${archive_name%.tgz}_nightly.${COMPRESS_EXT}" + XZ_THREADS="${JOBS:-$(nproc)}" + export XZ_OPT="--threads=${XZ_THREADS}" +fi + +# Create tarball +tar_file="${archive_root}/${archive_name}" +log "Compressing ${archive_dir} into ${tar_file}" +tar ${COMPRESS_FLAG} "${tar_file}" -C 
"${archive_dir}" . + +# Copy artifact if destination provided +if [[ -n "${ARTIFACT_DIR}" ]]; then + mkdir -p "${ARTIFACT_DIR}" + cp "${tar_file}" "${ARTIFACT_DIR}/" + log "Artifact copied to ${ARTIFACT_DIR}/${archive_name}" +else + warn "Artifact left at ${tar_file}" +fi diff --git a/qualcomm-software/scripts/build_linux_runtimes.sh b/qualcomm-software/embedded/scripts/build_linux_runtimes.sh similarity index 79% rename from qualcomm-software/scripts/build_linux_runtimes.sh rename to qualcomm-software/embedded/scripts/build_linux_runtimes.sh index 4e3907d9f17a..0d1518f4a90a 100755 --- a/qualcomm-software/scripts/build_linux_runtimes.sh +++ b/qualcomm-software/embedded/scripts/build_linux_runtimes.sh @@ -1,10 +1,5 @@ #!/usr/bin/env bash -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -set -euo pipefail - usage() { cat <<'EOF' Usage: @@ -12,12 +7,14 @@ Usage: !!!! Note that all options must be specified + !!!! 
Note that the clang you use must be the clang you are installing + things into + Options: - --tools-path Path to directory to find Clang/LLVM tools --base-build-dir Directory for the build --base-install-dir Directory for the install + --resource-dir Resource directory for installing runtimes --llvm-src-dir Directory of the LLVM sources - --musl-src-dir Directory of the musl source dir --musl-emb-src-dir Directory of the musl-embedded source dir --download-dir Directory where extra projects are downloaded into EOF @@ -67,11 +64,10 @@ get_kernel_arch() { while [[ $# -gt 0 ]]; do case "$1" in - --tools-path) TOOLS_PATH="$2"; shift 2 ;; --base-build-dir) BASE_BUILD_DIR="$2"; shift 2 ;; --base-install-dir) BASE_INSTALL_DIR="$2"; shift 2;; + --resource-dir) RESOURCE_DIR="$2"; shift 2 ;; --llvm-src-dir) LLVM_BASE_DIR="$2"; shift 2 ;; - --musl-src-dir) MUSL_SRC_DIR="$2"; shift 2 ;; --musl-emb-src-dir) MUSL_EMB_SRC_DIR="$2"; shift 2 ;; --download-dir) DOWNLOAD_DIR="$2"; shift 2 ;; -h|--help) usage; exit 0 ;; @@ -80,17 +76,21 @@ while [[ $# -gt 0 ]]; do done # Require all flags be passed. -if [ -z "${TOOLS_PATH}" ] || - [ -z "${BASE_BUILD_DIR}" ] || +if [ -z "${BASE_BUILD_DIR}" ] || [ -z "${BASE_INSTALL_DIR}" ] || + [ -z "${RESOURCE_DIR}" ] || [ -z "${LLVM_BASE_DIR}" ] || - [ -z "${MUSL_SRC_DIR}" ] || [ -z "${MUSL_EMB_SRC_DIR}" ] || [ -z "${DOWNLOAD_DIR}" ]; then echo "All options must be specified"; usage; exit 1 fi -export PATH="${TOOLS_PATH}:${PATH}" +# Double check that the resource dirs match. We could remove the flag, +# but having something that prevents accidentally installing things to +# random other toolchains during testing is helpful. +if [[ "$(clang -print-resource-dir)" != "${RESOURCE_DIR}" ]]; then + echo "clang's resource dir does not match --resource-dir"; usage; exit 1 +fi JOBS=$(nproc) @@ -105,7 +105,18 @@ if [[ ! 
-d "${KERNEL_SOURCE_BASE_DIR}" ]]; then rm "${KERNEL_SOURCE_BASE}.tar.xz" fi -CLANG_RESOURCE_DIR="$(clang --print-resource-dir)" +# Source musl for use for RISC-V. In the past, we used different versions +# of musl for Arm and RISC-V with our RISC-V being significantly newer. We're +# only doing Linux support here, so source a matching version (1.2.5) from +# upstream. +MUSL_SOURCE_BASE="musl-1.2.5" +MUSL_SOURCE_BASE_DIR="${DOWNLOAD_DIR}/${MUSL_SOURCE_BASE}" +if [[ ! -d "${MUSL_SOURCE_BASE_DIR}" ]]; then + wget https://git.musl-libc.org/cgit/musl/snapshot/${MUSL_SOURCE_BASE}.tar.gz + tar xvf "${MUSL_SOURCE_BASE}.tar.gz" + rm "${MUSL_SOURCE_BASE}.tar.gz" +fi +popd >/dev/null # Variants to build and the basic set of compile flags to use for each. There's # surely more elegant ways of doing this, but this doesn't require any extra @@ -169,28 +180,9 @@ for VARIANT in "${VARIANTS[@]}"; do # Create a temporary sysroot to dump our libraries into--we'll sort out the # final install location later. - VARIANT_TMP_SYSROOT="${VARIANT_BASE_BUILD_DIR}/sysroot" + VARIANT_TMP_SYSROOT="${VARIANT_BASE_BUILD_DIR}/tmp_sysroot" mkdir -p "${VARIANT_TMP_SYSROOT}" - # We have an issue in that we want to build/install/distribute - # multiple, possibly conficting (different ABIs, etc.) variants. Parts of the - # subsequent build steps need to be able to find the correct set of libraries - # for the given variant being built out--basically, we need multilib or to - # be able to manually point to the correct set of libraries. There's two - # situations where this causes issues (assuming conflicting variants of the - # same triple): - # 1. Basic "can we compile/link a simple thing" tests (roughly) of the form: - # `clang --target=-linux-gnu test.c ` - # 2. Locating the builtins through `--print-libgcc-file-name`. This can - # happen in ex: `add_compiler_rt_runtime`. - # We have lots of options to work around the first case. For the second, - # `-resource-dir` seems to be the only option. 
So, setup a temporary - # resource dir per variant that we can build out and point to using - # `-resource-dir` in subsequent steps. We'll copy libraries to the appropriate - # places at the end. - VARIANT_TMP_RESOURCE_DIR="${VARIANT_TMP_SYSROOT}/resource-dir" - cp -r "${CLANG_RESOURCE_DIR}" "${VARIANT_TMP_RESOURCE_DIR}" - VARIANT_TARGET="$(get_target_from_flags ${BUILD_FLAGS})" VARIANT_ARCH="$(get_arch_from_flags ${VARIANT_BUILD_FLAGS[$VARIANT]})" VARIANT_KERNEL_ARCH="$(get_kernel_arch ${VARIANT_ARCH})" @@ -202,7 +194,7 @@ for VARIANT in "${VARIANTS[@]}"; do EXTRA_MUSL_CONFIGS="${VARIANT_MUSL_CONFIGS[$VARIANT]}" CMAKE_OPT_LEVEL="Release" if [[ "${VARIANT_ARCH}" =~ riscv ]]; then - MUSL_DIR="${MUSL_SRC_DIR}" + MUSL_DIR="${MUSL_SOURCE_BASE_DIR}" EXTRA_MUSL_CONFIGS="${EXTRA_MUSL_CONFIGS} \ --disable-shared" CMAKE_OPT_LEVEL="MinSizeRel" @@ -219,7 +211,7 @@ for VARIANT in "${VARIANTS[@]}"; do ARCH="${VARIANT_KERNEL_ARCH}" # Flags common to all libraries. - LIB_BUILD_FLAGS="${BUILD_FLAGS} -isystem${KERNEL_BUILD_BASE}/include -resource-dir ${VARIANT_TMP_RESOURCE_DIR} --sysroot=${VARIANT_TMP_SYSROOT}" + LIB_BUILD_FLAGS="${BUILD_FLAGS} -isystem${KERNEL_BUILD_BASE}/include --sysroot=${VARIANT_TMP_SYSROOT}" LIB_BUILD_FLAGS="${LIB_BUILD_FLAGS} -ffunction-sections -fdata-sections" # Install musl headers @@ -247,8 +239,8 @@ for VARIANT in "${VARIANTS[@]}"; do # at the moment so test links won't end well. And, we're only building # the builtins. 
cmake -G Ninja \ - -DCMAKE_INSTALL_PREFIX="${VARIANT_TMP_RESOURCE_DIR}" \ - -DCMAKE_SYSROOT="${VARIANT_TMP_SYSROOT}" \ + -DCMAKE_INSTALL_PREFIX="${RESOURCE_DIR}" \ + -DCMAKE_SYSROOT="${TMP_RESOURCE_DIR}" \ -DCMAKE_BUILD_TYPE="${CMAKE_OPT_LEVEL}" \ -DCMAKE_C_COMPILER="clang" \ -DCMAKE_CXX_COMPILER="clang++" \ @@ -275,7 +267,43 @@ for VARIANT in "${VARIANTS[@]}"; do -S "${LLVM_BASE_DIR}/runtimes" ninja -C "${BUILTINS_BUILD_DIR}" install + # **** Nasty hack **** + # We have an issue in that we want to build/install/distribute + # multiple, possibly conficting (different ABIs, etc.) variants. Parts of the + # subsequent build steps need to be able to find the correct set of libraries + # for the given variant being built out--basically, we need multilib or to + # be able to manually point to the correct set of libraries. AFAIK, there's + # no great way of handling this for Linux currently where builtins are + # concerned. There's two situations where this comes up: + # 1. Basic "can we compile/link a simple thing" tests (roughly) of the form: + # `clang --target=-linux-gnu test.c ` + # 2. Locating the builtins through `--print-libgcc-file-name`. This can + # happen in ex: `add_compiler_rt_runtime`. + # We have lots of options to work around the first case. In the second case, + # there doesn't seem to be a way to influence where clang looks for compiler-rt + # (without source changes) in a way that helps us--it always looks into + # some "fixed" path that, at best, is common for variants of the same triple. + # + # To work around this, we install the builtins in one of the expected places + # to let the various compile/link/--print-libgcc-file-name tests work + # correctly. Make an additional copy in our temporary sysroot though and at + # the end of building this variant, we'll delete the installed folder for + # the next variant. Once all variants are built, we can go through and install + # everything in the correct location again. 
+ # + # UPDATE/FIXME: Apparently `--resource-dir ` is a thing--this improves + # the situation a bit in that we can set up per-variant resource dirs for + # building rather than share the single "real" one in clang. That should + # at least allow us to relax the sequential ordering between same-target + # variants. This script is throwaway code so don't make this change here-- + # we'll address this post refactor. + mkdir -p "${VARIANT_TMP_SYSROOT}/lib" + VARIANT_RESOURCE_DIR="${RESOURCE_DIR}/lib/${VARIANT_TARGET}" + cp -r "${VARIANT_RESOURCE_DIR}" "${VARIANT_TMP_SYSROOT}/lib" + # Install musl, including the libraries this time. + # FIXME: we already configured above, is reconfiguring actually helpful? (Does + # it matter that the builtin path didn't exist previously?) echo "Installing musl libraries for ${VARIANT}" pushd "${VARIANT_MUSL_BUILD_DIR}" >/dev/null make distclean @@ -288,7 +316,7 @@ for VARIANT in "${VARIANTS[@]}"; do CROSS_COMPILE="llvm-" \ CC="clang --target=${VARIANT_TARGET} -fuse-ld=eld" \ CFLAGS="${LIB_BUILD_FLAGS} ${ARCH_MUSL_CFLAGS[$VARIANT_ARCH]}" \ - LIBCC="${VARIANT_TMP_RESOURCE_DIR}/lib/${VARIANT_TARGET}/libclang_rt.builtins.a" + LIBCC="${VARIANT_RESOURCE_DIR}/libclang_rt.builtins.a" make -j"${JOBS}" make install popd >/dev/null @@ -302,15 +330,15 @@ for VARIANT in "${VARIANTS[@]}"; do # -lc++. 
cmake -G Ninja \ -DCMAKE_INSTALL_PREFIX="${VARIANT_TMP_SYSROOT}" \ - -DCMAKE_SYSROOT="${VARIANT_TMP_SYSROOT}" \ + -DCMAKE_SYSROOT="${TMP_RESOURCE_DIR}" \ -DCMAKE_BUILD_TYPE="${CMAKE_OPT_LEVEL}" \ -DCMAKE_C_COMPILER="clang" \ -DCMAKE_CXX_COMPILER="clang++" \ -DCMAKE_SYSTEM_NAME="Linux" \ -DCMAKE_TRY_COMPILE_TARGET_TYPE="STATIC_LIBRARY" \ - -DCMAKE_ASM_COMPILER_TARGET="${VARIANT_TARGET}" \ - -DCMAKE_C_COMPILER_TARGET="${VARIANT_TARGET}" \ - -DCMAKE_CXX_COMPILER_TARGET="${VARIANT_TARGET}" \ + -DCMAKE_ASM_COMPILER_TARGET="${TARGET_TRIPLE}" \ + -DCMAKE_C_COMPILER_TARGET="${TARGET_TRIPLE}" \ + -DCMAKE_CXX_COMPILER_TARGET="${TARGET_TRIPLE}" \ -DCMAKE_ASM_FLAGS="${LIBCXX_COMPILE_FLAGS}" \ -DCMAKE_C_FLAGS="${LIBCXX_COMPILE_FLAGS}" \ -DCMAKE_CXX_FLAGS="${LIBCXX_COMPILE_FLAGS}" \ @@ -357,6 +385,8 @@ for VARIANT in "${VARIANTS[@]}"; do -DCOMPILER_RT_BUILD_MEMPROF=OFF" fi + # As a continuation of the hack above, just install these into the temp + # sysroot and we'll move them later. # FIXME: Disable fuzzers as well to work around (seemingly) an upstream bug. # `partially_link_libcxx` in fuzzer/CMakeLists.txt has a custom command that # invokes the linker, but it just uses the toolchain default. So, we get @@ -364,13 +394,13 @@ for VARIANT in "${VARIANTS[@]}"; do # our just-built lld with no way to override this. Note that re-enabling # this also requires messing with some libc++ configuration similar to # above. - # FIXME: Investigate if we can merge this with the libc++ build above as + # TODO: Investigate if we can merge this with the libc++ build above as # it'd simplify things a bit. 
Not sure how that works with install dirs echo "Installing compiler-rt for ${VARIANT}" COMPILER_RT_BUILD_DIR="${VARIANT_BASE_BUILD_DIR}/compiler-rt" cmake -G Ninja \ - -DCMAKE_INSTALL_PREFIX="${VARIANT_TMP_RESOURCE_DIR}" \ - -DCMAKE_SYSROOT="${VARIANT_TMP_SYSROOT}" \ + -DCMAKE_INSTALL_PREFIX="${VARIANT_TMP_SYSROOT}" \ + -DCMAKE_SYSROOT="${TMP_RESOURCE_DIR}" \ -DCMAKE_BUILD_TYPE="${CMAKE_OPT_LEVEL}" \ -DCMAKE_C_COMPILER="clang" \ -DCMAKE_CXX_COMPILER="clang++" \ @@ -398,6 +428,10 @@ for VARIANT in "${VARIANTS[@]}"; do -B "${COMPILER_RT_BUILD_DIR}" \ -S "${LLVM_BASE_DIR}/runtimes" ninja -C "${COMPILER_RT_BUILD_DIR}" install + + # As a continuation of the hack above, clean up the resource dir for the + # next variant. + rm -rf "${RESOURCE_DIR}/lib/${VARIANT_TARGET}" done # Move libraries into the final layout/install. The layout looks something @@ -409,17 +443,12 @@ done # and sanitizer binaries we can revisit this. echo "Copying libraries to their final locations" for VARIANT in "${VARIANTS[@]}"; do - VARIANT_TMP_SYSROOT="${BASE_BUILD_DIR}/${VARIANT}/sysroot" + VARIANT_TMP_SYSROOT="${BASE_BUILD_DIR}/${VARIANT}/tmp_sysroot" VARIANT_TARGET="$(get_target_from_flags ${VARIANT_BUILD_FLAGS[$VARIANT]})" + VARIANT_CRT_DEST="${RESOURCE_DIR}/lib/${VARIANT_TARGET}" + mkdir -p "${VARIANT_CRT_DEST}" + # Just move this so it isn't duplicated--we can fix this later. 
+ mv "${VARIANT_TMP_SYSROOT}/lib/${VARIANT_TARGET}" "${VARIANT_CRT_DEST}/${VARIANT}" mkdir -p "${BASE_INSTALL_DIR}/${VARIANT_TARGET}/${VARIANT}" - cp -r "${VARIANT_TMP_SYSROOT}"/include \ - "${VARIANT_TMP_SYSROOT}"/lib \ - -t "${BASE_INSTALL_DIR}/${VARIANT_TARGET}/${VARIANT}" - - mv "${VARIANT_TMP_SYSROOT}/resource-dir/lib/${VARIANT_TARGET}" \ - "${VARIANT_TMP_SYSROOT}/resource-dir/lib/temp" - mkdir -p "${VARIANT_TMP_SYSROOT}/resource-dir/lib/${VARIANT_TARGET}" - mv "${VARIANT_TMP_SYSROOT}/resource-dir/lib/temp" \ - "${VARIANT_TMP_SYSROOT}/resource-dir/lib/${VARIANT_TARGET}/${VARIANT}" - cp -r "${VARIANT_TMP_SYSROOT}/resource-dir" "${BASE_INSTALL_DIR}" + cp -r "${VARIANT_TMP_SYSROOT}"/* "${BASE_INSTALL_DIR}/${VARIANT_TARGET}/${VARIANT}" done diff --git a/qualcomm-software/embedded/tools/patchctl.py b/qualcomm-software/embedded/tools/patchctl.py new file mode 100644 index 000000000000..d6934547743c --- /dev/null +++ b/qualcomm-software/embedded/tools/patchctl.py @@ -0,0 +1,258 @@ +#!/usr/bin/env python3 +from __future__ import annotations + +import argparse, hashlib, json, os, subprocess, sys, textwrap +from dataclasses import dataclass +from pathlib import Path +from typing import List, Optional + +PY_MIN = (3, 8) +if sys.version_info < PY_MIN: + print(f"Python {PY_MIN[0]}.{PY_MIN[1]}+ is required.", file=sys.stderr) + sys.exit(2) + +def sh(*cmd: str, cwd: Optional[Path] = None, check: bool = False, capture: bool = False): + return subprocess.run(cmd, cwd=str(cwd) if cwd else None, + text=True, capture_output=capture, check=check) + +def git(*args: str, cwd: Path, check: bool = False, capture: bool = False): + return sh("git", *args, cwd=cwd, check=check, capture=capture) + +def load_yaml(path: Path) -> dict: + # JSON-in-YAML + import re + content = path.read_text(encoding="utf-8") + lines = [ln for ln in content.splitlines() if not ln.strip().startswith("#")] + try: + return json.loads("\n".join(lines)) + except json.JSONDecodeError: + raise SystemExit(f"Use 
JSON-compatible YAML for {path} or switch to a YAML parser.") + +def compute_series_hash(files: List[Path]) -> str: + h = hashlib.sha256() + for p in files: + h.update(p.name.encode()) + h.update(p.read_bytes()) + return h.hexdigest()[:12] + +@dataclass +class PatchSet: + name: str + repo: Path + patch_dir: Path + method: str + three_way: bool + restore_on_fail: bool + ignore_whitespace: bool + reset_to: str + ensure_identity: Optional[dict] + +@dataclass +class RepoSnapshot: + head: str + +STATE_DIR = Path(".git") / "patchctl" +STATE_FILE = STATE_DIR / "state.json" + +def collect_patches(d: Path) -> List[Path]: + if not d.exists(): + return [] + return sorted([p for p in d.glob("*.patch") if p.is_file()]) + +def preflight_apply_check(repo: Path, patches: List[Path], three_way: bool, ignore_ws: bool) -> bool: + # Dry-run using 'git apply --check' for each patch + for patch in patches: + cmd = ["apply", "--check"] + if ignore_ws: cmd.append("--ignore-whitespace") + if three_way: cmd.append("--3way") + cmd.append(str(patch)) + rc = git(*cmd, cwd=repo).returncode + if rc != 0: + print(f"[preflight] {repo} would fail on: {patch.name}") + return False + return True + +def read_state(repo: Path) -> dict: + f = repo / STATE_FILE + if f.exists(): + try: + return json.loads(f.read_text(encoding="utf-8")) + except Exception: + return {} + return {} + +def write_state(repo: Path, data: dict): + f = repo / STATE_FILE + (repo / STATE_DIR).mkdir(parents=True, exist_ok=True) + f.write_text(json.dumps(data, indent=2), encoding="utf-8") + +def snapshot_repo(repo: Path) -> RepoSnapshot: + head = git("rev-parse", "HEAD", cwd=repo, capture=True).stdout.strip() + return RepoSnapshot(head=head) + +def reset_repo(repo: Path, to: str): + git("reset", "--hard", to, cwd=repo, check=True) + git("clean", "-fdx", cwd=repo, check=True) + +def already_applied(repo: Path, patchset_name: str, series_hash: str) -> bool: + st = read_state(repo) + return st.get("patchsets", {}).get(patchset_name, "") 
== series_hash + +def stamp_applied(repo: Path, patchset_name: str, series_hash: str): + st = read_state(repo) + ps = st.get("patchsets", {}) + ps[patchset_name] = series_hash + st["patchsets"] = ps + write_state(repo, st) + +def apply_with_am(repo: Path, patches: List[Path], three_way: bool, ignore_ws: bool) -> int: + args = ["am", "-k"] + if three_way: args.append("--3way") + if ignore_ws: args.append("--ignore-whitespace") + args += [str(p) for p in patches] + r = git(*args, cwd=repo, capture=True) + sys.stdout.write(r.stdout or "") + sys.stderr.write(r.stderr or "") + if r.returncode != 0: + # try abort if needed + git("am", "--abort", cwd=repo) + return r.returncode + +def apply_with_apply(repo: Path, patches: List[Path], three_way: bool, ignore_ws: bool) -> int: + applied: List[Path] = [] + for p in patches: + check = ["apply", "--check"] + if ignore_ws: check.append("--ignore-whitespace") + if three_way: check.append("--3way") + check.append(str(p)) + if git(*check, cwd=repo).returncode != 0: + if applied: + # rollback + for prev in reversed(applied): + rev = ["apply", "--reverse"] + if ignore_ws: rev.append("--ignore-whitespace") + if three_way: rev.append("--3way") + rev.append(str(prev)) + git(*rev, cwd=repo) + return 1 + # apply + args = ["apply"] + if ignore_ws: args.append("--ignore-whitespace") + if three_way: args.append("--3way") + args.append(str(p)) + if git(*args, cwd=repo).returncode != 0: + return 1 + applied.append(p) + # stage and commit to make changes durable + git("add", "-A", cwd=repo, check=True) + msg = f"patchctl: applied {len(applied)} patches" + git("commit", "-m", msg, cwd=repo, check=True) + return 0 + +def parse_manifest(mpath: Path) -> List[PatchSet]: + doc = load_yaml(mpath) + defaults = doc.get("defaults", {}) + defm = defaults.get("method", "am") + thw = bool(defaults.get("three_way", True)) + rof = bool(defaults.get("restore_on_fail", True)) + igw = bool(defaults.get("ignore_whitespace", True)) + ident = 
defaults.get("ensure_identity", None) + out: List[PatchSet] = [] + base = mpath.parent + for ps in doc.get("patchsets", []): + out.append(PatchSet( + name=ps["name"], + repo=(base / ps["repo"]).resolve(), + patch_dir=(base / ps["patches"]).resolve(), + method=ps.get("method", defm), + three_way=bool(ps.get("three_way", thw)), + restore_on_fail=bool(ps.get("restore_on_fail", rof)), + ignore_whitespace=bool(ps.get("ignore_whitespace", igw)), + reset_to=str(ps.get("reset_to", "") or ""), + ensure_identity=ps.get("ensure_identity", ident), + )) + return out + +def cmd_apply(manifest: Path) -> int: + patchsets = parse_manifest(manifest) + + # Collect all patches and preflight across repos first (transaction-friendly). + repo_to_data = [] + for ps in patchsets: + if not (ps.repo / ".git").exists(): + print(f"[error] Not a git repo: {ps.repo}") + return 2 + patches = collect_patches(ps.patch_dir) + if not patches: + print(f"[info] no patches for {ps.name} at {ps.patch_dir} — skipping") + continue + series_hash = compute_series_hash(patches) + if already_applied(ps.repo, ps.name, series_hash): + print(f"[skip] {ps.name} already applied (series {series_hash})") + continue + if ps.reset_to: + print(f"[info] resetting {ps.name} to {ps.reset_to}") + snap = snapshot_repo(ps.repo) + repo_to_data.append((ps, patches, series_hash, snap)) + + # preflight all + for (ps, patches, _, _) in repo_to_data: + if ps.reset_to: + reset_repo(ps.repo, ps.reset_to) + if ps.ensure_identity: + ensure_identity(ps.repo, ps.ensure_identity) + if not preflight_apply_check(ps.repo, patches, ps.three_way, ps.ignore_whitespace): + print(f"[preflight] failed for {ps.name}. 
Aborting.") + # restore any resets + for (pps, _, __, snap) in repo_to_data: + reset_repo(pps.repo, snap.head) + return 1 + + # apply, transactional across repos + applied_ok: List[tuple[PatchSet, str, RepoSnapshot]] = [] + for (ps, patches, series_hash, snap) in repo_to_data: + print(f"[apply] {ps.name}: {len(patches)} patches via {ps.method}") + rc = apply_with_am(ps.repo, patches, ps.three_way, ps.ignore_whitespace) \ + if ps.method == "am" else \ + apply_with_apply(ps.repo, patches, ps.three_way, ps.ignore_whitespace) + + if rc != 0: + print(f"[fail] {ps.name} (rc={rc}). Rolling back previously-applied repos...") + # rollback those already done + for (done_ps, _, done_snap) in applied_ok: + reset_repo(done_ps.repo, done_snap.head) + # and rollback this one + reset_repo(ps.repo, snap.head) + return rc + # success + stamp_applied(ps.repo, ps.name, series_hash) + applied_ok.append((ps, series_hash, snap)) + print(f"[ok] {ps.name} applied (series {series_hash})") + + print("[done] all patchsets applied") + return 0 + +def main(argv: List[str]) -> int: + p = argparse.ArgumentParser( + prog="patchctl", + formatter_class=argparse.RawDescriptionHelpFormatter, + description=textwrap.dedent(""" + patchctl — cross-platform patch orchestrator for multi-repo workspaces. 
+ + Typical usage: + patchctl apply -f embedded/patchsets.yaml + """) + ) + sub = p.add_subparsers(dest="cmd", required=True) + p_apply = sub.add_parser("apply", help="Apply patchsets defined in the manifest") + p_apply.add_argument("-f", "--file", required=True, type=Path, help="Manifest path (YAML/JSON)") + args = p.parse_args(argv) + + if args.cmd == "apply": + return cmd_apply(args.file) + + return 0 + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) + diff --git a/qualcomm-software/musl-embedded.cfg b/qualcomm-software/musl-embedded.cfg deleted file mode 100644 index fbec52d5ea2f..000000000000 --- a/qualcomm-software/musl-embedded.cfg +++ /dev/null @@ -1 +0,0 @@ ---sysroot /../lib/clang-runtimes/musl-embedded \ No newline at end of file diff --git a/qualcomm-software/patches/eld/0001-Create-symlinks-via-llvm_add_tool_symlink-llvm_insta.patch b/qualcomm-software/patches/eld/0001-Create-symlinks-via-llvm_add_tool_symlink-llvm_insta.patch deleted file mode 100644 index 51e7e9e09713..000000000000 --- a/qualcomm-software/patches/eld/0001-Create-symlinks-via-llvm_add_tool_symlink-llvm_insta.patch +++ /dev/null @@ -1,252 +0,0 @@ -From 238c4c7053f6d7c2f12124c429db4bd9081d3e00 Mon Sep 17 00:00:00 2001 -From: Jonathon Penix -Date: Sun, 25 Jan 2026 14:15:14 -0800 -Subject: [PATCH 1/3] Create symlinks via - `llvm_add_tool_symlink`/`llvm_install_symlink` - -eld's symlink creation currently has a few issues/quirks in how it -interacts with CPack and where symlinks are placed in builds [1][2]. - -I think the easiest way to fix these issues is just to leverage LLVM's -cmake utilities `llvm_add_tool_symlink` and `llvm_install_symlink`. Also, -I think there's a few other advantages: -- The end result (created symlinks or copied/renamed binaries) should be the - same as before while allowing us to simplify eld's cmake. 
We are already - tightly coupled to LLVM/LLVM's cmake anyway -- eld should behave a bit more consistently to other LLVM tools now--for - example, if someone does want to force symlinks on Windows in LLVM, eld - will do the same - -The one (maybe negative, intended) functional change here is that the symlinks -now won't have their own targets. But: -- It isn't clear to me how helpful it is to have per-symlink targets - (any/all symlinks will be built/installed with ld.eld) -- This should be similar to how ex: lld's symlinks work -- Worst case, I think we can add them back without reverting any of this if - needed -So, I omitted them for now. - -[1] https://github.com/qualcomm/eld/issues/737 -[2] https://github.com/qualcomm/eld/issues/710 - -Signed-off-by: Jonathon Penix ---- - CMakeLists.txt | 10 ++--- - lib/LinkerWrapper/CMakeLists.txt | 2 +- - tools/eld/CMakeLists.txt | 68 +++++++------------------------- - tools/eld/eld_install.cmake.in | 7 ---- - tools/eld/ld_eld_symlink.cmake | 52 ------------------------ - 5 files changed, 20 insertions(+), 119 deletions(-) - delete mode 100644 tools/eld/eld_install.cmake.in - delete mode 100644 tools/eld/ld_eld_symlink.cmake - -diff --git a/CMakeLists.txt b/CMakeLists.txt -index c253e9e6..c1287f20 100755 ---- a/CMakeLists.txt -+++ b/CMakeLists.txt -@@ -9,6 +9,11 @@ set(ELD_BINARY_DIR - ${CMAKE_CURRENT_BINARY_DIR} - CACHE STRING "") - -+# Most users probably don't want/need to change this, but it is referenced -+# by `llvm_install_symlink` (and other llvm cmake utilities). 
-+set(ELD_TOOLS_INSTALL_DIR "bin" CACHE PATH "Path for binary subdirectory (defaults to 'bin')") -+mark_as_advanced(ELD_TOOLS_INSTALL_DIR) -+ - list(APPEND CMAKE_MODULE_PATH "${ELD_SOURCE_DIR}/cmake/modules") - list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/") - -@@ -61,11 +66,6 @@ set(ELD_DEFAULT_TARGET_TRIPLE - "${LLVM_DEFAULT_TARGET_TRIPLE}" - CACHE STRING "Default target for which LLVM will generate code.") - --# ############################################################################## --# ELD_RUNTIME_OUTPUT_DIR is needed since CMAKE_RUNTIME_OUTPUT_DIRECTORY appends --# config type by default. --set(ELD_RUNTIME_OUTPUT_DIR ${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin) -- - # These two variables are for runnable lit tests and should be set to the - # 32/64-bit emulators used to run the tests. - set(ELD_EMULATOR "") -diff --git a/lib/LinkerWrapper/CMakeLists.txt b/lib/LinkerWrapper/CMakeLists.txt -index af0e7a84..87ea4603 100644 ---- a/lib/LinkerWrapper/CMakeLists.txt -+++ b/lib/LinkerWrapper/CMakeLists.txt -@@ -109,7 +109,7 @@ endforeach() - # library is one of the dependent libraries of the linker. - if(ELD_ON_MSVC) - install(TARGETS LW -- RUNTIME DESTINATION bin -+ RUNTIME DESTINATION "${ELD_TOOLS_INSTALL_DIR}" - ARCHIVE DESTINATION lib) - else() - install(TARGETS LW LIBRARY DESTINATION lib) -diff --git a/tools/eld/CMakeLists.txt b/tools/eld/CMakeLists.txt -index 208918af..7e39b406 100644 ---- a/tools/eld/CMakeLists.txt -+++ b/tools/eld/CMakeLists.txt -@@ -3,6 +3,11 @@ option( - "Enable this option to create -link symlinks in the bin folder of both the build and install directories." 
- On) - -+macro(add_eld_symlink name dest) -+ llvm_add_tool_symlink(ELD ${name} ${dest} ALWAYS_GENERATE) -+ llvm_install_symlink(ELD ${name} ${dest} ALWAYS_GENERATE) -+endmacro() -+ - set(LLVM_LINK_COMPONENTS ${LLVM_TARGETS_TO_BUILD} irreader Support) - - add_llvm_executable(ld.eld eld.cpp) -@@ -11,24 +16,7 @@ add_dependencies(ld.eld update-eld-verinfo) - set(LINKER_WRAPPER_LIB "LW") - target_link_libraries(ld.eld PRIVATE ${LINKER_WRAPPER_LIB}) - --if(UNIX) -- set(LD_ELD_LINK_OR_COPY create_symlink) -- set(ld_eld_binary "ld.eld${CMAKE_EXECUTABLE_SUFFIX}") --else() -- set(LD_ELD_LINK_OR_COPY copy) -- set(ld_eld_binary -- "${ELD_RUNTIME_OUTPUT_DIR}/ld.eld${CMAKE_EXECUTABLE_SUFFIX}") --endif() -- --# This is to support "install/local" from eld build directory --configure_file(eld_install.cmake.in -- ${CMAKE_CURRENT_BINARY_DIR}/eld_install.cmake) -- --# We can either reconfigure or copy the same file. This is to support "install" --# from top level llvm build directory --configure_file(eld_install.cmake.in ${CMAKE_BINARY_DIR}/eld_install.cmake) -- --install(TARGETS ld.eld RUNTIME DESTINATION bin) -+install(TARGETS ld.eld RUNTIME DESTINATION "${ELD_TOOLS_INSTALL_DIR}") - - # FIXME: We need to switch to ELD_TARGETS_TO_BUILD after the buildbot upgrade - if("${LLVM_TARGETS_TO_BUILD}" MATCHES "Hexagon" AND "${TARGET_TRIPLE}" MATCHES -@@ -39,44 +27,16 @@ if(ELD_CREATE_SYMLINKS) - foreach(target_name ${LLVM_TARGETS_TO_BUILD}) - string(TOLOWER ${target_name} ld_eld_name) - if(${is_hexagon_linux}) -- set(ld_eld_name "${ld_eld_name}-linux-link${CMAKE_EXECUTABLE_SUFFIX}") -+ set(ld_eld_name "${ld_eld_name}-linux-link") - else(NOT ${is_hexagon_linux}) -- set(ld_eld_name "${ld_eld_name}-link${CMAKE_EXECUTABLE_SUFFIX}") -- endif(${is_hexagon_linux}) -+ set(ld_eld_name "${ld_eld_name}-link") -+ endif() - if(${target_name} MATCHES "X86") -- set(ld_eld_name "x86_64-link${CMAKE_EXECUTABLE_SUFFIX}") -+ set(ld_eld_name "x86_64-link") - endif() -- add_custom_command( -- TARGET ld.eld -- 
POST_BUILD -- COMMAND ${CMAKE_COMMAND} -E ${LD_ELD_LINK_OR_COPY} "${ld_eld_binary}" -- "${ld_eld_name}" -- WORKING_DIRECTORY "${ELD_RUNTIME_OUTPUT_DIR}") -- -- add_custom_target( -- ${ld_eld_name} -- DEPENDS ${ld_eld_binary} -- COMMAND ${CMAKE_COMMAND} -E ${LD_ELD_LINK_OR_COPY} "${ld_eld_binary}" -- "${ld_eld_name}" -- WORKING_DIRECTORY "${ELD_RUNTIME_OUTPUT_DIR}") -- endforeach(target_name) -+ add_eld_symlink("${ld_eld_name}" ld.eld) -+ endforeach() - endif() --if(DEFINED USE_LINKER_ALT_NAME) -- if(NOT "${USE_LINKER_ALT_NAME}" STREQUAL "") -- add_custom_command( -- TARGET ld.eld -- POST_BUILD -- COMMAND ${CMAKE_COMMAND} -E ${LD_ELD_LINK_OR_COPY} "${ld_eld_binary}" -- "${USE_LINKER_ALT_NAME}" -- WORKING_DIRECTORY "${ELD_RUNTIME_OUTPUT_DIR}") -- add_custom_target( -- ${USE_LINKER_ALT_NAME} -- DEPENDS ${ld_eld_binary} -- COMMAND ${CMAKE_COMMAND} -E ${LD_ELD_LINK_OR_COPY} "${ld_eld_binary}" -- "${USE_LINKER_ALT_NAME}" -- WORKING_DIRECTORY "${ELD_RUNTIME_OUTPUT_DIR}") -- endif() -+if(NOT "${USE_LINKER_ALT_NAME}" STREQUAL "") -+ add_eld_symlink("${USE_LINKER_ALT_NAME}" ld.eld) - endif() -- --# Create the symlink at installation time. --install(SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/ld_eld_symlink.cmake) -diff --git a/tools/eld/eld_install.cmake.in b/tools/eld/eld_install.cmake.in -deleted file mode 100644 -index a50ce822..00000000 ---- a/tools/eld/eld_install.cmake.in -+++ /dev/null -@@ -1,7 +0,0 @@ --# install.cmake.in corresponding to install.cmake. 
The list of targets passed --# are used by installer to figure out the *-link shortcuts/copies for ld.eld -- --set(LINK_TARGETS @LLVM_TARGETS_TO_BUILD@) --set(USE_LINKER_ALT_NAME @USE_LINKER_ALT_NAME@) --set(TARGET_TRIPLE @TARGET_TRIPLE@) --set(ELD_CREATE_SYMLINKS @ELD_CREATE_SYMLINKS@) -diff --git a/tools/eld/ld_eld_symlink.cmake b/tools/eld/ld_eld_symlink.cmake -deleted file mode 100644 -index 379481de..00000000 ---- a/tools/eld/ld_eld_symlink.cmake -+++ /dev/null -@@ -1,52 +0,0 @@ --# CMAKE_BINARY_DIR is context dependent --if(EXISTS ${CMAKE_BINARY_DIR}/eld_install.cmake) -- include(${CMAKE_BINARY_DIR}/eld_install.cmake) --else() -- include(${CMAKE_BINARY_DIR}/tools/eld/eld_install.cmake) --endif() -- --if(UNIX) -- set(LD_ELD_LINK_OR_COPY create_symlink) -- set(LD_ELD_DESTDIR $ENV{DESTDIR}) --else() -- set(LD_ELD_LINK_OR_COPY copy) --endif() -- --# CMAKE_EXECUTABLE_SUFFIX is undefined on cmake scripts. See PR9286. --if(WIN32) -- set(EXECUTABLE_SUFFIX ".exe") --else() -- set(EXECUTABLE_SUFFIX "") --endif() -- --set(bindir "${CMAKE_INSTALL_PREFIX}/bin/") --set(ld_eld "ld.eld${EXECUTABLE_SUFFIX}") -- --if("${LINK_TARGETS}" MATCHES "Hexagon" AND "${TARGET_TRIPLE}" MATCHES -- "hexagon-unknown-linux") -- set(is_hexagon_linux 1) --endif() -- --# Create symlinks at install only if ELD_CREATE_SYMLINKS is ON --if (ELD_CREATE_SYMLINKS) -- foreach(target_name ${LINK_TARGETS}) -- string(TOLOWER ${target_name} ld_eld_name) -- if(${is_hexagon_linux}) -- set(ld_eld_symlink "${ld_eld_name}-linux-link${EXECUTABLE_SUFFIX}") -- else(NOT ${is_hexagon_linux}) -- set(ld_eld_symlink "${ld_eld_name}-link${EXECUTABLE_SUFFIX}") -- endif(${is_hexagon_linux}) -- -- message("Creating ${ld_eld_symlink} symlink based on ${ld_eld}") -- execute_process( -- COMMAND "${CMAKE_COMMAND}" -E ${LD_ELD_LINK_OR_COPY} "${ld_eld}" -- "${ld_eld_symlink}" WORKING_DIRECTORY "${bindir}") -- endforeach(target_name) --endif() -- --if(NOT "${USE_LINKER_ALT_NAME}" STREQUAL "") -- message("Creating 
${USE_LINKER_ALT_NAME} symlink based on ${ld_eld}") -- execute_process( -- COMMAND "${CMAKE_COMMAND}" -E ${LD_ELD_LINK_OR_COPY} "${ld_eld}" -- "${USE_LINKER_ALT_NAME}" WORKING_DIRECTORY "${bindir}") --endif() --- -2.43.0 - diff --git a/qualcomm-software/patches/eld/0002-Add-basic-LLVM_DISTRIBUTION_COMPONENTS-compatibility.patch b/qualcomm-software/patches/eld/0002-Add-basic-LLVM_DISTRIBUTION_COMPONENTS-compatibility.patch deleted file mode 100644 index e74240d1f967..000000000000 --- a/qualcomm-software/patches/eld/0002-Add-basic-LLVM_DISTRIBUTION_COMPONENTS-compatibility.patch +++ /dev/null @@ -1,122 +0,0 @@ -From 034d25fd3d45d892aa47c5e11beb172ec52e3a2e Mon Sep 17 00:00:00 2001 -From: Jonathon Penix -Date: Tue, 6 Jan 2026 18:59:15 -0800 -Subject: [PATCH 2/3] Add basic LLVM_DISTRIBUTION_COMPONENTS compatibility - -LLVM_DISTRIBUTION_COMPONENTS seems to require that each component have -a) install-* and install-*-stripped targets and b) a component associated with -the things to install. - -So, there's a few separate changes made to add this support into eld: -- For each of the components we want to support, `add_llvm_install_targets` - is used to create the install* and install-*-stripped targets. I think this - is the easiest (and a recommended) way to do this. -- I think eld has five components we need to handle: ld.eld, LW, - PluginAPIHeaders, linker-script (templates), and YAMLMapParser. Each - of these had the appropriate install targets and components added, as - needed. Hopefully I'm not missing any additional components we need to - consider for distributions. - - It might make sense to more aggressively couple some of these, but I - think the granularity here matches pretty well with available CMake - options (to ex: disable the YAMLMapParser) and current toolchains with - eld included (where the PluginAPIHeaders, templates, etc. may be stripped - out). So, I left this as-is. 
-- A dependency was added between the ld.eld and LW install targets--without - these ld.eld would always fail at runtime about the missing LW library, if - only ex: `ninja install-ld.eld` was run. Which, doesn't seem that helpful. - So, make ld.eld's install depend on LW's. - -Expected usage looks something like: - -`-DLLVM_DISTRIBUTION_COMPONENTS='...;ld.eld;LW;PluginAPIHeaders;linker-script;YAMLMapParser'` - -and these items can generally be added/removed based on what people want in -the distribution (modulo ld.eld's dependency on LW). - -And, as per the install-* target requirement mentioned above, each of -these components can now be individually installed with ex: install-ld.eld. - -Fixes #693 - -Signed-off-by: Jonathon Penix ---- - CMakeLists.txt | 17 ++++++++++++++++- - include/eld/CMakeLists.txt | 8 ++++++++ - tools/eld/CMakeLists.txt | 10 +++++++++- - 3 files changed, 33 insertions(+), 2 deletions(-) - -diff --git a/CMakeLists.txt b/CMakeLists.txt -index c1287f20..cec116e6 100755 ---- a/CMakeLists.txt -+++ b/CMakeLists.txt -@@ -212,6 +212,13 @@ install( - PATTERN "*.template" - PATTERN ".git" EXCLUDE - PATTERN ".svn" EXCLUDE) -+if (NOT LLVM_ENABLE_IDE) -+ # LLVM_DISTRIBUTION_COMPONENTS requires that each component have both a -+ # component and an install-component target, so add appropriate dummy targets. -+ add_custom_target(linker-script) -+ add_llvm_install_targets(install-linker-script -+ COMPONENT linker-script) -+endif() - - set(ELD_INSTALL_YAML_MAP_PARSER - ON -@@ -224,5 +231,13 @@ if(ELD_INSTALL_YAML_MAP_PARSER) - set(ELD_YAML_MAP_PARSER_DEST_DIR "bin") - endif() - install(PROGRAMS utils/YAMLMapParser/YAMLMapParser.py -- DESTINATION ${ELD_YAML_MAP_PARSER_DEST_DIR}) -+ DESTINATION ${ELD_YAML_MAP_PARSER_DEST_DIR} -+ COMPONENT YAMLMapParser) -+ if (NOT LLVM_ENABLE_IDE) -+ # LLVM_DISTRIBUTION_COMPONENTS requires that each component have both a -+ # component and an install-component target, so add appropriate dummy targets. 
-+ add_custom_target(YAMLMapParser) -+ add_llvm_install_targets(install-YAMLMapParser -+ COMPONENT YAMLMapParser) -+ endif() - endif() -diff --git a/include/eld/CMakeLists.txt b/include/eld/CMakeLists.txt -index f3c69b44..320889ac 100644 ---- a/include/eld/CMakeLists.txt -+++ b/include/eld/CMakeLists.txt -@@ -21,4 +21,12 @@ if(ENABLE_ELD_PLUGIN_SUPPORT) - DESTINATION include/ELD/PluginAPI/ - COMPONENT PluginAPIHeaders) - endif() -+ -+ if (NOT LLVM_ENABLE_IDE) -+ # LLVM_DISTRIBUTION_COMPONENTS requires that each component have both a -+ # component and an install-component target, so add appropriate dummy targets. -+ add_custom_target(PluginAPIHeaders) -+ add_llvm_install_targets(install-PluginAPIHeaders -+ COMPONENT PluginAPIHeaders) -+ endif() - endif() -diff --git a/tools/eld/CMakeLists.txt b/tools/eld/CMakeLists.txt -index 7e39b406..8f6f86b9 100644 ---- a/tools/eld/CMakeLists.txt -+++ b/tools/eld/CMakeLists.txt -@@ -16,7 +16,15 @@ add_dependencies(ld.eld update-eld-verinfo) - set(LINKER_WRAPPER_LIB "LW") - target_link_libraries(ld.eld PRIVATE ${LINKER_WRAPPER_LIB}) - --install(TARGETS ld.eld RUNTIME DESTINATION "${ELD_TOOLS_INSTALL_DIR}") -+install(TARGETS ld.eld RUNTIME DESTINATION "${ELD_TOOLS_INSTALL_DIR}" COMPONENT ld.eld) -+if (NOT LLVM_ENABLE_IDE) -+ add_llvm_install_targets(install-ld.eld -+ DEPENDS ld.eld -+ COMPONENT ld.eld) -+ add_dependencies(install-ld.eld install-${LINKER_WRAPPER_LIB}) -+ add_dependencies(install-ld.eld-stripped -+ install-${LINKER_WRAPPER_LIB}-stripped) -+endif() - - # FIXME: We need to switch to ELD_TARGETS_TO_BUILD after the buildbot upgrade - if("${LLVM_TARGETS_TO_BUILD}" MATCHES "Hexagon" AND "${TARGET_TRIPLE}" MATCHES --- -2.43.0 - diff --git a/qualcomm-software/patches/eld/0003-Place-ELDExpectedUsage-in-LLVM_BINARY_DIR-relative-l.patch b/qualcomm-software/patches/eld/0003-Place-ELDExpectedUsage-in-LLVM_BINARY_DIR-relative-l.patch deleted file mode 100644 index fdac82048a72..000000000000 --- 
a/qualcomm-software/patches/eld/0003-Place-ELDExpectedUsage-in-LLVM_BINARY_DIR-relative-l.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 92ea1b21404d45209bf2bb0756ccc42105eec1ce Mon Sep 17 00:00:00 2001 -From: Jonathon Penix -Date: Sun, 25 Jan 2026 15:24:27 -0800 -Subject: [PATCH 3/3] Place ELDExpectedUsage in `LLVM_BINARY_DIR`-relative - location - -ELDExpected.test looks for ELDExpectedUsage in an `%llvmobjroot`- -relative dir (which IIUC boils down to a `LLVM_BINARY_DIR`-relative dir). -ELDExpectedUsage is currently installed in a `CMAKE_BINARY_DIR`-relative -dir. `LLVM_BINARY_DIR` and `CMAKE_BINARY_DIR` aren't always the same--they -can be different when llvm is added as a subdirectory. When this is the -case, we see test failures due to the mismatched location--see [1] for -details. - -To fix this, just place ELDExpectedUsage in a `LLVM_BINARY_DIR`-relative -location as that is what the tests expect and it matches the behavior -when `LLVM_BINARY_DIR == CMAKE_BINARY_DIR`. - -This partially fixes [1], the symlink/wrapper part is handled in [2]. 
- -[1] https://github.com/qualcomm/eld/issues/710 -[2] https://github.com/qualcomm/eld/pull/738 - -Signed-off-by: Jonathon Penix ---- - test/Common/standalone/PluginAPI/CMakeLists.txt | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/test/Common/standalone/PluginAPI/CMakeLists.txt b/test/Common/standalone/PluginAPI/CMakeLists.txt -index f19ed7e5..b37a6264 100644 ---- a/test/Common/standalone/PluginAPI/CMakeLists.txt -+++ b/test/Common/standalone/PluginAPI/CMakeLists.txt -@@ -3,5 +3,5 @@ target_include_directories(ELDExpectedUsage - PRIVATE ${CMAKE_SOURCE_DIR}/include/eld/PluginAPI) - target_link_libraries(ELDExpectedUsage PRIVATE LW) - set_target_properties(ELDExpectedUsage PROPERTIES RUNTIME_OUTPUT_DIRECTORY -- ${CMAKE_BINARY_DIR}/bin/tests) -+ ${LLVM_BINARY_DIR}/bin/tests) - set_target_properties(ELDExpectedUsage PROPERTIES INSTALL_RPATH "\$ORIGIN/../../lib") --- -2.43.0 - diff --git a/qualcomm-software/patches/llvm-project/0001-Define-_LIBCPP_HAS_C8RTOMB_MBRTOC8.patch b/qualcomm-software/patches/llvm-project/0001-Define-_LIBCPP_HAS_C8RTOMB_MBRTOC8.patch deleted file mode 100644 index 839fd316258c..000000000000 --- a/qualcomm-software/patches/llvm-project/0001-Define-_LIBCPP_HAS_C8RTOMB_MBRTOC8.patch +++ /dev/null @@ -1,51 +0,0 @@ -From 3f847ac6d71dbe36ddbda74c200f9cccb47bfb42 Mon Sep 17 00:00:00 2001 -From: Simi Pallipurath -Date: Wed, 26 Nov 2025 09:26:52 +0000 -Subject: Define _LIBCPP_HAS_C8RTOMB_MBRTOC8 - -LLVM libcxx does not define the `char8_t` related functions, instead -delegating their definitions to the underlying C library. - -libcxx defines a macro called `_LIBCPP_HAS_C8RTOMB_MBRTOC8` when it -infers that the underlying C library provides these functions. - -picolibc provides the `char8_t` related functions regardless of the C++ -version used, but this support only landed after version 1.8.8 and, at -the time of writing, has not made into any released version yet. 
- -This is a temporary fix and should be removed when a picolibc release -includes the support for `char8_t` and its related functions. When it's -time to implement a proper solution, one needs to create logic to detect -the picolibc version and define the macro accordingly. The macros that -govern picolibc version are in `picolibc.h`. ---- - libcxx/include/__config | 13 ++----------- - 1 file changed, 2 insertions(+), 11 deletions(-) - -diff --git a/libcxx/include/__config b/libcxx/include/__config -index e758acfa870a..e8b9132c323a 100644 ---- a/libcxx/include/__config -+++ b/libcxx/include/__config -@@ -820,17 +820,8 @@ typedef __char32_t char32_t; - // functions is gradually being added to existing C libraries. The conditions - // below check for known C library versions and conditions under which these - // functions are declared by the C library. --// --// GNU libc 2.36 and newer declare c8rtomb() and mbrtoc8() in C++ modes if --// __cpp_char8_t is defined or if C2X extensions are enabled. Determining --// the latter depends on internal GNU libc details that are not appropriate --// to depend on here, so any declarations present when __cpp_char8_t is not --// defined are ignored. --# if _LIBCPP_GLIBC_PREREQ(2, 36) && defined(__cpp_char8_t) --# define _LIBCPP_HAS_C8RTOMB_MBRTOC8 1 --# else --# define _LIBCPP_HAS_C8RTOMB_MBRTOC8 0 --# endif -+// For picolibc: -+#define _LIBCPP_HAS_C8RTOMB_MBRTOC8 1 - - // There are a handful of public standard library types that are intended to - // support CTAD but don't need any explicit deduction guides to do so. 
This --- -2.34.1 - diff --git a/qualcomm-software/patches/picolibc/0001-Enable-libcxx-builds.patch b/qualcomm-software/patches/picolibc/0001-Enable-libcxx-builds.patch deleted file mode 100644 index 28aecd46cb8b..000000000000 --- a/qualcomm-software/patches/picolibc/0001-Enable-libcxx-builds.patch +++ /dev/null @@ -1,52 +0,0 @@ -From a59afdaf3697da7a1cfc62e0f957be780d6ae11a Mon Sep 17 00:00:00 2001 -From: Simi Pallipurath -Date: Thu, 14 Nov 2024 10:07:08 +0000 -Subject: Enable libcxx builds - -Modifications to build config and linker script required to enable -libc++ builds. ---- - meson.build | 12 ++++++++++++ - picolibc.ld.in | 3 +++ - 2 files changed, 15 insertions(+) - -diff --git a/meson.build b/meson.build -index f33d011b2..2c653de02 100644 ---- a/meson.build -+++ b/meson.build -@@ -1340,6 +1340,18 @@ NEWLIB_MAJOR_VERSION=4 - NEWLIB_MINOR_VERSION=3 - NEWLIB_PATCHLEVEL_VERSION=0 - -+conf_data.set('_GNU_SOURCE', '', -+ description: '''Enable GNU functions like strtof_l. -+It's necessary to set this globally because inline functions in -+libc++ headers call the GNU functions.''' -+) -+ -+conf_data.set('_PICOLIBC_CTYPE_SMALL', '0', -+ description: '''Disable picolibc's small ctype implementation. 
-+libc++ expects newlib-style ctype tables, and also expects support for locales -+and extended character sets, so picolibc's small ctype is not compatible with it''' -+) -+ - conf_data.set('__HAVE_CC_INHIBIT_LOOP_TO_LIBCALL', - cc.has_argument('-fno-tree-loop-distribute-patterns'), - description: 'Compiler flag to prevent detecting memcpy/memset patterns') -diff --git a/picolibc.ld.in b/picolibc.ld.in -index 0bcfe4ca8..c3055c49e 100644 ---- a/picolibc.ld.in -+++ b/picolibc.ld.in -@@ -69,6 +69,9 @@ SECTIONS - *(.literal.startup .text.startup .literal.startup.* .text.startup.*) - *(SORT(.text.sorted.*)) - *(.literal .text .literal.* .text.* .opd .opd.* .branch_lt .branch_lt.* @EXTRA_TEXT_SECTIONS@) -+ PROVIDE (__start___lcxx_override = .); -+ *(__lcxx_override) -+ PROVIDE (__stop___lcxx_override = .); - *(.gnu.linkonce.t.*) - KEEP (*(.fini .fini.*)) - @PREFIX@__text_end = .; --- -2.43.0 - diff --git a/qualcomm-software/patches/picolibc/0003-Add-support-for-strict-align-no-unaligned-access-in-.patch b/qualcomm-software/patches/picolibc/0003-Add-support-for-strict-align-no-unaligned-access-in-.patch deleted file mode 100644 index 8b179229df1d..000000000000 --- a/qualcomm-software/patches/picolibc/0003-Add-support-for-strict-align-no-unaligned-access-in-.patch +++ /dev/null @@ -1,431 +0,0 @@ -From 8e46182db50f6a8ee0b67354887e606344303b7c Mon Sep 17 00:00:00 2001 -From: Lucas Prates -Date: Mon, 11 Nov 2024 16:37:04 +0000 -Subject: Add support for strict-align/no-unaligned-access in AArch64 - ---- - newlib/libc/machine/aarch64/memchr-stub.c | 2 +- - newlib/libc/machine/aarch64/memchr.S | 2 +- - newlib/libc/machine/aarch64/memcmp-stub.c | 2 +- - newlib/libc/machine/aarch64/memcmp.S | 2 +- - newlib/libc/machine/aarch64/memcpy-stub.c | 2 +- - newlib/libc/machine/aarch64/memcpy.S | 2 +- - newlib/libc/machine/aarch64/memmove-stub.c | 2 +- - newlib/libc/machine/aarch64/memrchr-stub.c | 2 +- - newlib/libc/machine/aarch64/memrchr.S | 2 +- - 
newlib/libc/machine/aarch64/memset-stub.c | 2 +- - newlib/libc/machine/aarch64/memset.S | 2 +- - newlib/libc/machine/aarch64/rawmemchr-stub.c | 2 +- - newlib/libc/machine/aarch64/rawmemchr.S | 2 +- - newlib/libc/machine/aarch64/stpcpy-stub.c | 2 +- - newlib/libc/machine/aarch64/strchr-stub.c | 2 +- - newlib/libc/machine/aarch64/strchr.S | 2 +- - newlib/libc/machine/aarch64/strchrnul-stub.c | 2 +- - newlib/libc/machine/aarch64/strchrnul.S | 2 +- - newlib/libc/machine/aarch64/strcmp-stub.c | 2 +- - newlib/libc/machine/aarch64/strcmp.S | 2 +- - newlib/libc/machine/aarch64/strcpy-stub.c | 2 +- - newlib/libc/machine/aarch64/strcpy.S | 2 +- - newlib/libc/machine/aarch64/strlen-stub.c | 2 +- - newlib/libc/machine/aarch64/strlen.S | 2 +- - newlib/libc/machine/aarch64/strncmp-stub.c | 2 +- - newlib/libc/machine/aarch64/strncmp.S | 2 +- - newlib/libc/machine/aarch64/strnlen-stub.c | 2 +- - newlib/libc/machine/aarch64/strnlen.S | 2 +- - newlib/libc/machine/aarch64/strrchr-stub.c | 2 +- - newlib/libc/machine/aarch64/strrchr.S | 2 +- - 30 files changed, 30 insertions(+), 30 deletions(-) - -diff --git a/newlib/libc/machine/aarch64/memchr-stub.c b/newlib/libc/machine/aarch64/memchr-stub.c -index c2fabf07f..e688cf166 100644 ---- a/newlib/libc/machine/aarch64/memchr-stub.c -+++ b/newlib/libc/machine/aarch64/memchr-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/memchr.c" - #else - /* See memchr.S */ -diff --git a/newlib/libc/machine/aarch64/memchr.S b/newlib/libc/machine/aarch64/memchr.S -index da1163221..dd5dec477 100644 ---- a/newlib/libc/machine/aarch64/memchr.S -+++ b/newlib/libc/machine/aarch64/memchr.S -@@ -7,7 +7,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined 
(__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See memchr-stub.c */ - #else - /* Assumptions: -diff --git a/newlib/libc/machine/aarch64/memcmp-stub.c b/newlib/libc/machine/aarch64/memcmp-stub.c -index 74518257b..8b3e2b374 100644 ---- a/newlib/libc/machine/aarch64/memcmp-stub.c -+++ b/newlib/libc/machine/aarch64/memcmp-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/memcmp.c" - #else - /* See memcmp.S */ -diff --git a/newlib/libc/machine/aarch64/memcmp.S b/newlib/libc/machine/aarch64/memcmp.S -index afe78fee3..9ca75c447 100644 ---- a/newlib/libc/machine/aarch64/memcmp.S -+++ b/newlib/libc/machine/aarch64/memcmp.S -@@ -6,7 +6,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See memcmp-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/memcpy-stub.c b/newlib/libc/machine/aarch64/memcpy-stub.c -index 6702a67dc..5ac7ca914 100644 ---- a/newlib/libc/machine/aarch64/memcpy-stub.c -+++ b/newlib/libc/machine/aarch64/memcpy-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/memcpy.c" - #else - /* See 
memcpy.S */ -diff --git a/newlib/libc/machine/aarch64/memcpy.S b/newlib/libc/machine/aarch64/memcpy.S -index 7ba3fe347..6ac4ea577 100644 ---- a/newlib/libc/machine/aarch64/memcpy.S -+++ b/newlib/libc/machine/aarch64/memcpy.S -@@ -13,7 +13,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_FEATURE_UNALIGNED) - /* See memcpy-stub.c */ - #else - #include "asmdefs.h" -diff --git a/newlib/libc/machine/aarch64/memmove-stub.c b/newlib/libc/machine/aarch64/memmove-stub.c -index 8b9154ffd..70e342a2c 100644 ---- a/newlib/libc/machine/aarch64/memmove-stub.c -+++ b/newlib/libc/machine/aarch64/memmove-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/memmove.c" - #else - /* See memcpy.S */ -diff --git a/newlib/libc/machine/aarch64/memrchr-stub.c b/newlib/libc/machine/aarch64/memrchr-stub.c -index 6d659d4d7..eea39c7ec 100644 ---- a/newlib/libc/machine/aarch64/memrchr-stub.c -+++ b/newlib/libc/machine/aarch64/memrchr-stub.c -@@ -6,7 +6,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - #include "../../string/memrchr.c" - #else - /* See memrchr.S */ -diff --git a/newlib/libc/machine/aarch64/memrchr.S b/newlib/libc/machine/aarch64/memrchr.S -index b0afa4eea..5c78cfb14 100644 ---- a/newlib/libc/machine/aarch64/memrchr.S -+++ b/newlib/libc/machine/aarch64/memrchr.S -@@ -13,7 +13,7 @@ - - #include - --#if (defined 
(__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See memrchr-stub.c */ - #else - #include "asmdefs.h" -diff --git a/newlib/libc/machine/aarch64/memset-stub.c b/newlib/libc/machine/aarch64/memset-stub.c -index a11c74573..c29cb0dae 100644 ---- a/newlib/libc/machine/aarch64/memset-stub.c -+++ b/newlib/libc/machine/aarch64/memset-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/memset.c" - #else - /* See memset.S */ -diff --git a/newlib/libc/machine/aarch64/memset.S b/newlib/libc/machine/aarch64/memset.S -index 51ea476e3..7db2e3a85 100644 ---- a/newlib/libc/machine/aarch64/memset.S -+++ b/newlib/libc/machine/aarch64/memset.S -@@ -13,7 +13,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See memset-stub.c */ - #else - #include "asmdefs.h" -diff --git a/newlib/libc/machine/aarch64/rawmemchr-stub.c b/newlib/libc/machine/aarch64/rawmemchr-stub.c -index 88d1e9472..3c379bfc4 100644 ---- a/newlib/libc/machine/aarch64/rawmemchr-stub.c -+++ b/newlib/libc/machine/aarch64/rawmemchr-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || 
!defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/rawmemchr.c" - #else - /* See rawmemchr.S. */ -diff --git a/newlib/libc/machine/aarch64/rawmemchr.S b/newlib/libc/machine/aarch64/rawmemchr.S -index 03e3b7525..9ed0464bf 100644 ---- a/newlib/libc/machine/aarch64/rawmemchr.S -+++ b/newlib/libc/machine/aarch64/rawmemchr.S -@@ -32,7 +32,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_FEATURE_UNALIGNED) - /* See rawmemchr-stub.c. */ - #else - -diff --git a/newlib/libc/machine/aarch64/stpcpy-stub.c b/newlib/libc/machine/aarch64/stpcpy-stub.c -index 0166c6785..9dd10b2a4 100644 ---- a/newlib/libc/machine/aarch64/stpcpy-stub.c -+++ b/newlib/libc/machine/aarch64/stpcpy-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/stpcpy.c" - #else - /* See stpcpy.S */ -diff --git a/newlib/libc/machine/aarch64/strchr-stub.c b/newlib/libc/machine/aarch64/strchr-stub.c -index 0dcb5ca54..e187fcb4e 100644 ---- a/newlib/libc/machine/aarch64/strchr-stub.c -+++ b/newlib/libc/machine/aarch64/strchr-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/strchr.c" - #else - /* See strchr.S */ -diff --git a/newlib/libc/machine/aarch64/strchr.S b/newlib/libc/machine/aarch64/strchr.S -index 2d79a3911..c22509df6 100644 ---- a/newlib/libc/machine/aarch64/strchr.S -+++ 
b/newlib/libc/machine/aarch64/strchr.S -@@ -28,7 +28,7 @@ - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strchr-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strchrnul-stub.c b/newlib/libc/machine/aarch64/strchrnul-stub.c -index 407b949a6..7065c42b5 100644 ---- a/newlib/libc/machine/aarch64/strchrnul-stub.c -+++ b/newlib/libc/machine/aarch64/strchrnul-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/strchrnul.c" - #else - /* See strchrnul.S */ -diff --git a/newlib/libc/machine/aarch64/strchrnul.S b/newlib/libc/machine/aarch64/strchrnul.S -index 5999c6d68..fff1102bd 100644 ---- a/newlib/libc/machine/aarch64/strchrnul.S -+++ b/newlib/libc/machine/aarch64/strchrnul.S -@@ -28,7 +28,7 @@ - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strchrnul-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strcmp-stub.c b/newlib/libc/machine/aarch64/strcmp-stub.c -index 77177bfe0..f74140acd 100644 ---- a/newlib/libc/machine/aarch64/strcmp-stub.c -+++ b/newlib/libc/machine/aarch64/strcmp-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/strcmp.c" - #else - /* See strcmp.S */ -diff --git a/newlib/libc/machine/aarch64/strcmp.S b/newlib/libc/machine/aarch64/strcmp.S -index 0c8371d22..ae52d3e0e 100644 ---- a/newlib/libc/machine/aarch64/strcmp.S -+++ b/newlib/libc/machine/aarch64/strcmp.S -@@ -7,7 +7,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strcmp-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strcpy-stub.c b/newlib/libc/machine/aarch64/strcpy-stub.c -index af2e1dd3c..5adc53f76 100644 ---- a/newlib/libc/machine/aarch64/strcpy-stub.c -+++ b/newlib/libc/machine/aarch64/strcpy-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/strcpy.c" - #else - /* 
See strcpy.S */ -diff --git a/newlib/libc/machine/aarch64/strcpy.S b/newlib/libc/machine/aarch64/strcpy.S -index 5429c5c10..95987d4b3 100644 ---- a/newlib/libc/machine/aarch64/strcpy.S -+++ b/newlib/libc/machine/aarch64/strcpy.S -@@ -28,7 +28,7 @@ - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strcpy-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strlen-stub.c b/newlib/libc/machine/aarch64/strlen-stub.c -index 59b1289ff..2f7aa7305 100644 ---- a/newlib/libc/machine/aarch64/strlen-stub.c -+++ b/newlib/libc/machine/aarch64/strlen-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/strlen.c" - #else - /* See strlen.S */ -diff --git a/newlib/libc/machine/aarch64/strlen.S b/newlib/libc/machine/aarch64/strlen.S -index 8837c954b..f3b54c677 100644 ---- a/newlib/libc/machine/aarch64/strlen.S -+++ b/newlib/libc/machine/aarch64/strlen.S -@@ -25,7 +25,7 @@ - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strlen-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strncmp-stub.c b/newlib/libc/machine/aarch64/strncmp-stub.c -index 2202cb79e..caca7fa83 100644 ---- a/newlib/libc/machine/aarch64/strncmp-stub.c -+++ b/newlib/libc/machine/aarch64/strncmp-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/strncmp.c" - #else - /* See strncmp.S */ -diff --git a/newlib/libc/machine/aarch64/strncmp.S b/newlib/libc/machine/aarch64/strncmp.S -index ba7b89313..3b64978bf 100644 ---- a/newlib/libc/machine/aarch64/strncmp.S -+++ b/newlib/libc/machine/aarch64/strncmp.S -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strncmp-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strnlen-stub.c b/newlib/libc/machine/aarch64/strnlen-stub.c -index c4428f4f6..1e45ef066 100644 ---- a/newlib/libc/machine/aarch64/strnlen-stub.c -+++ b/newlib/libc/machine/aarch64/strnlen-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - # include "../../string/strnlen.c" 
- #else - /* See strnlen.S */ -diff --git a/newlib/libc/machine/aarch64/strnlen.S b/newlib/libc/machine/aarch64/strnlen.S -index f186fc13f..529e10276 100644 ---- a/newlib/libc/machine/aarch64/strnlen.S -+++ b/newlib/libc/machine/aarch64/strnlen.S -@@ -28,7 +28,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strnlen-stub.c */ - #else - -diff --git a/newlib/libc/machine/aarch64/strrchr-stub.c b/newlib/libc/machine/aarch64/strrchr-stub.c -index 0ec573222..4dfedee46 100644 ---- a/newlib/libc/machine/aarch64/strrchr-stub.c -+++ b/newlib/libc/machine/aarch64/strrchr-stub.c -@@ -26,7 +26,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined (__ARM_FEATURE_UNALIGNED) - # include "../../string/strrchr.c" - #else - /* See strrchr.S */ -diff --git a/newlib/libc/machine/aarch64/strrchr.S b/newlib/libc/machine/aarch64/strrchr.S -index a3ef3f8fe..f5ba3ad76 100644 ---- a/newlib/libc/machine/aarch64/strrchr.S -+++ b/newlib/libc/machine/aarch64/strrchr.S -@@ -29,7 +29,7 @@ - - #include - --#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) -+#if (defined (__OPTIMIZE_SIZE__) || defined (__PREFER_SIZE_OVER_SPEED)) || !defined(__LP64__) || !defined(__ARM_NEON) || !defined(__ARM_FEATURE_UNALIGNED) - /* See strrchr-stub.c */ - #else - --- -2.43.0 - diff --git a/qualcomm-software/scripts/build.ps1 b/qualcomm-software/scripts/build.ps1 deleted file mode 100755 index ac0ad60db809..000000000000 --- a/qualcomm-software/scripts/build.ps1 +++ /dev/null @@ -1,35 
+0,0 @@ -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# A Powershell script to build the toolchain - -# The script creates a build of the toolchain in the 'build' directory, inside -# the repository tree. - -$ErrorActionPreference = 'Stop' - -. "$PSScriptRoot\init_win_env.ps1" -Set-VS-Env - -$repoRoot = git -C $PSScriptRoot rev-parse --show-toplevel -$buildDir = (Join-Path $repoRoot build) - -mkdir $buildDir -cd $buildDir - -# Omit target runtimes on Windows builds. -cmake ..\qualcomm-software ` - -GNinja ` - -DFETCHCONTENT_QUIET=OFF ` - -DLLVM_TOOLCHAIN_DISTRIBUTION_COMPONENTS="llvm-toolchain-docs;llvm-toolchain-third-party-licenses" ` - -DPREBUILT_TARGET_LIBRARIES=ON ` - -DCMAKE_C_COMPILER=clang-cl ` - -DCMAKE_CXX_COMPILER=clang-cl - -ninja package-llvm-toolchain diff --git a/qualcomm-software/scripts/build.sh b/qualcomm-software/scripts/build.sh deleted file mode 100755 index 1db253c66a55..000000000000 --- a/qualcomm-software/scripts/build.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# The script creates a build of the toolchain in the 'build' directory, inside -# the repository tree. - -set -ex - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -REPO_ROOT=$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel ) - -clang --version - -export CC=clang -export CXX=clang++ - -mkdir -p "${REPO_ROOT}"/build -cd "${REPO_ROOT}"/build - -cmake ../qualcomm-software -GNinja -DFETCHCONTENT_QUIET=OFF -DENABLE_LINUX_LIBRARIES=ON ${EXTRA_CMAKE_ARGS} - -ninja package-llvm-toolchain diff --git a/qualcomm-software/scripts/build_musl-embedded_overlay.sh b/qualcomm-software/scripts/build_musl-embedded_overlay.sh deleted file mode 100755 index 2052f19a0dc6..000000000000 --- a/qualcomm-software/scripts/build_musl-embedded_overlay.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# A bash script to build the musl-embedded overlay. - -# The script creates a build of the toolchain in the 'build_musl-embedded_overlay' -# directory, inside the repository tree. 
- -set -ex - -export CC=clang -export CXX=clang++ - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -REPO_ROOT=$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel ) -BUILD_DIR=${REPO_ROOT}/build_musl-embedded_overlay - -mkdir -p "${BUILD_DIR}" -cd "${BUILD_DIR}" - -cmake ../qualcomm-software -GNinja -DFETCHCONTENT_QUIET=OFF -DLLVM_TOOLCHAIN_C_LIBRARY=musl-embedded -DLLVM_TOOLCHAIN_LIBRARY_OVERLAY_INSTALL=on -ninja package-llvm-toolchain - -# The package-llvm-toolchain target will produce a .tar.xz package, but we also -# want a zip version for Windows users -cpack -G ZIP diff --git a/qualcomm-software/scripts/build_no_runtimes.sh b/qualcomm-software/scripts/build_no_runtimes.sh deleted file mode 100755 index 41ea4ac5928e..000000000000 --- a/qualcomm-software/scripts/build_no_runtimes.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# The script creates a build of the toolchain in the 'build' directory, inside -# the repository tree *excluding* all of the target runtime libraries. -# -# FIXME: This is intended as a convenience script while dependencies on various -# builders are sorted out. This probably should be removed. 
- -set -ex - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -REPO_ROOT=$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel ) - -clang --version - -export CC=clang -export CXX=clang++ - -mkdir -p "${REPO_ROOT}"/build -cd "${REPO_ROOT}"/build - -cmake ../qualcomm-software \ - -GNinja \ - -DFETCHCONTENT_QUIET=OFF \ - -DLLVM_TOOLCHAIN_DISTRIBUTION_COMPONENTS="llvm-toolchain-docs;llvm-toolchain-third-party-licenses" \ - -DPREBUILT_TARGET_LIBRARIES=ON \ - ${EXTRA_CMAKE_ARGS} - -ninja package-llvm-toolchain diff --git a/qualcomm-software/scripts/init_win_env.ps1 b/qualcomm-software/scripts/init_win_env.ps1 deleted file mode 100755 index 6194c36e28f8..000000000000 --- a/qualcomm-software/scripts/init_win_env.ps1 +++ /dev/null @@ -1,49 +0,0 @@ -# A Powershell script to find and call vcvarsall to setup the environment for -# building with Visual Studio tools and libraries. -function Set-VS-Env { - # Host architecture detection - $hostArch = $env:PROCESSOR_ARCHITECTURE - switch -Regex ($hostArch) { - 'ARM64' { $hostArch = 'ARM64' } - 'AMD64' { $hostArch = 'x64' } - default { - Write-Error "*** ERROR: unrecognized PROCESSOR_ARCHITECTURE ***" - exit 1 - } - } - - $vswhere = Join-Path ${env:ProgramFiles(x86)} "Microsoft Visual Studio\Installer\vswhere.exe" - - # Choose VS component and vcvars target - $vsRequires = $null - $vcvarsTarget = $null - if ($hostArch -eq 'ARM64') { - $vsRequires = 'Microsoft.VisualStudio.Component.VC.Tools.ARM64' - $vcvarsTarget = 'arm64' - } else { - $vsRequires = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64' - $vcvarsTarget = 'x64' - } - - # Query the latest VS with required component - $VS_INSTALL = & $vswhere -latest -products * ` - -requires $vsRequires ` - -property installationPath - if (-not $VS_INSTALL) { - Write-Error "*** ERROR: Visual Studio installation with '$vsRequires' not found via vswhere ***" - exit 1 - } - - # Get vcvarsall.bat and import the environment for selected host - $VCVARSALL = 
Join-Path $VS_INSTALL "VC\Auxiliary\Build\vcvarsall.bat" - if (-not (Test-Path $VCVARSALL)) { - Write-Error "*** ERROR: vcvarsall.bat not found at $VCVARSALL ***" - exit 1 - } - - cmd /c "call `"$VCVARSALL`" $vcvarsTarget && set" | ForEach-Object { - if ($_ -match '^(.*?)=(.*)$') { - [System.Environment]::SetEnvironmentVariable($matches[1], $matches[2], 'Process') - } - } -} \ No newline at end of file diff --git a/qualcomm-software/scripts/test.ps1 b/qualcomm-software/scripts/test.ps1 deleted file mode 100755 index aa0cd2ebc61e..000000000000 --- a/qualcomm-software/scripts/test.ps1 +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - - -# A Powershell script to run the tests for the toolchain. The script assumes a -# successful build of the toolchain exists in the 'build' directory inside the -# repository tree. - -$ErrorActionPreference = 'Stop' - -. "$PSScriptRoot\init_win_env.ps1" -Set-VS-Env - -$repoRoot = git -C $PSScriptRoot rev-parse --show-toplevel -$buildDir = (Join-Path $repoRoot build) - -cd $buildDir - -ninja check-all diff --git a/qualcomm-software/scripts/test.sh b/qualcomm-software/scripts/test.sh deleted file mode 100755 index 3eebd8668956..000000000000 --- a/qualcomm-software/scripts/test.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. 
-# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# The script assumes a successful build of the toolchain exists in the 'build' -# directory inside the repository tree. - -set -ex - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -REPO_ROOT=$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel ) - -# Run all relevant test targets. This might be too broad eventually, -# but while we have a limited number of variants (and no compiler-rt -# or libc++ testing enabled) we can run everything. -cd "${REPO_ROOT}"/build -ninja check-all-llvm-toolchain diff --git a/qualcomm-software/scripts/test_no_runtimes.sh b/qualcomm-software/scripts/test_no_runtimes.sh deleted file mode 100755 index 752b252f5aee..000000000000 --- a/qualcomm-software/scripts/test_no_runtimes.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash - -# Copyright (c) 2025, Arm Limited and affiliates. -# Part of the Arm Toolchain project, under the Apache License v2.0 with LLVM Exceptions. -# See https://llvm.org/LICENSE.txt for license information. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -# -# ​​​​​Changes from Qualcomm Technologies, Inc. are provided under the following license: -# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries. -# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception - -# The script assumes a successful build of the toolchain exists in the 'build' -# directory inside the repository tree and will only run tests that do not -# require the runtime libraries be present. 
- -set -ex - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -REPO_ROOT=$( git -C "${SCRIPT_DIR}" rev-parse --show-toplevel ) - -cd "${REPO_ROOT}"/build -ninja check-all diff --git a/qualcomm-software/test/CMakeLists.txt b/qualcomm-software/test/CMakeLists.txt deleted file mode 100644 index 34f900063620..000000000000 --- a/qualcomm-software/test/CMakeLists.txt +++ /dev/null @@ -1,30 +0,0 @@ -if(LLVM_ENABLE_BACKTRACES) - set(ENABLE_BACKTRACES 1) -endif() -llvm_canonicalize_cmake_booleans( - ENABLE_BACKTRACES -) - -configure_lit_site_cfg( - ${CMAKE_CURRENT_SOURCE_DIR}/lit.site.cfg.py.in - ${CMAKE_CURRENT_BINARY_DIR}/lit.site.cfg.py - MAIN_CONFIG - ${CMAKE_CURRENT_SOURCE_DIR}/lit.cfg.py -) - -list(APPEND LLVM_TOOLCHAIN_TEST_DEPS - llvm-toolchain - FileCheck - count - not -) - -add_lit_testsuite(check-llvm-toolchain-lit - "Running toolchain regression tests" - ${CMAKE_CURRENT_BINARY_DIR} - DEPENDS ${LLVM_TOOLCHAIN_TEST_DEPS} -) - -add_lit_testsuites(llvm-toolchain-lit ${CMAKE_CURRENT_SOURCE_DIR} - DEPENDS ${LLVM_TOOLCHAIN_TEST_DEPS} -) diff --git a/qualcomm-software/test/lit.cfg.py b/qualcomm-software/test/lit.cfg.py deleted file mode 100644 index b6b32e716d52..000000000000 --- a/qualcomm-software/test/lit.cfg.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- Python -*- - -import os - -import lit.formats - -from lit.llvm import llvm_config - -# Configuration file for the 'lit' test runner. 
- -config.name = "Toolchain regression tests" -config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell) -config.suffixes = [".c", ".cpp", ".test"] -config.excludes = ["CMakeLists.txt"] -config.test_source_root = os.path.dirname(__file__) - -llvm_config.use_default_substitutions() -llvm_config.use_clang() - -config.environment["CLANG_NO_DEFAULT_CONFIG"] = "1" diff --git a/qualcomm-software/test/lit.site.cfg.py.in b/qualcomm-software/test/lit.site.cfg.py.in deleted file mode 100644 index ab0a1e1fcb8f..000000000000 --- a/qualcomm-software/test/lit.site.cfg.py.in +++ /dev/null @@ -1,19 +0,0 @@ -@LIT_SITE_CFG_IN_HEADER@ - -config.llvm_src_root = "@LLVM_SOURCE_DIR@" -config.llvm_obj_root = "@LLVM_BINARY_DIR@" -config.llvm_tools_dir = lit_config.substitute("@LLVM_BINARY_DIR@/bin") -config.llvm_libs_dir = lit_config.substitute("@LLVM_BINARY_DIR@/lib") -config.lit_tools_dir = "@LLVM_LIT_TOOLS_DIR@" -config.enable_backtrace = @ENABLE_BACKTRACES@ -config.errc_messages = "@LLVM_LIT_ERRC_MESSAGES@" -config.host_triple = "@LLVM_HOST_TRIPLE@" -config.target_triple = "@LLVM_DEFAULT_TARGET_TRIPLE@" -config.python_executable = "@Python3_EXECUTABLE@" -config.test_exec_root = "@CMAKE_CURRENT_BINARY_DIR@" - -import lit.llvm -lit.llvm.initialize(lit_config, config) - -# Let the main config do the real work. -lit_config.load_config(config, "@CMAKE_CURRENT_SOURCE_DIR@/lit.cfg.py") diff --git a/qualcomm-software/test/multilib/aarch64.test b/qualcomm-software/test/multilib/aarch64.test deleted file mode 100644 index 8702063da4c6..000000000000 --- a/qualcomm-software/test/multilib/aarch64.test +++ /dev/null @@ -1,26 +0,0 @@ -# FIXME: we lose a lot of test coverage as we don't cover many variants. -# Might be worth keeping around the original multilib json *just* for testing. -# Should also revisit whether there are relevant tests we can reuse even without -# the original multilib. 
- -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -fno-exceptions | FileCheck %s --check-prefix=CHECK-AARCH64 -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -mcpu=cortex-a53 -fno-exceptions | FileCheck %s --check-prefix=CHECK-AARCH64 -# CHECK-AARCH64: aarch64-none-elf/aarch64a{{$}} -# CHECK-AARCH64-EMPTY: - -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -march=armv8+nofp+nosimd -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -mcpu=cortex-a53+nofp+nosimd -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -mgeneral-regs-only -mllvm -aarch64-enable-simd-scalar=false -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP -# CHECK-NOFP: aarch64-none-elf/aarch64a_soft_nofp{{$}} -# CHECK-NOFP-EMPTY: - -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -march=armv8.3a+nofp+nosimd -mbranch-protection=pac-ret+leaf+bti -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP-PACRET-BTI -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -mcpu=cortex-a53 -march=armv8.3a+nofp+nosimd -mbranch-protection=pac-ret+leaf+bti -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP-PACRET-BTI -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -march=armv8.3a -mgeneral-regs-only -mllvm -aarch64-enable-simd-scalar=false -mbranch-protection=pac-ret+leaf+bti -mabi=aapcs-soft -fno-exceptions | FileCheck %s --check-prefix=CHECK-NOFP-PACRET-BTI -# CHECK-NOFP-PACRET-BTI: aarch64-none-elf/aarch64a_soft_nofp_pacret_bti{{$}} -# CHECK-NOFP-PACRET-BTI-EMPTY: - -# RUN: %clang -print-multi-directory --target=aarch64-none-elf -march=armv8.5a -mbranch-protection=pac-ret+leaf+b-key+bti -fno-exceptions | FileCheck %s --check-prefix=CHECK-PACRET-BKEY-BTI -# RUN: %clang -print-multi-directory 
--target=aarch64-none-elf -mcpu=cortex-a53 -march=armv8.5a -mbranch-protection=pac-ret+leaf+b-key+bti -fno-exceptions | FileCheck %s --check-prefix=CHECK-PACRET-BKEY-BTI -# CHECK-PACRET-BKEY-BTI: aarch64-none-elf/aarch64a_pacret_bkey_bti{{$}} -# CHECK-PACRET-BKEY-BTI-EMPTY: diff --git a/qualcomm-software/test/multilib/armv7.test b/qualcomm-software/test/multilib/armv7.test deleted file mode 100644 index 5a8a926ca90d..000000000000 --- a/qualcomm-software/test/multilib/armv7.test +++ /dev/null @@ -1,3 +0,0 @@ -# RUN: %clang -print-multi-directory --target=arm-none-eabi -march=armv7a -mfloat-abi=softfp -mthumb -mfpu=neon -fno-exceptions | FileCheck %s --check-prefix=CHECK-ARMV7 -# CHECK-ARMV7: arm-none-eabi/armv7a_soft_neon{{$}} -# CHECK-ARMV7-EMPTY: diff --git a/qualcomm-software/test/multilib/riscv32.test b/qualcomm-software/test/multilib/riscv32.test deleted file mode 100644 index bc918b1082eb..000000000000 --- a/qualcomm-software/test/multilib/riscv32.test +++ /dev/null @@ -1,21 +0,0 @@ -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imac -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imazca -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imafc_zba -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imafzca_zcf -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imafdzca_zcf_zcd -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# CHECK-IMAC-FNO-PIC: riscv32-unknown-elf/riscv32imac_ilp32_nopic{{$}} -# CHECK-IMAC-FNO-PIC-EMPTY: - -# RUN: %clang 
-print-multi-directory --target=riscv32-unknown-elf -march=rv32imac_zba_zbb -mabi=ilp32 -fpic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-ZBA-ZBB -# CHECK-IMAC-ZBA-ZBB: riscv32-unknown-elf/riscv32imac_zba_zbb_ilp32{{$}} -# CHECK-IMAC-ZBA-ZBB-EMPTY: - -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imac_zba_zbb -mabi=ilp32 -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-ZBA-ZBB-NO-PIC -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imac_zba_zbb -mabi=ilp32 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-ZBA-ZBB-NO-PIC -# CHECK-IMAC-ZBA-ZBB-NO-PIC: riscv32-unknown-elf/riscv32imac_zba_zbb_ilp32_nopic{{$}} -# CHECK-IMAC-ZBA-ZBB-NO-PIC-EMPTY: - -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imac -mabi=ilp32 -fpic -fno-exceptions 2>&1 | FileCheck %s --check-prefix=NOT-FOUND -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32ima -mabi=ilp32 -fno-pic -fno-exceptions 2>&1 | FileCheck %s --check-prefix=NOT-FOUND -# RUN: %clang -print-multi-directory --target=riscv32-unknown-elf -march=rv32imafdzca_zcf -mabi=ilp32 -fno-pic -fno-exceptions 2>&1 | FileCheck %s --check-prefix=NOT-FOUND -# NOT-FOUND: warning: no multilib found matching flags diff --git a/qualcomm-software/test/multilib/riscv64.test b/qualcomm-software/test/multilib/riscv64.test deleted file mode 100644 index e0dad4e30129..000000000000 --- a/qualcomm-software/test/multilib/riscv64.test +++ /dev/null @@ -1,11 +0,0 @@ -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imac -mabi=lp64 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imafc_zba -mabi=lp64 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imafzca -mabi=lp64 -fno-pic 
-fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imafdzca_zcd -mabi=lp64 -fno-pic -fno-exceptions | FileCheck %s --check-prefix=CHECK-IMAC-FNO-PIC -# CHECK-IMAC-FNO-PIC: riscv64-unknown-elf/riscv64imac_lp64_nopic{{$}} -# CHECK-IMAC-FNO-PIC-EMPTY: - -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imac -mabi=lp64 -fpic -fno-exceptions 2>&1 | FileCheck %s --check-prefix=NOT-FOUND -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imc -mabi=lp64 -fno-pic -fno-exceptions 2>&1 | FileCheck %s --check-prefix=NOT-FOUND -# RUN: %clang -print-multi-directory --target=riscv64-unknown-elf -march=rv64imafdzca -mabi=lp64 -fno-pic -fno-exceptions 2>&1| FileCheck %s --check-prefix=NOT-FOUND -# NOT-FOUND: warning: no multilib found matching flags diff --git a/qualcomm-software/versions.json b/qualcomm-software/versions.json deleted file mode 100644 index d61f3e918327..000000000000 --- a/qualcomm-software/versions.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "comment": [ - "Release branches will use specific revisions of the repos it depends", - "on. This file provides a single source of truth for which revisions to", - "use that can be queried by CMake and other automation software." - ], - "repos": { - "picolibc": { - "url": "https://github.com/picolibc/picolibc.git", - "tagType": "tag", - "tag": "1.8.10" - }, - "musl": { - "url": "https://git.musl-libc.org/git/musl", - "tagType": "tag", - "tag": "v1.2.5" - }, - "musl-embedded": { - "url": "https://github.com/qualcomm/musl-embedded", - "tagType": "branch", - "tag": "main" - }, - "eld": { - "url": "https://github.com/qualcomm/eld", - "tagType": "branch", - "tag": "release/22.x" - } - } -}