diff --git a/.github/workflows/model-unittest-gpu.yml b/.github/workflows/model-unittest-gpu.yml
deleted file mode 100644
index 56e2dfd1ea1..00000000000
--- a/.github/workflows/model-unittest-gpu.yml
+++ /dev/null
@@ -1,153 +0,0 @@
-name: Model Unittest GPU CI
-
-on:
-  pull_request:
-  schedule:
-    - cron: "0 18 * * *"
-  workflow_call:
-    inputs:
-      runner:
-        required: false
-        type: string
-      image_name:
-        required: false
-        type: string
-
-concurrency:
-  group: model-unittest-${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
-  cancel-in-progress: true
-
-env:
-  PR_ID: ${{ github.event.pull_request.number || '' }}
-  COMMIT_ID: ${{ github.event.pull_request.head.sha || github.sha }}
-  TASK: PaddleFormers-CI-${{ github.event.pull_request.number }}-model-unittest-gpu
-  CI_SCRIPTS_PATH: /workspace/PaddleFormers/scripts/ci_model_unittest.sh
-  BRANCH: ${{ github.event.pull_request.base.ref || github.ref_name }}
-  AGILE_COMPILE_BRANCH: ${{ github.event.pull_request.base.ref }}
-  CI_JOB_NAME: model-unittest-gpu-ci
-  NO_PROXY: "localhost,bj.bcebos.com,su.bcebos.com,bcebos.com,apiin.im.baidu.com,gitee.com,aliyun.com,.baidu.com,.tuna.tsinghua.edu.cn"
-
-defaults:
-  run:
-    shell: bash
-
-jobs:
-  model-unittest-gpu-ci:
-    name: model-unittest-gpu-ci
-    runs-on: ${{ inputs.runner || 'ernie-8gpu' }}
-    steps:
-      - name: Determine Image Name
-        env:
-          IMAGE_NAME: ${{ inputs.image_name }}
-        run: |
-          if [[ -n "${IMAGE_NAME}" ]]; then
-            echo "IMAGE_NAME=${IMAGE_NAME}" >> "$GITHUB_ENV"
-          else
-            echo "IMAGE_NAME=ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddle:cuda126-dev-latest" >> "$GITHUB_ENV"
-          fi
-
-      - name: Run Container
-        env:
-          WORK_DIR: ${{ github.workspace }}
-          FLAGS_DYNAMIC_STATIC_UNIFIED_COMM: "True"
-          PYTHON_VERSION: "3.10"
-          PADDLE_WHL: https://paddle-qa.bj.bcebos.com/paddle-pipeline/Develop-GpuAll-LinuxCentos-Gcc11-Cuda126-Cudnn95-Trt105-Py310-Compile/latest/paddlepaddle_gpu-0.0.0-cp310-cp310-linux_x86_64.whl
-        run: |
-          CONTAINER_NAME=${TASK}-$(date +%Y%m%d-%H%M%S)
-          echo "CONTAINER_NAME=${CONTAINER_NAME}" >> "$GITHUB_ENV"
-          docker run -d -t --gpus all --name ${CONTAINER_NAME} --net=host -v /dev/shm:/dev/shm --shm-size=32G \
-            -v ${WORK_DIR}/../../..:${WORK_DIR}/../../.. \
-            -v ${WORK_DIR}:/workspace \
-            -v /home/.cache/pip:/home/.cache/pip \
-            -e "BRANCH=$BRANCH" \
-            -e "AGILE_COMPILE_BRANCH=$AGILE_COMPILE_BRANCH" \
-            -e "PR_ID=$PR_ID" \
-            -e "COMMIT_ID=$COMMIT_ID" \
-            -e "WORK_DIR=$WORK_DIR" \
-            -e "CI_SCRIPTS_PATH=$CI_SCRIPTS_PATH" \
-            -e "NO_PROXY=$NO_PROXY" \
-            -e "CI_JOB_NAME=$CI_JOB_NAME" \
-            -e "PADDLE_WHL=$PADDLE_WHL" \
-            -e "FLAGS_DYNAMIC_STATIC_UNIFIED_COMM=$FLAGS_DYNAMIC_STATIC_UNIFIED_COMM" \
-            -e "PYTHON_VERSION=$PYTHON_VERSION" \
-            -e HF_PROXY_PATH=${WORK_DIR}/../../../proxy_huggingface \
-            -e AISTUDIO_PROXY_PATH=${WORK_DIR}/../../../proxy_aistudio \
-            -w /workspace --privileged ${IMAGE_NAME}
-
-      - name: Download Code
-        run: |
-          docker exec -t $CONTAINER_NAME /bin/bash -c '
-            rm -rf * .[^.]*
-            echo "Downloading PaddleFormers.tar"
-            wget -q --no-proxy https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddleFormers.tar --no-check-certificate
-            echo "Extracting PaddleFormers.tar"
-            tar xf PaddleFormers.tar && rm -rf PaddleFormers.tar
-            echo "WORK_DIR = ${WORK_DIR}"
-            source ${WORK_DIR}/../../../proxy
-            cat ${WORK_DIR}/../../../proxy
-            cd PaddleFormers
-            git config --global user.name "PaddleCI"
-            git config --global user.email "paddle_ci@example.com"
-            git pull
-            git submodule update --init --recursive --force
-            if [ -n "${PR_ID}" ]; then
-              git fetch origin pull/${PR_ID}/head
-              git checkout -b PR_${PR_ID} FETCH_HEAD
-              git remote add upstream https://github.com/PaddlePaddle/PaddleFormers.git
-              echo "Checking out ${BRANCH}..."
-              git fetch upstream ${BRANCH}:${BRANCH}
-              git merge ${BRANCH} --no-edit
-              git diff --numstat ${BRANCH} -- | awk "{print \$NF}"
-            else
-              echo "Not in a pull_request event. Skipping PR-specific operations."
-            fi
-            git log --pretty=oneline -10
-          '
-
-      - name: Test
-        run: |
-          docker exec -t $CONTAINER_NAME /bin/bash -c '
-            ldconfig
-            pip config set global.cache-dir "/home/.cache/pip"
-            set -e
-            rm -rf /root/.cache/aistudio/
-            cd /workspace/PaddleFormers && git config --global --add safe.directory $PWD
-            echo "WORK_DIR = ${WORK_DIR}"
-            cp -r ${WORK_DIR}/../../../models ./models
-            echo "Check models:"
-            ls -l ./models
-            echo "Test Start"
-            hostname
-            timeout 30m bash scripts/regression/ci_model_unittest.sh ${PADDLE_WHL}
-          '
-
-      - name: Upload Products
-        if: always()
-        env:
-          HOME_PATH: ${{ github.workspace }}/../../..
-          BOS_UPLOAD_SCRIPT: ${{ github.workspace }}/../../../bos/BosClient.py
-        run: |
-          docker exec -t $CONTAINER_NAME /bin/bash -c '
-            if [ ! -f "${BOS_UPLOAD_SCRIPT}" ]; then
-f "${BOS_UPLOAD_SCRIPT}" ]; then - wget -q --no-proxy -O ${HOME_PATH}/bos_new.tar.gz https://xly-devops.bj.bcebos.com/home/bos_new.tar.gz --no-check-certificate - mkdir ${HOME_PATH}/bos - tar xf ${HOME_PATH}/bos_new.tar.gz -C ${HOME_PATH}/bos - fi - if [ -n "${PR_ID}" ]; then - bos_prefix="${PR_ID}/${COMMIT_ID}" - else - bos_prefix="schedule/$(date +%Y%m%d)" - fi - # logs - cd /workspace/PaddleFormers/model_unittest_logs - for FILE in /workspace/PaddleFormers/model_unittest_logs/*; do - file=$(basename "$FILE") - python ${BOS_UPLOAD_SCRIPT} $file paddle-github-action/PR/PaddleFormers/model-unittest-gpu/${bos_prefix}/logs - echo "$file: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/model-unittest-gpu/${bos_prefix}/logs/$file" - done - ' - - - name: Terminate And Delete the Container - if: always() - run: | - docker rm -f $CONTAINER_NAME 2>/dev/null || true \ No newline at end of file diff --git a/.github/workflows/unittest-gpu.yml b/.github/workflows/unittest-gpu.yml index 04316e5d824..8b409ff716c 100644 --- a/.github/workflows/unittest-gpu.yml +++ b/.github/workflows/unittest-gpu.yml @@ -14,18 +14,19 @@ on: type: string concurrency: - group: unittest-${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} cancel-in-progress: true env: PR_ID: ${{ github.event.pull_request.number }} - COMMIT_ID: ${{ github.event.pull_request.head.sha }} + COMMIT_ID: ${{ github.event.pull_request.head.sha || github.sha }} TASK: PaddleFormers-CI-${{ github.event.pull_request.number }}-unittest-gpu - ci_scripts: /workspace/PaddleFormers/scripts/unit_test - BRANCH: ${{ github.event.pull_request.base.ref }} + CI_UNITTEST_SCRIPTS_PATH: /workspace/PaddleFormers/scripts/ci_unittest.sh + CI_MODEL_UNITTEST_SCRIPTS_PATH: /workspace/PaddleFormers/scripts/ci_model_unittest.sh + BRANCH: ${{ github.event.pull_request.base.ref || github.ref_name }} AGILE_COMPILE_BRANCH: ${{ github.event.pull_request.base.ref }} - CI_name: unittest-gpu-ci - no_proxy: "localhost,bj.bcebos.com,su.bcebos.com,bcebos.com,apiin.im.baidu.com,gitee.com,aliyun.com,.baidu.com,.tuna.tsinghua.edu.cn" + CI_JOB_NAME: unittest-gpu-ci + NO_PROXY: "localhost,bj.bcebos.com,su.bcebos.com,bcebos.com,apiin.im.baidu.com,gitee.com,aliyun.com,.baidu.com,.tuna.tsinghua.edu.cn" defaults: run: @@ -48,44 +49,55 @@ jobs: - name: Run Container env: - work_dir: ${{ github.workspace }} - FLAGS_dynamic_static_unified_comm: "True" - python_version: "3.10" - paddle_whl: https://paddle-qa.bj.bcebos.com/paddle-pipeline/Develop-GpuAll-LinuxCentos-Gcc11-Cuda126-Cudnn95-Trt105-Py310-Compile/latest/paddlepaddle_gpu-0.0.0-cp310-cp310-linux_x86_64.whl + WORK_DIR: ${{ github.workspace }} + FLAGS_DYNAMIC_STATIC_UNIFIED_COMM: "True" + PYTHON_VERSION: "3.10" + PADDLE_WHL: https://paddle-qa.bj.bcebos.com/paddle-pipeline/Develop-GpuAll-LinuxCentos-Gcc11-Cuda126-Cudnn95-Trt105-Py310-Compile/latest/paddlepaddle_gpu-0.0.0-cp310-cp310-linux_x86_64.whl run: | - container_name=${TASK}-$(date +%Y%m%d-%H%M%S) - echo "container_name=${container_name}" >> "$GITHUB_ENV" - echo "Workspace path: ${{ github.workspace }}" - docker run -d -t --name ${container_name} --net=host -v /dev/shm:/dev/shm --shm-size=32G \ - -v $work_dir/../../..:$work_dir/../../.. \ - -v $work_dir:/workspace \ + CONTAINER_NAME=${TASK}-$(date +%Y%m%d-%H%M%S) + echo "CONTAINER_NAME=${CONTAINER_NAME}" >> "$GITHUB_ENV" + DOCKER_VER=$(docker version --format '{{.Server.Version}}' | cut -d. 
+          if (( $(echo "$DOCKER_VER < 19.03" | bc -l) )); then
+            GPU_OPTION="--runtime=nvidia"
+          else
+            GPU_OPTION="--gpus all"
+          fi
+          echo "DOCKER_VER=${DOCKER_VER}"
+          echo "GPU_OPTION=${GPU_OPTION}"
+          docker run -d -t --name ${CONTAINER_NAME} --net=host -v /dev/shm:/dev/shm --shm-size=32G \
+            -v ${WORK_DIR}/../../..:${WORK_DIR}/../../.. \
+            -v ${WORK_DIR}:/workspace \
             -v /home/.cache/pip:/home/.cache/pip \
-            -e BRANCH \
-            -e AGILE_COMPILE_BRANCH \
-            -e PR_ID \
-            -e COMMIT_ID \
-            -e work_dir \
-            -e ci_scripts \
-            -e no_proxy \
-            -e CI_name \
-            -e paddle_whl \
-            -e FLAGS_dynamic_static_unified_comm \
-            -e python_version \
-            -e HF_PROXY_PATH=$work_dir/../../../proxy_huggingface \
-            -e AISTUDIO_PROXY_PATH=$work_dir/../../../proxy_aistudio \
-            -e "HF_DATASETS_CACHE=$work_dir/../../../paddlenlp/huggingface/datasets" \
-            -e "TRANSFORMERS_CACHE=$work_dir/../../../paddlenlp/huggingface" \
-            -w /workspace --runtime=nvidia --privileged $IMAGE_NAME
+            -e "BRANCH=$BRANCH" \
+            -e "AGILE_COMPILE_BRANCH=$AGILE_COMPILE_BRANCH" \
+            -e "PR_ID=$PR_ID" \
+            -e "COMMIT_ID=$COMMIT_ID" \
+            -e "WORK_DIR=$WORK_DIR" \
+            -e "CI_UNITTEST_SCRIPTS_PATH=$CI_UNITTEST_SCRIPTS_PATH" \
+            -e "CI_MODEL_UNITTEST_SCRIPTS_PATH=$CI_MODEL_UNITTEST_SCRIPTS_PATH" \
+            -e "NO_PROXY=$NO_PROXY" \
+            -e "CI_JOB_NAME=$CI_JOB_NAME" \
+            -e "PADDLE_WHL=$PADDLE_WHL" \
+            -e "FLAGS_DYNAMIC_STATIC_UNIFIED_COMM=$FLAGS_DYNAMIC_STATIC_UNIFIED_COMM" \
+            -e "PYTHON_VERSION=$PYTHON_VERSION" \
+            -e HF_PROXY_PATH=${WORK_DIR}/../../../proxy_huggingface \
+            -e AISTUDIO_PROXY_PATH=${WORK_DIR}/../../../proxy_aistudio \
+            -e "HF_DATASETS_CACHE=${WORK_DIR}/../../../paddlenlp/huggingface/datasets" \
+            -e "TRANSFORMERS_CACHE=${WORK_DIR}/../../../paddlenlp/huggingface" \
+            -w /workspace \
+            ${GPU_OPTION} \
+            --privileged \
+            ${IMAGE_NAME}
 
       - name: Download Code
         run: |
-          docker exec -t $container_name /bin/bash -c '
+          docker exec -t $CONTAINER_NAME /bin/bash -c '
             rm -rf * .[^.]*
             echo "Downloading PaddleFormers.tar"
             wget -q --no-proxy https://paddle-qa.bj.bcebos.com/CodeSync/develop/PaddleFormers.tar --no-check-certificate
             echo "Extracting PaddleFormers.tar"
             tar xf PaddleFormers.tar && rm -rf PaddleFormers.tar
-            source $work_dir/../../../proxy
+            source ${WORK_DIR}/../../../proxy
             cd PaddleFormers
             git config --global user.name "PaddleCI"
             git config --global user.email "paddle_ci@example.com"
@@ -104,39 +116,45 @@ jobs:
             git log --pretty=oneline -10
           '
 
-      - name: Test
+      - name: Unit Test
         run: |
-          docker exec -t $container_name /bin/bash -c '
+          docker exec -t $CONTAINER_NAME /bin/bash -c '
             ldconfig
             pip config set global.cache-dir "/home/.cache/pip"
             set -e
             rm -rf /root/.cache/aistudio/
             cd /workspace/PaddleFormers && git config --global --add safe.directory $PWD
-            source $work_dir/../../../proxy
-            source $work_dir/../../../AISTUDIO_ACCESS_TOKEN
-            echo "work_dir = ${work_dir}"
-            cp -r ${work_dir}/../../../models ./models
-            echo "Check models:"
+            source ${WORK_DIR}/../../../proxy
+            source ${WORK_DIR}/../../../AISTUDIO_ACCESS_TOKEN
+            echo "WORK_DIR = ${WORK_DIR}"
+            cp -r ${WORK_DIR}/../../../models ./models
+            echo "Check whether the local model file exists:"
             ls -l ./models
-            timeout 30m bash scripts/unit_test/ci_unittest.sh ${paddle_whl}
+            timeout 30m bash scripts/unit_test/ci_unittest.sh ${PADDLE_WHL}
           '
 
+      - name: Model Test
+        run: |
+          docker exec -t $CONTAINER_NAME /bin/bash -c '
+            ldconfig
+            pip config set global.cache-dir "/home/.cache/pip"
+            set -e
+            rm -rf /root/.cache/aistudio/
+            cd /workspace/PaddleFormers && git config --global --add safe.directory $PWD
+            echo "Check whether the local model file exists:"
+            ls -l ./models
+            timeout 30m bash scripts/regression/ci_model_unittest.sh ${PADDLE_WHL}
       - name: Upload Products
         if: always()
         env:
-          home_path: ${{ github.workspace }}/../../..
-          bos_file: ${{ github.workspace }}/../../../bos/BosClient.py
-          allure_file: ${{ github.workspace }}/../../../allure-2.19.0/bin/allure
+          HOME_PATH: ${{ github.workspace }}/../../..
+          BOS_UPLOAD_SCRIPT: ${{ github.workspace }}/../../../bos/BosClient.py
         run: |
-          docker exec -t $container_name /bin/bash -c '
-            if [ ! -f "${{ env.bos_file }}" ]; then
-              wget -q --no-proxy -O ${{ env.home_path }}/bos_new.tar.gz https://xly-devops.bj.bcebos.com/home/bos_new.tar.gz --no-check-certificate
-              mkdir ${{ env.home_path }}/bos
-              tar xf ${{ env.home_path }}/bos_new.tar.gz -C ${{ env.home_path }}/bos
-            fi
-            if [ ! -f "${{ env.allure_file }}" ]; then
-              wget -q --no-proxy -O ${{ env.home_path }}/allure-2.19.0.zip https://xly-devops.bj.bcebos.com/tools/allure-2.19.0.zip --no-check-certificate
-              unzip -q ${{ env.home_path }}/allure-2.19.0.zip
+          docker exec -t $CONTAINER_NAME /bin/bash -c '
+            if [ ! -f "${BOS_UPLOAD_SCRIPT}" ]; then
+              wget -q --no-proxy -O ${HOME_PATH}/bos_new.tar.gz https://xly-devops.bj.bcebos.com/home/bos_new.tar.gz --no-check-certificate
+              mkdir ${HOME_PATH}/bos
+              tar xf ${HOME_PATH}/bos_new.tar.gz -C ${HOME_PATH}/bos
             fi
             if [ -n "${PR_ID}" ]; then
               bos_prefix="${PR_ID}/${COMMIT_ID}"
@@ -145,27 +163,21 @@ jobs:
             fi
             # coverage.xml
             cd /workspace/PaddleFormers
-            python ${{ env.bos_file }} coverage.xml paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs
+            python ${BOS_UPLOAD_SCRIPT} coverage.xml paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs
             echo "cov-report: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs/coverage.xml"
             # logs
             cd /workspace/PaddleFormers/unittest_logs
             for FILE in /workspace/PaddleFormers/unittest_logs/*; do
               file=$(basename "$FILE")
-              python ${{ env.bos_file }} $file paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs
+              python ${BOS_UPLOAD_SCRIPT} $file paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs
               echo "$file: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs/$file"
             done
-            # allure
-            # cd /workspace/PaddleFormers/
-            # ${{ env.allure_file }} generate result -o report
-            # tar -czf report.tar.gz report
-            # python ${{ env.bos_file }} report.tar.gz paddle-github-action/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs
-            # echo "report: https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs/report.tar.gz"
           '
 
       - name: Terminate And Delete the Container
         if: always()
         run: |
-          docker rm -f $container_name 2>/dev/null || true
+          docker rm -f $CONTAINER_NAME 2>/dev/null || true
 
   upload-coverage:
     name: upload-coverage
@@ -211,40 +223,4 @@ jobs:
       with:
         files: coverage.xml
       env:
-        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
-
-  # upload-allure:
-  #   name: upload-allure
-  #   needs: [unittest-gpu-ci]
-  #   if: success() || failure()
-  #   runs-on: ubuntu-latest
-  #   steps:
-  #     - name: Checkout Code
-  #       uses: actions/checkout@v4
-  #       with:
-  #         fetch-depth: 0
-
-  #     - name: Download report.tar.gz
-  #       run: |
-  #         if [ -n "${PR_ID}" ]; then
-  #           bos_prefix="${PR_ID}/${COMMIT_ID}"
-  #         else
-  #           bos_prefix="schedule/$(date +%Y%m%d)"
-  #         fi
-  #         wget -q --no-proxy \
-  #           https://paddle-github-action.bj.bcebos.com/PR/PaddleFormers/unittest-gpu/${bos_prefix}/logs/report.tar.gz \
-  #           --no-check-certificate -O report.tar.gz
-  #         tar -xzf report.tar.gz
-
-  #     - name: Upload Allure Report
-  #       uses: actions/upload-artifact@v4
-  #       with:
-  #         name: allure-report
-  #         path: report
-  #         if-no-files-found: ignore
-
-  #     - name: Deploy allure report to GitHub Pages
-  #       uses: peaceiris/actions-gh-pages@v4
-  #       with:
-  #         github_token: ${{ secrets.GITHUB_TOKEN }}
-  #         publish_dir: ./report
\ No newline at end of file
+        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
\ No newline at end of file
diff --git a/scripts/regression/ci_model_unittest.sh b/scripts/regression/ci_model_unittest.sh
index 52690b2cb95..d043d6d5055 100644
--- a/scripts/regression/ci_model_unittest.sh
+++ b/scripts/regression/ci_model_unittest.sh
@@ -17,27 +17,12 @@ set -e
 export paddle=$1
 export FLAGS_enable_CE=${2-false}
 
-export nlp_dir=/workspace/PaddleFormers
-export log_path=/workspace/PaddleFormers/model_unittest_logs
+export paddleformers_code_path=/workspace/PaddleFormers
+export log_path=${paddleformers_code_path}/unittest_logs
 export model_unittest_path=/workspace/PaddleFormers/scripts/regression
-cd $nlp_dir
+cd $paddleformers_code_path
 mkdir -p $log_path
 
-install_requirements() {
-    python -m pip config --user set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple
-    python -m pip config --user set global.trusted-host pypi.tuna.tsinghua.edu.cn
-    python -m pip install -r requirements.txt
-    python -m pip install -r requirements-dev.txt
-    python -m pip install -r tests/requirements.txt
-    python -m pip uninstall paddlepaddle paddlepaddle_gpu -y
-    python -m pip install --no-cache-dir ${paddle} --no-dependencies --progress-bar off --force-reinstall
-    python -c "import paddle;print('paddle');print(paddle.__version__);print(paddle.version.show())" >> ${log_path}/commit_info.txt
-    python setup.py bdist_wheel > /dev/null
-    python -m pip install dist/p****.whl
-    python -c "from paddleformers import __version__; print('paddleformers version:', __version__)" >> ${log_path}/commit_info.txt
-    python -c "import paddleformers; print('paddleformers commit:',paddleformers.version.commit)" >> ${log_path}/commit_info.txt
-    python -m pip list >> ${log_path}/commit_info.txt
-}
 
 set_env() {
     export NVIDIA_TF32_OVERRIDE=0
@@ -49,7 +34,7 @@ set_env() {
     if [[ ${FLAGS_enable_CE} == "true" ]];then
         export CE_TEST_ENV=1
         export RUN_SLOW_TEST=1
-        export PYTHONPATH=${nlp_dir}:${nlp_dir}/llm:${PYTHONPATH}
+        export PYTHONPATH=${paddleformers_code_path}:${paddleformers_code_path}/llm:${PYTHONPATH}
     fi
 }
 
@@ -75,31 +60,30 @@ print_info() {
 }
 
 get_diff_TO_case(){
-export FLAGS_enable_CI=false
-if [ -z "${AGILE_COMPILE_BRANCH}" ]; then
-    # Scheduled Regression Test
-    FLAGS_enable_CI=true
-else
-    for file_name in `git diff --numstat ${AGILE_COMPILE_BRANCH} -- |awk '{print $NF}'`;do
-        ext="${file_name##*.}"
-        echo "file_name: ${file_name}, ext: ${file_name##*.}"
-
-        if [ ! -f ${file_name} ];then # Delete Files for a Pull Request
-            continue
-        elif [[ "$ext" == "md" || "$ext" == "rst" || "$file_name" == docs/* ]]; then
-            continue
-        else
-            FLAGS_enable_CI=true
-        fi
-    done
-fi
+    export FLAGS_enable_CI=false
+    if [ -z "${AGILE_COMPILE_BRANCH}" ]; then
+        # Scheduled Regression Test
+        FLAGS_enable_CI=true
+    else
+        for file_name in `git diff --numstat ${AGILE_COMPILE_BRANCH} -- |awk '{print $NF}'`;do
+            ext="${file_name##*.}"
+            echo "file_name: ${file_name}, ext: ${file_name##*.}"
+
+            if [ ! -f ${file_name} ];then # Delete Files for a Pull Request
+                continue
+            elif [[ "$ext" == "md" || "$ext" == "rst" || "$file_name" == docs/* ]]; then
+                continue
+            else
+                FLAGS_enable_CI=true
+            fi
+        done
+    fi
 }
 
 get_diff_TO_case
 set_env
 if [[ ${FLAGS_enable_CI} == "true" ]] || [[ ${FLAGS_enable_CE} == "true" ]];then
-    install_requirements
-    cd ${nlp_dir}
+    cd ${paddleformers_code_path}
     echo ' Testing all model unittest cases '
    unset http_proxy && unset https_proxy
     set +e
@@ -107,27 +91,15 @@ if [[ ${FLAGS_enable_CI} == "true" ]] || [[ ${FLAGS_enable_CE} == "true" ]];then
     python -c "import paddle; print(paddle.version.cuda()); print(paddle.version.cudnn()); print(paddle.is_compiled_with_cuda())"
     echo "Check docker Cuda Version"
     nvcc -V
-    cat /usr/local/cuda/version.txt
     echo "Check nvidia-smi"
     nvidia-smi
+    echo "Check paddle GPU count"
     python -c "import paddle; print(paddle.device.device_count())"
-    export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
     PYTHONPATH=$(pwd) \
     COVERAGE_SOURCE=paddleformers \
     python -m pytest -s -v ${model_unittest_path} > ${log_path}/model_unittest.log 2>&1
     exit_code=$?
-    print_info $exit_code model_unittest
-
-    if [ -n "${AGILE_JOB_BUILD_ID}" ]; then
-        cd ${nlp_dir}
-        echo -e "\033[35m ---- Generate Allure Report \033[0m"
-        unset http_proxy && unset https_proxy
-        cp ${nlp_dir}/scripts/unit_test/gen_allure_report.py ./
-        python gen_allure_report.py > /dev/null
-        echo -e "\033[35m ---- Report: https://xly.bce.baidu.com/ipipe/ipipe-report/report/${AGILE_JOB_BUILD_ID}/report/ \033[0m"
-    else
-        echo "AGILE_JOB_BUILD_ID is empty, skip generate allure report"
-    fi
+    print_info $exit_code
 else
     echo -e "\033[32m Changed Not CI case, Skips \033[0m"
     exit_code=0
diff --git a/scripts/unit_test/ci_unittest.sh b/scripts/unit_test/ci_unittest.sh
index 0f4e8833b36..5264216dcd5 100644
--- a/scripts/unit_test/ci_unittest.sh
+++ b/scripts/unit_test/ci_unittest.sh
@@ -17,12 +17,9 @@ set -e
 export paddle=$1
 export FLAGS_enable_CE=${2-false}
 
-export nlp_dir=/workspace/PaddleFormers
-export log_path=/workspace/PaddleFormers/unittest_logs
-cd $nlp_dir
-if [ ! -d "unittest_logs" ];then
-d "unittest_logs" ];then - mkdir unittest_logs -fi +export paddleformers_code_path=/workspace/PaddleFormers +export log_path=${paddleformers_code_path}/unittest_logs +cd $paddleformers_code_path mkdir -p $log_path install_requirements() { @@ -32,7 +29,7 @@ install_requirements() { python -m pip install -r requirements-dev.txt python -m pip install -r tests/requirements.txt python -m pip uninstall paddlepaddle paddlepaddle_gpu -y - python -m pip install --no-cache-dir ${paddle} --no-dependencies --progress-bar off + python -m pip install --no-cache-dir ${paddle} --no-dependencies --progress-bar off --force-reinstall python -c "import paddle;print('paddle');print(paddle.__version__);print(paddle.version.show())" >> ${log_path}/commit_info.txt python setup.py bdist_wheel > /dev/null python -m pip install dist/p****.whl @@ -51,7 +48,7 @@ set_env() { if [[ ${FLAGS_enable_CE} == "true" ]];then export CE_TEST_ENV=1 export RUN_SLOW_TEST=1 - export PYTHONPATH=${nlp_dir}:${nlp_dir}/llm:${PYTHONPATH} + export PYTHONPATH=${paddleformers_code_path}:${paddleformers_code_path}/llm:${PYTHONPATH} fi } @@ -77,34 +74,42 @@ print_info() { } get_diff_TO_case(){ -export FLAGS_enable_CI=false -if [ -z "${AGILE_COMPILE_BRANCH}" ]; then - # Scheduled Regression Test - FLAGS_enable_CI=true -else - for file_name in `git diff --numstat ${AGILE_COMPILE_BRANCH} -- |awk '{print $NF}'`;do - ext="${file_name##*.}" - echo "file_name: ${file_name}, ext: ${file_name##*.}" - - if [ ! -f ${file_name} ];then # Delete Files for a Pull Request - continue - elif [[ "$ext" == "md" || "$ext" == "rst" || "$file_name" == docs/* ]]; then - continue - else - FLAGS_enable_CI=true - fi - done -fi + export FLAGS_enable_CI=false + if [ -z "${AGILE_COMPILE_BRANCH}" ]; then + # Scheduled Regression Test + FLAGS_enable_CI=true + else + for file_name in `git diff --numstat ${AGILE_COMPILE_BRANCH} -- |awk '{print $NF}'`;do + ext="${file_name##*.}" + echo "file_name: ${file_name}, ext: ${file_name##*.}" + + if [ ! -f ${file_name} ];then # Delete Files for a Pull Request + continue + elif [[ "$ext" == "md" || "$ext" == "rst" || "$file_name" == docs/* ]]; then + continue + else + FLAGS_enable_CI=true + fi + done + fi } get_diff_TO_case set_env if [[ ${FLAGS_enable_CI} == "true" ]] || [[ ${FLAGS_enable_CE} == "true" ]];then install_requirements - cd ${nlp_dir} + cd ${paddleformers_code_path} echo ' Testing all unittest cases ' unset http_proxy && unset https_proxy set +e + echo "Check paddle Cuda Version" + python -c "import paddle; print(paddle.version.cuda()); print(paddle.version.cudnn()); print(paddle.is_compiled_with_cuda())" + echo "Check docker Cuda Version" + nvcc -V + echo "Check nvidia-smi" + nvidia-smi + echo "Check paddle GPU count" + python -c "import paddle; print(paddle.device.device_count())" DOWNLOAD_SOURCE=aistudio WAIT_UNTIL_DONE=True \ PYTHONPATH=$(pwd) \ COVERAGE_SOURCE=paddleformers \ @@ -116,18 +121,7 @@ if [[ ${FLAGS_enable_CI} == "true" ]] || [[ ${FLAGS_enable_CE} == "true" ]];then --cov=paddleformers \ --cov-report=xml:coverage.xml > ${log_path}/unittest.log 2>&1 exit_code=$? 
-    print_info $exit_code unittest
-
-    if [ -n "${AGILE_JOB_BUILD_ID}" ]; then
-        cd ${nlp_dir}
-        echo -e "\033[35m ---- Generate Allure Report \033[0m"
-        unset http_proxy && unset https_proxy
-        cp scripts/unit_test/gen_allure_report.py ./
-        python gen_allure_report.py > /dev/null
-        echo -e "\033[35m ---- Report: https://xly.bce.baidu.com/ipipe/ipipe-report/report/${AGILE_JOB_BUILD_ID}/report/ \033[0m"
-    else
-        echo "AGILE_JOB_BUILD_ID is empty, skip generate allure report"
-    fi
+    print_info $exit_code
 else
     echo -e "\033[32m Changed Not CI case, Skips \033[0m"
     exit_code=0