diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 9a9ccaf2883..00000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-dist: trusty
-sudo: off
-language: python
-services:
-  - docker
-git:
-  depth: false
-addons:
-  apt:
-    packages:
-      - libssl-dev
-      - libffi-dev
-      - python-dev
-install: pip install --upgrade pip tox six
-cache: pip
-jobs:
-  include:
-    - stage: precheck
-      env: PURPOSE='Dependency Check'
-      script: ./scripts/ci/dependency_check.sh
-      python: 3.6
-    - stage: unittest
-      python: 3.6
-      env: TOXENV=py36
-      script: ./scripts/ci/unittest.sh
-    - stage: verify
-      script: ./scripts/ci/test_profile_integration.sh
-      env: PURPOSE='Integration for profiles'
-      python: 3.6
-    - stage: verify
-      script: ./scripts/ci/test_ref_doc.sh
-      env: PURPOSE='RefDocVerify'
-      python: 3.6
-    - stage: verify
-      env: PURPOSE='Load extension commands'
-      script: ./scripts/ci/test_extensions.sh
-      python: 3.6
-    - stage: publish
-      script: ./scripts/ci/publish.sh
-      python: 3.6
-      env: PURPOSE='Nightly Edge Build'
-      if: branch = dev and type = push
-    - stage: publish
-      script: ./scripts/ci/build_droid.sh
-      python: 3.6
-      env: PURPOSE='Automation Docker'
-      if: repo = Azure/azure-cli and type = push
-stages:
-  - precheck
-  - unittest
-  - verify
-  - publish
diff --git a/scripts/ci/build_droid.sh b/scripts/ci/build_droid.sh
deleted file mode 100755
index 913d4788687..00000000000
--- a/scripts/ci/build_droid.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env bash
-
-# Build the docker image for A01 automation system
-
-set -ev
-
-dp0=`cd $(dirname $0); pwd`
-
-#############################################
-# Define colored output func
-function title {
-    LGREEN='\033[1;32m'
-    CLEAR='\033[0m'
-
-    echo -e ${LGREEN}$1${CLEAR}
-}
-
-#############################################
-# Clean up artifacts
-title 'Remove artifacts folder'
-if [ -d artifacts ] && [ "$AZURE_CLI_BUILD_INCREMENTAL" != "True" ]; then rm -r artifacts; fi
-
-#############################################
-# Build the whl files first
-if [ ! -d artifacts ]; then $dp0/build.sh; fi
-
-#############################################
-# Move dockerfile
-cp $dp0/a01/dockerfiles/py36/Dockerfile artifacts/
-
-#############################################
-# Move other scripts for docker
-cp -R $dp0/a01/* artifacts/
-
-#############################################
-# Move privates for docker
-if [ -d privates ]; then
-    cp -R privates artifacts/privates
-else
-    mkdir -p artifacts/privates
-    echo 'placeholder' > artifacts/privates/placeholder
-fi
-
-#############################################
-# for travis repo slug, remove the suffix to reveal the owner
-# - the offical repo will generate image: azurecli-test-Azure
-# - the fork repo will generate image: azurecli-test-johnongithub
-# for local private build uses local user name.
-# - eg. azurecli-test-private-john
-title 'Determine docker image name'
-image_owner=${TRAVIS_REPO_SLUG%/azure-cli}
-image_owner=${image_owner:="private-${USER}"}
-image_owner=`echo $image_owner | tr '[:upper:]' '[:lower:]'`
-version=`cat artifacts/version`
-image_name=azureclidev.azurecr.io/azurecli-test-$image_owner:python3.6-$version
-echo "Image name: $image_name"
-
-title 'Login docker registry'
-if [ $AZURECLIDEV_ACR_SP_USERNAME ] && [ $AZURECLIDEV_ACR_SP_PASSWORD ]; then
-    docker login azureclidev.azurecr.io -u $AZURECLIDEV_ACR_SP_USERNAME -p $AZURECLIDEV_ACR_SP_PASSWORD
-fi
-
-title 'Build docker image'
-docker build -t $image_name -f artifacts/Dockerfile artifacts
-
-title 'Push docker image'
-if [ "$1" == "push" ] || [ "$TRAVIS" == "true" ]; then
-    docker push $image_name
-else
-    echo "Skip"
-fi
-
-title 'Push docker image as latest'
-if [ "$TRAVIS" == "true" ]; then
-    docker tag $image_name azureclidev.azurecr.io/azurecli-test-$image_owner:latest
-    docker push azureclidev.azurecr.io/azurecli-test-$image_owner:latest
-fi
diff --git a/scripts/ci/publish.py b/scripts/ci/publish.py
deleted file mode 100644
index 5f12404eb4a..00000000000
--- a/scripts/ci/publish.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python
-
-# --------------------------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for license information.
-# --------------------------------------------------------------------------------------------
-
-import os
-import argparse
-import glob
-import mimetypes
-import logging
-import re
-
-from azure.storage.blob import BlockBlobService, ContentSettings
-
-
-logger = logging.getLogger('az-publish')
-
-def publish(build, account, container, sas, **_) -> None:
-    client = BlockBlobService(account_name=account, sas_token=sas)
-
-    publishing_files = (p for p in glob.iglob(os.path.join(build, '**/*'), recursive=True))
-    for source in publishing_files:
-        if os.path.isdir(source):
-            continue
-
-        blob_path = os.path.join(os.environ['TRAVIS_REPO_SLUG'],
-                                 os.environ['TRAVIS_BRANCH'],
-                                 os.environ['TRAVIS_BUILD_NUMBER'],
-                                 os.path.relpath(source, build))
-
-        content_type, content_encoding = mimetypes.guess_type(os.path.basename(source))
-        content_settings = ContentSettings(content_type, content_encoding)
-        logger.info(f'Uploading {blob_path} ...')
-        client.create_blob_from_path(container_name=container,
-                                     blob_name=blob_path,
-                                     file_path=source,
-                                     content_settings=content_settings)
-
-
-def generate_package_list_in_html(title: str, links: list):
-    package_list = '\n'.join((f'    <a href="{p}">{p}</a><br/>' for p in links))
-    return f"""<html>
-<head>
-    <title>{title}</title>
-</head>
-<body>
-    <h1>{title}</h1>
-    {package_list}
-</body>
-</html>"""
-
-
-def nightly(build: str, account: str, container: str, sas: str, **_) -> None:
-    client = BlockBlobService(account_name=account, sas_token=sas)
-
-    modules_list = []
-    for wheel_file in glob.iglob(os.path.join(build, 'build/*.whl')):
-        package_name = os.path.basename(wheel_file).split('-', maxsplit=1)[0].replace('_', '-')
-        sdist_file = next(glob.iglob(os.path.join(build, 'source', f'{package_name}*.tar.gz')))
-
-        content_type, content_encoding = mimetypes.guess_type(os.path.basename(wheel_file))
-        content_settings = ContentSettings(content_type, content_encoding)
-        client.create_blob_from_path(container_name=container,
-                                     blob_name=f'{package_name}/{os.path.basename(wheel_file)}',
-                                     file_path=wheel_file,
-                                     content_settings=content_settings)
-
-        content_type, content_encoding = mimetypes.guess_type(os.path.basename(sdist_file))
-        content_settings = ContentSettings(content_type, content_encoding)
-        client.create_blob_from_path(container_name=container,
-                                     blob_name=f'{package_name}/{os.path.basename(sdist_file)}',
-                                     file_path=sdist_file,
-                                     content_settings=content_settings)
-
-        package_blobs = (os.path.basename(b.name) for b in client.list_blobs(container, prefix=package_name + '/')
-                         if b.name != f"{package_name}/")
-
-        client.create_blob_from_text(container_name=container,
-                                     blob_name=f'{package_name}/',
-                                     text=generate_package_list_in_html(f'Links for {package_name}', package_blobs),
-                                     content_settings=ContentSettings('text/html'))
-
-        modules_list.append(f"{package_name}/")
-
-    client.create_blob_from_text(container_name=container,
-                                 blob_name='index.html',
-                                 text=generate_package_list_in_html('Simple Index', modules_list),
-                                 content_settings=ContentSettings('text/html'))
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(prog='az-publish')
-    subparsers = parser.add_subparsers(title='Actions')
-
-    store_parser = subparsers.add_parser('store', help='Publish the build artifacts to a long-term storage.')
-    store_parser.set_defaults(func=publish)
-    store_parser.add_argument('-b', dest='build', help='The folder where the artifacts are saved.')
-    store_parser.add_argument('-a', dest='account', help='The storage account name.')
-    store_parser.add_argument('-c', dest='container', help='The storage account container.')
-    store_parser.add_argument('-s', dest='sas', help='The storage account access token.')
-
-
-    nightly_parser = subparsers.add_parser('nightly', help='Publish the build artifacts to a nightly build storage.')
-    nightly_parser.set_defaults(func=nightly)
-    nightly_parser.add_argument('-b', dest='build', help='The folder where the artifacts are saved.')
-    nightly_parser.add_argument('-a', dest='account', help='The storage account name.')
-    nightly_parser.add_argument('-c', dest='container', help='The storage account container.')
-    nightly_parser.add_argument('-s', dest='sas', help='The storage account access token.')
-
-
-    args = parser.parse_args()
-    args.func(**vars(args))
-
diff --git a/scripts/ci/publish.sh b/scripts/ci/publish.sh
deleted file mode 100755
index fafdde05820..00000000000
--- a/scripts/ci/publish.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-set -ev
-
-unset AZURE_CLI_DIAGNOSTICS_TELEMETRY
-pip install azure-storage-blob==1.1.0
-
-wd=`cd $(dirname $0); pwd`
-
-if [ -z "$PUBLISH_STORAGE_SAS" ] || [ -z "$PUBLISH_STORAGE_ACCT" ] || [ -z "$PUBLISH_CONTAINER" ]; then
-    echo 'Missing publish storage account credential. Skip publishing to store.'
-    exit 0
-fi
-
-echo 'Generate artifacts'
-. $wd/artifacts.sh
-
-echo 'Upload artifacts to store'
-python $wd/publish.py store -b $share_folder -c $PUBLISH_CONTAINER -a $PUBLISH_STORAGE_ACCT -s "$PUBLISH_STORAGE_SAS"
-
-if [ -z "$EDGE_STORAGE_SAS" ] || [ -z "$EDGE_STORAGE_ACCT" ] || [ -z "$EDGE_CONTAINER" ]; then
-    echo 'Missing edge storage account credential. Skip publishing the edge build.'
-    exit 0
-fi
-
-echo 'Upload artifacts to edge feed'
-python $wd/publish.py nightly -b $share_folder -c $EDGE_CONTAINER -a $EDGE_STORAGE_ACCT -s "$EDGE_STORAGE_SAS"
-