diff --git a/Makefile b/Makefile
index 51914c4e2e5e..3b0057dfc968 100644
--- a/Makefile
+++ b/Makefile
@@ -465,6 +465,10 @@ else
CFLAGS += -DMXNET_USE_LIBJPEG_TURBO=0
endif
+ifeq ($(CI), 1)
+ MAVEN_ARGS := -B
+endif
+
# For quick compile tests, use a smaller subset
ALLX_DEP= $(ALL_DEP)
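
Note on the new CI switch: setting CI=1 only injects Maven's -B (batch mode) flag via MAVEN_ARGS, which turns off interactive output and the download progress bars that flood CI logs. A minimal sketch of the intended usage against the Scala targets patched below (build variables are illustrative and follow the existing targets):

    # interactive Maven output, as before
    make scalapkg USE_BLAS=openblas
    # MAVEN_ARGS becomes -B, so the same target runs "mvn package -B -P..."
    make scalapkg USE_BLAS=openblas CI=1
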
@@ -629,7 +633,7 @@ scalatestcompile:
scalapkg:
(cd $(ROOTDIR)/scala-package && \
- mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \
+ mvn package $(MAVEN_ARGS) -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \
-Dbuild.platform="$(SCALA_PKG_PROFILE)" \
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \
-Dcurrent_libdir="$(ROOTDIR)/lib" \
@@ -637,19 +641,19 @@ scalapkg:
scalaunittest:
(cd $(ROOTDIR)/scala-package && \
- mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),unittest -Dcxx="$(CXX)" \
+ mvn integration-test $(MAVEN_ARGS) -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),unittest -Dcxx="$(CXX)" \
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \
-Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS))
scalaintegrationtest:
(cd $(ROOTDIR)/scala-package && \
- mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \
+ mvn integration-test $(MAVEN_ARGS) -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \
-Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS))
scalainstall:
(cd $(ROOTDIR)/scala-package && \
- mvn install -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -DskipTests=true -Dcxx="$(CXX)" \
+ mvn install $(MAVEN_ARGS) -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -DskipTests=true -Dcxx="$(CXX)" \
-Dbuild.platform="$(SCALA_PKG_PROFILE)" \
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \
-Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a")
@@ -674,14 +678,14 @@ scalarelease-perform:
scaladeploy:
(cd $(ROOTDIR)/scala-package && \
- mvn deploy -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \
+ mvn deploy $(MAVEN_ARGS) -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \
-Dbuild.platform="$(SCALA_PKG_PROFILE)" \
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \
-Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a")
scaladeploylocal:
(cd $(ROOTDIR)/scala-package && \
- mvn deploy -Papache-release,deployLocal,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \
+ mvn deploy $(MAVEN_ARGS) -Papache-release,deployLocal,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \
-DaltDeploymentRepository=snapshot-repo::default::file:local-snapshot \
-Dgpg.skip \
-Dbuild.platform="$(SCALA_PKG_PROFILE)" \
diff --git a/ci/Jenkinsfile_utils.groovy b/ci/Jenkinsfile_utils.groovy
index f82c238ed075..8291bae1f7b7 100644
--- a/ci/Jenkinsfile_utils.groovy
+++ b/ci/Jenkinsfile_utils.groovy
@@ -64,7 +64,7 @@ def init_git_win() {
// pack libraries for later use
def pack_lib(name, libs, include_gcov_data = false) {
- sh """
+ sh returnStatus: true, script: """
set +e
echo "Packing ${libs} into ${name}"
echo ${libs} | sed -e 's/,/ /g' | xargs md5sum
@@ -83,7 +83,7 @@ return 0
def unpack_and_init(name, libs, include_gcov_data = false) {
init_git()
unstash name
- sh """
+ sh returnStatus: true, script: """
set +e
echo "Unpacked ${libs} from ${name}"
echo ${libs} | sed -e 's/,/ /g' | xargs md5sum
@@ -147,8 +147,9 @@ def collect_test_results_windows(original_file_name, new_file_name) {
}
-def docker_run(platform, function_name, use_nvidia, shared_mem = '500m') {
- def command = "ci/build.py --docker-registry ${env.DOCKER_CACHE_REGISTRY} %USE_NVIDIA% --platform %PLATFORM% --docker-build-retries 3 --shm-size %SHARED_MEM% /work/runtime_functions.sh %FUNCTION_NAME%"
+def docker_run(platform, function_name, use_nvidia, shared_mem = '500m', env_vars = "") {
+ def command = "ci/build.py %ENV_VARS% --docker-registry ${env.DOCKER_CACHE_REGISTRY} %USE_NVIDIA% --platform %PLATFORM% --docker-build-retries 3 --shm-size %SHARED_MEM% /work/runtime_functions.sh %FUNCTION_NAME%"
+ command = command.replaceAll('%ENV_VARS%', env_vars.length() > 0 ? "-e ${env_vars}" : '')
command = command.replaceAll('%USE_NVIDIA%', use_nvidia ? '--nvidiadocker' : '')
command = command.replaceAll('%PLATFORM%', platform)
command = command.replaceAll('%FUNCTION_NAME%', function_name)
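
As a rough illustration of the new env_vars parameter, a call such as docker_run('ubuntu_cpu', 'publish_scala_build', false, '500m', 'MAVEN_PUBLISH_OS_TYPE') should expand to roughly the command below; the registry name is illustrative, the real one comes from env.DOCKER_CACHE_REGISTRY:

    ci/build.py -e MAVEN_PUBLISH_OS_TYPE \
        --docker-registry mxnetci --platform ubuntu_cpu \
        --docker-build-retries 3 --shm-size 500m \
        /work/runtime_functions.sh publish_scala_build
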
diff --git a/ci/build.py b/ci/build.py
index 0069392d9a2a..e5cf933d2fd7 100755
--- a/ci/build.py
+++ b/ci/build.py
@@ -215,20 +215,21 @@ def container_run(platform: str,
local_ccache_dir: str,
command: List[str],
cleanup: Cleanup,
+ environment: Dict[str, str],
dry_run: bool = False) -> int:
"""Run command in a container"""
container_wait_s = 600
#
# Environment setup
#
- environment = {
+ environment.update({
'CCACHE_MAXSIZE': '500G',
'CCACHE_TEMPDIR': '/tmp/ccache', # temp dir should be local and not shared
'CCACHE_DIR': '/work/ccache', # this path is inside the container as /work/ccache is
# mounted
'CCACHE_LOGFILE': '/tmp/ccache.log', # a container-scoped log, useful for ccache
# verification.
- }
+ })
# These variables are passed to the container so the process tree killer can find runaway
# processes inside the container
# https://wiki.jenkins.io/display/JENKINS/ProcessTreeKiller
@@ -446,6 +447,10 @@ def main() -> int:
parser.add_argument("--no-cache", action="store_true",
help="passes --no-cache to docker build")
+ parser.add_argument("-e", "--environment", nargs="*", default=[],
+ help="Environment variables for the docker container. "
+ "Specify with a list containing either names or name=value")
+
parser.add_argument("command",
help="command to run in the container",
nargs='*', action='append', type=str)
@@ -474,6 +479,9 @@ def signal_handler(signum, _):
signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGINT, signal_handler)
+ environment = dict([(e.split('=')[:2] if '=' in e else (e, os.environ[e]))
+ for e in args.environment])
+
if args.list:
print(list_platforms())
elif args.platform:
@@ -493,13 +501,13 @@ def signal_handler(signum, _):
ret = container_run(
platform=platform, nvidia_runtime=args.nvidiadocker,
shared_memory_size=args.shared_memory_size, command=command, docker_registry=args.docker_registry,
- local_ccache_dir=args.ccache_dir, cleanup=cleanup)
+ local_ccache_dir=args.ccache_dir, cleanup=cleanup, environment=environment)
elif args.print_docker_run:
command = []
ret = container_run(
platform=platform, nvidia_runtime=args.nvidiadocker,
shared_memory_size=args.shared_memory_size, command=command, docker_registry=args.docker_registry,
- local_ccache_dir=args.ccache_dir, dry_run=True, cleanup=cleanup)
+ local_ccache_dir=args.ccache_dir, dry_run=True, cleanup=cleanup, environment=environment)
else:
# With no commands, execute a build function for the target platform
command = ["/work/mxnet/ci/docker/runtime_functions.sh", "build_{}".format(platform)]
@@ -507,7 +515,7 @@ def signal_handler(signum, _):
ret = container_run(
platform=platform, nvidia_runtime=args.nvidiadocker,
shared_memory_size=args.shared_memory_size, command=command, docker_registry=args.docker_registry,
- local_ccache_dir=args.ccache_dir, cleanup=cleanup)
+ local_ccache_dir=args.ccache_dir, cleanup=cleanup, environment=environment)
if ret != 0:
logging.critical("Execution of %s failed with status: %d", command, ret)
@@ -535,7 +543,7 @@ def signal_handler(signum, _):
container_run(
platform=platform, nvidia_runtime=args.nvidiadocker,
shared_memory_size=args.shared_memory_size, command=command, docker_registry=args.docker_registry,
- local_ccache_dir=args.ccache_dir, cleanup=cleanup)
+ local_ccache_dir=args.ccache_dir, cleanup=cleanup, environment=environment)
shutil.move(buildir(), plat_buildir)
logging.info("Built files left in: %s", plat_buildir)
diff --git a/ci/docker/install/ubuntu_scala.sh b/ci/docker/install/ubuntu_scala.sh
index 6ecb8d801186..22be230efdd3 100755
--- a/ci/docker/install/ubuntu_scala.sh
+++ b/ci/docker/install/ubuntu_scala.sh
@@ -33,4 +33,7 @@ apt-get install -y openjdk-8-jre
apt-get update || true
apt-get install -y \
maven \
+ gnupg \
+ gnupg2 \
+ gnupg-agent \
scala
diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh
index e4eea93e6ee8..4ec6b76d6267 100755
--- a/ci/docker/runtime_functions.sh
+++ b/ci/docker/runtime_functions.sh
@@ -36,6 +36,11 @@ clean_repo() {
git submodule update --init --recursive
}
+scala_prepare() {
+ # Clean up maven logs
+ export MAVEN_OPTS="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
+}
+
build_ccache_wrappers() {
set -ex
@@ -309,7 +314,6 @@ build_amzn_linux_cpu() {
ninja -v
}
-
build_centos7_mkldnn() {
set -ex
cd /work/mxnet
@@ -842,21 +846,24 @@ unittest_ubuntu_python3_quantization_gpu() {
unittest_ubuntu_cpu_scala() {
set -ex
- make scalapkg USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
- make scalaunittest USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
+ scala_prepare
+ make scalapkg USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
+ make scalaunittest USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
}
unittest_centos7_cpu_scala() {
set -ex
cd /work/mxnet
+ scala_prepare
make scalapkg USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
make scalaunittest USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
}
unittest_ubuntu_cpu_clojure() {
set -ex
- make scalapkg USE_OPENCV=1 USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
- make scalainstall USE_OPENCV=1 USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
+ scala_prepare
+ make scalapkg USE_OPENCV=1 USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
+ make scalainstall USE_OPENCV=1 USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
./contrib/clojure-package/ci-test.sh
}
@@ -1000,8 +1007,9 @@ integrationtest_ubuntu_cpu_dist_kvstore() {
integrationtest_ubuntu_gpu_scala() {
set -ex
- make scalapkg USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 USE_DIST_KVSTORE=1 SCALA_ON_GPU=1 ENABLE_TESTCOVERAGE=1
- make scalaintegrationtest USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 SCALA_TEST_ON_GPU=1 USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1
+ scala_prepare
+ make scalapkg USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 USE_DIST_KVSTORE=1 SCALA_ON_GPU=1 ENABLE_TESTCOVERAGE=1 CI=1
+ make scalaintegrationtest USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 SCALA_TEST_ON_GPU=1 USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
}
integrationtest_ubuntu_gpu_dist_kvstore() {
@@ -1065,7 +1073,6 @@ build_docs() {
popd
}
-
# Functions that run the nightly Tests:
#Runs Apache RAT Check on MXNet Source for License Headers
@@ -1263,6 +1270,30 @@ deploy_jl_docs() {
# ...
}
+publish_scala_build() {
+ set -ex
+ pushd .
+ scala_prepare
+ ./scala-package/dev/build.sh
+ popd
+}
+
+publish_scala_test() {
+ set -ex
+ pushd .
+ scala_prepare
+ ./scala-package/dev/test.sh
+ popd
+}
+
+publish_scala_deploy() {
+ set -ex
+ pushd .
+ scala_prepare
+ ./scala-package/dev/deploy.sh
+ popd
+}
+
# broken_link_checker
broken_link_checker() {
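
scala_prepare only quiets Maven's transfer (download) logging through MAVEN_OPTS; together with CI=1 it keeps the Scala CI logs readable. A rough local equivalent of the patched unittest_ubuntu_cpu_scala, assuming an Ubuntu host with the usual MXNet build prerequisites:

    export MAVEN_OPTS="-Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
    make scalapkg USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
    make scalaunittest USE_BLAS=openblas USE_DIST_KVSTORE=1 ENABLE_TESTCOVERAGE=1 CI=1
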
diff --git a/ci/docker_cache.py b/ci/docker_cache.py
index 70dd51c9056f..db66f722b22e 100755
--- a/ci/docker_cache.py
+++ b/ci/docker_cache.py
@@ -210,8 +210,7 @@ def _get_dockerhub_credentials(): # pragma: no cover
logging.exception("The request was invalid due to:")
elif client_error.response['Error']['Code'] == 'InvalidParameterException':
logging.exception("The request had invalid params:")
- else:
- raise
+ raise
else:
secret = get_secret_value_response['SecretString']
secret_dict = json.loads(secret)
diff --git a/ci/publish/Jenkinsfile b/ci/publish/Jenkinsfile
new file mode 100644
index 000000000000..49e7d7a88d40
--- /dev/null
+++ b/ci/publish/Jenkinsfile
@@ -0,0 +1,105 @@
+// -*- mode: groovy -*-
+
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Jenkins pipeline
+// See documents at https://jenkins.io/doc/book/pipeline/jenkinsfile/
+
+//mxnet libraries
+mx_scala_pub = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a, config.mk, scala-package/pom.xml, scala-package/**/pom.xml, scala-package/*/target/test-classes/**, scala-package/local-snapshot/**'
+
+// timeout in minutes
+max_time = 120
+
+node('restricted-utility') {
+ // Loading the utilities requires a node context unfortunately
+ checkout scm
+ utils = load('ci/Jenkinsfile_utils.groovy')
+}
+utils.assign_node_labels(utility: 'restricted-utility', linux_cpu: 'restricted-mxnetlinux-cpu', linux_gpu: 'restricted-mxnetlinux-gpu', linux_gpu_p3: 'restricted-mxnetlinux-gpu-p3', windows_cpu: 'restricted-mxnetwindows-cpu', windows_gpu: 'restricted-mxnetwindows-gpu')
+
+// CPU and GPU. OSX nodes are not currently supported by Jenkins
+def nodeMap = ['cpu': NODE_LINUX_CPU, 'gpu': NODE_LINUX_GPU]
+def scalaOSMap = ['cpu': 'linux-x86_64-cpu', 'gpu': 'linux-x86_64-gpu']
+
+def wrapStep(nodeToRun, workspaceName, step) {
+ return {
+ node(nodeToRun) {
+ ws("workspace/${workspaceName}") {
+ timeout(time: max_time, unit: 'MINUTES') {
+ step()
+ }
+ }
+ }
+ }
+}
+
+def toBuild = [:]
+def labels = ['cpu'] // , 'gpu']
+for (x in labels) {
+ def label = x // Copy loop variable: Groovy closures capture it by reference
+ toBuild["Scala Build ${label}"] = wrapStep(nodeMap[label], "build-scala-${label}") {
+ env.MAVEN_PUBLISH_OS_TYPE = scalaOSMap[label]
+ utils.init_git()
+ utils.docker_run("ubuntu_${label}", 'publish_scala_build', label == 'gpu', '500m', 'MAVEN_PUBLISH_OS_TYPE')
+ utils.pack_lib("scala_${label}", mx_scala_pub, false)
+ }
+}
+
+def toTest = [:]
+def systems = ['ubuntu'] // , 'centos7']
+for (x in labels) {
+ def label = x // Copy loop variable: Groovy closures capture it by reference
+ for (y in systems) {
+ def system = y // Copy loop variable: Groovy closures capture it by reference
+ toTest["Scala Test ${system} ${label}"] = wrapStep(nodeMap[label], "test-scala-${system}-${label}") {
+ utils.unpack_and_init("scala_${label}", mx_scala_pub, false)
+ utils.docker_run("${system}_${label}", 'publish_scala_test', label == 'gpu')
+ }
+ }
+}
+
+def toDeploy = [:]
+for (x in labels) {
+ def label = x // Copy loop variable: Groovy closures capture it by reference
+ toDeploy["Scala Deploy ${label}"] = wrapStep(nodeMap[label], "deploy-scala-${label}") {
+ env.MAVEN_PUBLISH_OS_TYPE = scalaOSMap[label]
+ utils.unpack_and_init("scala_${label}", mx_scala_pub, false)
+ utils.docker_run("ubuntu_${label}", 'publish_scala_deploy', label == 'gpu', '500m', 'MAVEN_PUBLISH_OS_TYPE MAVEN_PUBLISH_SECRET_ENDPOINT_URL MAVEN_PUBLISH_SECRET_NAME_CREDENTIALS MAVEN_PUBLISH_SECRET_NAME_GPG DOCKERHUB_SECRET_ENDPOINT_REGION')
+ }
+}
+
+utils.main_wrapper(
+core_logic: {
+ stage('Build Packages') {
+ parallel toBuild
+ }
+ stage('Test Packages') {
+ parallel toTest
+ }
+ stage('Deploy Packages') {
+ parallel toDeploy
+ }
+}
+,
+failure_handler: {
+ if (currentBuild.result == "FAILURE") {
+ // emailext body: 'Generating the nightly maven has failed. Please view the build at ${BUILD_URL}', replyTo: '${EMAIL}', subject: '[NIGHTLY MAVEN FAILED] Build ${BUILD_NUMBER}', to: '${EMAIL}'
+ }
+}
+)
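
For reference, a hedged sketch of driving the three pipeline stages by hand on a CPU machine from the repository root (Docker and the ci/build.py prerequisites are assumed; the deploy stage additionally needs the secret-related variables listed in the docker_run call above):

    export MAVEN_PUBLISH_OS_TYPE=linux-x86_64-cpu
    ci/build.py -e MAVEN_PUBLISH_OS_TYPE --platform ubuntu_cpu \
        /work/runtime_functions.sh publish_scala_build
    ci/build.py --platform ubuntu_cpu /work/runtime_functions.sh publish_scala_test
    # publish_scala_deploy additionally needs MAVEN_PUBLISH_SECRET_ENDPOINT_URL,
    # MAVEN_PUBLISH_SECRET_NAME_CREDENTIALS, MAVEN_PUBLISH_SECRET_NAME_GPG and
    # DOCKERHUB_SECRET_ENDPOINT_REGION exported and forwarded with -e
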
diff --git a/scala-package/.gitignore b/scala-package/.gitignore
index 9a89bef324bc..f75826b29281 100644
--- a/scala-package/.gitignore
+++ b/scala-package/.gitignore
@@ -8,4 +8,4 @@ core/src/main/scala/org/apache/mxnet/NDArrayRandomAPIBase.scala
core/src/main/scala/org/apache/mxnet/SymbolRandomAPIBase.scala
examples/scripts/infer/images/
examples/scripts/infer/models/
-local-snapshot
\ No newline at end of file
+local-snapshot
diff --git a/scala-package/dev/build.sh b/scala-package/dev/build.sh
new file mode 100755
index 000000000000..c336fd84e5e2
--- /dev/null
+++ b/scala-package/dev/build.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -ex
+
+# Setup Environment Variables
+# MAVEN_PUBLISH_OS_TYPE: linux-x86_64-cpu|linux-x86_64-gpu|osx-x86_64-cpu
+# export MAVEN_PUBLISH_OS_TYPE=linux-x86_64-cpu
+
+bash scala-package/dev/compile-mxnet-backend.sh $MAVEN_PUBLISH_OS_TYPE ./
+
+# Scala steps to deploy
+make scalapkg CI=1
+
+# Compile tests for discovery later
+export GPG_TTY=$(tty)
+make scalatestcompile CI=1
+# make scalainstall CI=1
+make scaladeploylocal CI=1
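
A hedged example of running this build step outside of Jenkins; MAVEN_PUBLISH_OS_TYPE has to match the host, as noted in the comment at the top of the script:

    export MAVEN_PUBLISH_OS_TYPE=linux-x86_64-cpu
    bash scala-package/dev/build.sh
    # scaladeploylocal stages the artifacts into a local-snapshot file repository,
    # which the publish Jenkinsfile stashes via the mx_scala_pub pattern
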
diff --git a/scala-package/dev/buildkey.py b/scala-package/dev/buildkey.py
new file mode 100644
index 000000000000..8a1b7bf63286
--- /dev/null
+++ b/scala-package/dev/buildkey.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import json
+import logging
+import subprocess
+
+HOME = os.environ['HOME']
+KEY_PATH = os.path.join(HOME, ".m2")
+
+
+'''
+This script does the following:
+ Imports the publish credentials and GPG key from the AWS secrets service
+ Creates settings.xml in ~/.m2 with the encrypted server passwords and GPG passphrase
+ Creates settings-security.xml in ~/.m2 with the encrypted master password
+ Imports the GPG key from key.asc into gpg
+'''
+
+
+def getCredentials():
+ import boto3
+ import botocore
+ endpoint_url = os.environ['MAVEN_PUBLISH_SECRET_ENDPOINT_URL']
+ secret_creds_name = os.environ['MAVEN_PUBLISH_SECRET_NAME_CREDENTIALS']
+ secret_key_name = os.environ['MAVEN_PUBLISH_SECRET_NAME_GPG']
+ region_name = os.environ['DOCKERHUB_SECRET_ENDPOINT_REGION']
+
+ session = boto3.Session()
+ client = session.client(
+ service_name='secretsmanager',
+ region_name=region_name,
+ endpoint_url=endpoint_url
+ )
+ try:
+ get_secret_value_response = client.get_secret_value(
+ SecretId=secret_creds_name
+ )
+ get_secret_key_response = client.get_secret_value(
+ SecretId=secret_key_name
+ )
+ except botocore.exceptions.ClientError as client_error:
+ if client_error.response['Error']['Code'] == 'ResourceNotFoundException':
+ # get_secret_value_response is unbound if the credentials lookup itself failed
+ name = (secret_creds_name if 'get_secret_value_response' not in locals()
+ else secret_key_name)
+ logging.exception("The requested secret %s was not found", name)
+ elif client_error.response['Error']['Code'] == 'InvalidRequestException':
+ logging.exception("The request was invalid due to:")
+ elif client_error.response['Error']['Code'] == 'InvalidParameterException':
+ logging.exception("The request had invalid params:")
+ raise
+ else:
+ secret = get_secret_value_response['SecretString']
+ secret_dict = json.loads(secret)
+ secret_key = get_secret_key_response['SecretString']
+ return secret_dict, secret_key
+
+
+def importASC(key, gpgPassphrase):
+ filename = os.path.join(KEY_PATH, "key.asc")
+ with open(filename, 'w') as f:
+ f.write(key)
+ subprocess.check_output(['gpg2', '--batch', '--yes',
+ '--passphrase-fd', '0',
+ "--import", "{}".format(filename)],
+ input=str.encode(gpgPassphrase))
+
+
+def encryptMasterPSW(password):
+ filename = os.path.join(KEY_PATH, "encryptMasterPassword.exp")
+ with open(filename, 'w') as f:
+ f.write('''
+ spawn mvn --encrypt-master-password
+ expect -exact "Master password: "
+ send -- "{}\r"
+ expect eof
+ '''.format(password))
+ result = subprocess.check_output(['expect', filename])
+ return str(result).split('\r\n')[-1][2:-3]
+
+
+def encryptPSW(password):
+ filename = os.path.join(KEY_PATH, "encryptPassword.exp")
+ with open(filename, 'w') as f:
+ f.write('''
+ spawn mvn --encrypt-password
+ expect -exact "Password: "
+ send -- "{}\r"
+ expect eof
+ '''.format(password))
+ result = subprocess.check_output(['expect', filename])
+ return str(result).split('\r\n')[-1][2:-3]
+
+
+def masterPSW(password):
+ with open(os.path.join(KEY_PATH, "settings-security.xml"), "w") as f:
+ f.write("\n {}\n"
+ .format(password))
+
+
+def serverPSW(username, password, gpgPassphrase):
+ with open(os.path.join(KEY_PATH, "settings.xml"), "w") as f:
+ settingsString = '''<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
+ http://maven.apache.org/xsd/settings-1.0.0.xsd">
+ <servers>
+ <!-- credentials for publishing snapshots -->
+ <server>
+ <id>apache.snapshots.https</id>
+ <username>{}</username>
+ <password>{}</password>
+ </server>
+ <!-- credentials for staging releases -->
+ <server>
+ <id>apache.releases.https</id>
+ <username>{}</username>
+ <password>{}</password>
+ </server>
+ </servers>
+ <profiles>
+ <profile>
+ <id>gpg</id>
+ <properties>
+ <gpg.executable>gpg2</gpg.executable>
+ <gpg.passphrase>{}</gpg.passphrase>
+ <gpg.useagent>true</gpg.useagent>
+ </properties>
+ </profile>
+ </profiles>
+ <activeProfiles>
+ <activeProfile>gpg</activeProfile>
+ </activeProfiles>
+ </settings>'''.format(username, password, username, password, gpgPassphrase)
+ f.write(settingsString)
+
+
+if __name__ == "__main__":
+ if not os.path.exists(KEY_PATH):
+ os.makedirs(KEY_PATH)
+ credentials, gpgKey = getCredentials()
+ masterPass = encryptMasterPSW(credentials['masterpass'])
+ masterPSW(masterPass)
+ passwordEncrypted = encryptPSW(credentials['password'])
+ serverPSW(credentials['user'], passwordEncrypted,
+ credentials['gpgPassphrase'])
+ importASC(gpgKey, credentials['gpgPassphrase'])
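
For context, a rough sketch of the manual commands that encryptMasterPSW, encryptPSW and importASC wrap; the expect scripts above exist only to answer the prompts non-interactively:

    mvn --encrypt-master-password    # prompts for the master password, prints {encrypted...}
    mvn --encrypt-password           # prompts for the server password, prints {encrypted...}
    # the first value goes into ~/.m2/settings-security.xml, the second into the
    # password entries of ~/.m2/settings.xml written by serverPSW()
    gpg2 --batch --yes --passphrase-fd 0 --import ~/.m2/key.asc
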
diff --git a/scala-package/dev/compile-mxnet-backend.sh b/scala-package/dev/compile-mxnet-backend.sh
index b065e01afc8e..114bf0766444 100755
--- a/scala-package/dev/compile-mxnet-backend.sh
+++ b/scala-package/dev/compile-mxnet-backend.sh
@@ -33,7 +33,7 @@ MXNETDIR=$2
# below routine shamelessly copied from
# https://github.com/apache/incubator-mxnet/blob/master/setup-utils/install-mxnet-osx-python.sh
-# This routine executes a command,
+# This routine executes a command,
# prints error message on the console on non-zero exit codes and
# returns the exit code to the caller.
chkret() {
@@ -51,7 +51,10 @@ chkret() {
UNAME=`uname -s`
chkret pushd $MXNETDIR
-chkret git submodule update --init --recursive
+
+set +e
+git submodule update --init --recursive
+set -e
# don't want to overwrite an existing config file
cp make/config.mk ./config.mk
diff --git a/scala-package/dev/deploy.sh b/scala-package/dev/deploy.sh
new file mode 100755
index 000000000000..6f845ba5d78f
--- /dev/null
+++ b/scala-package/dev/deploy.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -ex
+
+# Setup Environment Variables
+# MAVEN_PUBLISH_OS_TYPE: linux-x86_64-cpu|linux-x86_64-gpu|osx-x86_64-cpu
+# export MAVEN_PUBLISH_OS_TYPE=linux-x86_64-cpu
+
+# Run python to configure keys
+python3 $PWD/scala-package/dev/buildkey.py
+
+# Updating cache
+mkdir -p ~/.gnupg
+echo "default-cache-ttl 14400" > ~/.gnupg/gpg-agent.conf
+echo "max-cache-ttl 14400" >> ~/.gnupg/gpg-agent.conf
+echo "allow-loopback-pinentry" >> ~/.gnupg/gpg-agent.conf
+echo "pinentry-mode loopback" >> ~/.gnupg/gpg-agent.conf
+export GPG_TTY=$(tty)
+
+cd scala-package
+VERSION=$(mvn -q -Dexec.executable="echo" -Dexec.args='${project.version}' --non-recursive exec:exec)
+cd ..
+
+# echo "\n\n$VERSION\n" | make scalarelease-dryrun
+make scaladeploy CI=1
+
+# Clear all password .xml files, exp files, and gpg key files
+rm -rf ~/.m2/*.xml ~/.m2/key.asc ~/.m2/*.exp
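
A small hedged check that the loopback pinentry configuration written above lets gpg2 sign without a terminal prompt, which is what the Maven release profile relies on; GPG_PASSPHRASE and the file name are placeholders:

    export GPG_TTY=$(tty)
    echo test > /tmp/sign-check.txt
    gpg2 --batch --yes --pinentry-mode loopback --passphrase-fd 0 \
        --detach-sign /tmp/sign-check.txt <<< "$GPG_PASSPHRASE"
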
diff --git a/scala-package/dev/test.sh b/scala-package/dev/test.sh
new file mode 100755
index 000000000000..03810fbf8d55
--- /dev/null
+++ b/scala-package/dev/test.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -ex
+
+# Test
+cd scala-package/packageTest
+make scalaintegrationtestlocal CI=1
diff --git a/scala-package/packageTest/Makefile b/scala-package/packageTest/Makefile
index 6073ff8a722f..8c12c1d04189 100644
--- a/scala-package/packageTest/Makefile
+++ b/scala-package/packageTest/Makefile
@@ -43,6 +43,10 @@ else
endif
endif
+ifeq ($(CI), 1)
+ MAVEN_ARGS := -B
+endif
+
PROFILES := -Ptest
ifeq ($(UNIT), 1)
PROFILES := "$(PROFILES),unittest"
@@ -59,27 +63,27 @@ endif
clean:
- (mvn clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
+ (mvn $(MAVEN_ARGS) clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
-Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \
-Dmxnet.version=$(MXNET_VERSION) \
-Dscala.version=$(SCALA_VERSION))
testinstall:
- (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
+ (mvn $(MAVEN_ARGS) integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
$(PROFILES) \
-Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \
-Dmxnet.version=$(MXNET_VERSION) \
-Dscala.version=$(SCALA_VERSION))
testlocal:
- (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
+ (mvn $(MAVEN_ARGS) integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
$(PROFILES),fromLocal \
-Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \
-Dmxnet.version=$(MXNET_VERSION) \
-Dscala.version=$(SCALA_VERSION))
testsnapshot:
- (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
+ (mvn $(MAVEN_ARGS) integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \
$(PROFILES),fromSnapshots \
-Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \
-Dmxnet.repo=$(MXNET_REPO) \