diff --git a/docs/python_docs/python/tutorials/deploy/run-on-aws/use_sagemaker.rst b/docs/python_docs/python/tutorials/deploy/run-on-aws/use_sagemaker.rst
index d627befbd8fd..dc8052b10cdd 100644
--- a/docs/python_docs/python/tutorials/deploy/run-on-aws/use_sagemaker.rst
+++ b/docs/python_docs/python/tutorials/deploy/run-on-aws/use_sagemaker.rst
@@ -18,7 +18,7 @@
 Run on Amazon SageMaker
 -----------------------
 
-This chapter will give a high level overview about Amazon SageMaker,
+This chapter gives a high-level overview of running MXNet on Amazon SageMaker;
 in-depth tutorials can be found on the `Sagemaker website `__.
 
@@ -29,16 +29,7 @@
 charged by time. Within this notebook you can
 `fetch, explore and prepare training data `__.
 
-::
-
-    import mxnet as mx
-    import sagemaker
-    mx.test_utils.get_cifar10() # Downloads Cifar-10 dataset to ./data
-    sagemaker_session = sagemaker.Session()
-    inputs = sagemaker_session.upload_data(path='data/cifar',
-                                           key_prefix='data/cifar10')
-
-Once the data is ready, you can easily launch training via the SageMaker
+With your own data on the notebook instance, you can easily launch training via the SageMaker
 SDK. So there is no need to manually configure and log into EC2
 instances. You can either bring your own model or use SageMaker's
 `built-in
@@ -51,11 +42,11 @@ instance:
 
 ::
 
     from sagemaker.mxnet import MXNet as MXNetEstimator
-    estimator = MXNetEstimator(entry_point='train.py', 
+    estimator = MXNetEstimator(entry_point='train.py',
                                role=sagemaker.get_execution_role(),
-                               train_instance_count=1, 
+                               train_instance_count=1,
                                train_instance_type='local',
-                               hyperparameters={'batch_size': 1024, 
+                               hyperparameters={'batch_size': 1024,
                                                 'epochs': 30})
     estimator.fit(inputs)
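Note that the estimator snippet kept above still refers to `sagemaker` and `inputs`, which the removed cell used to define. A minimal sketch of that setup, assuming the SageMaker SDK is available on the notebook instance; the `data/my-dataset` path and key prefix are placeholders, not names from this PR:

```python
import sagemaker

# Start a SageMaker session; on a notebook instance, region and
# credentials are picked up automatically.
sagemaker_session = sagemaker.Session()

# Upload local data to the session's default S3 bucket so the estimator
# has something to train on. Path and key prefix are placeholders.
inputs = sagemaker_session.upload_data(path='data/my-dataset',
                                       key_prefix='data/my-dataset')
```

`estimator.fit(inputs)` then receives the returned S3 URI, matching how the removed CIFAR-10 cell wired things together.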
diff --git a/docs/static_site/src/pages/api/faq/cloud.md b/docs/static_site/src/pages/api/faq/cloud.md
index 2a5837b017eb..dd1643cb54a1 100644
--- a/docs/static_site/src/pages/api/faq/cloud.md
+++ b/docs/static_site/src/pages/api/faq/cloud.md
@@ -37,40 +37,16 @@
 and maintain the resources for precisely the amount of time needed.
 In this document, we provide a step-by-step guide that will teach you how to
 set up an AWS cluster with _MXNet_. We show how to:
 
-- [Use Amazon S3 to host data](#use-amazon-s3-to-host-data)
-- [Set up an EC2 GPU instance with all dependencies installed](#set-up-an-ec2-gpu-instance)
+- [Use Pre-installed EC2 GPU Instance](#use-pre-installed-ec2-gpu-instance)
 - [Build and run MXNet on a single computer](#build-and-run-mxnet-on-a-gpu-instance)
 - [Set up an EC2 GPU cluster for distributed training](#set-up-an-ec2-gpu-cluster-for-distributed-training)
 
-### Use Amazon S3 to Host Data
-
-Amazon S3 provides distributed data storage which proves especially convenient for hosting large datasets.
-To use S3, you need [AWS credentials](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSGettingStartedGuide/AWSCredentials.html),
-including an `ACCESS_KEY_ID` and a `SECRET_ACCESS_KEY`.
-
-To use _MXNet_ with S3, set the environment variables `AWS_ACCESS_KEY_ID` and
-`AWS_SECRET_ACCESS_KEY` by adding the following two lines in
-`~/.bashrc` (replacing the strings with the correct ones):
-
-```bash
-export AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE
-export AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
-```
-
-There are several ways to upload data to S3. One simple way is to use
-[s3cmd](https://s3tools.org/s3cmd). For example:
-
-```bash
-wget http://data.mxnet.io/mxnet/data/mnist.zip
-unzip mnist.zip && s3cmd put t*-ubyte s3://dmlc/mnist/
-```
-
 ### Use Pre-installed EC2 GPU Instance
 
 The [Deep Learning AMIs](https://aws.amazon.com/marketplace/search/results?x=0&y=0&searchTerms=Deep+Learning+AMI) are a series of images supported and maintained by Amazon Web Services for use on Amazon Elastic Compute Cloud (Amazon EC2) and contain the latest MXNet release.
 
-Now you can launch _MXNet_ directly on an EC2 GPU instance. 
+Now you can launch _MXNet_ directly on an EC2 GPU instance.
 You can also use [Jupyter](https://jupyter.org) notebook on EC2 machine.
 Here is a [good tutorial](https://github.com/dmlc/mxnet-notebooks)
 on how to connect to a Jupyter notebook running on an EC2 instance.
@@ -81,7 +57,7 @@ on how to connect to a Jupyter notebook running on an EC2 instance.
 provide a foundational image with NVIDIA CUDA, cuDNN, GPU drivers,
 Intel MKL-DNN, Docker and Nvidia-Docker, etc. for deploying your own
 custom deep learning environment. You may follow the [MXNet Build From Source
-instructions](

diff --git a/python/mxnet/test_utils.py b/python/mxnet/test_utils.py
-    >>> get_zip_data("data_dir",
-        "http://files.grouplens.org/datasets/movielens/ml-10m.zip",
-        "ml-10m.zip")
-    """
-    data_origin_name = os.path.join(data_dir, data_origin_name)
-    if not os.path.exists(data_origin_name):
-        download(url, dirname=data_dir, overwrite=False)
-    zip_file = zipfile.ZipFile(data_origin_name)
-    zip_file.extractall(path=data_dir)
-
 def get_bz2_data(data_dir, data_name, url, data_origin_name):
     """Download and extract bz2 data.
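This hunk removes `get_zip_data` from `python/mxnet/test_utils.py` while keeping `get_bz2_data`. Out-of-tree code that still calls the removed helper would need a local stand-in; a minimal sketch, assuming only the standard library plus `mx.test_utils.download`, with the argument order taken from the removed docstring example:

```python
import os
import zipfile

import mxnet as mx


def get_zip_data(data_dir, url, data_origin_name):
    """Download ``url`` as ``data_origin_name`` under ``data_dir`` and
    extract it there. Illustrative stand-in for the removed helper."""
    zip_path = os.path.join(data_dir, data_origin_name)
    if not os.path.exists(zip_path):
        # download() skips the fetch when the target file already exists
        # and overwrite is False.
        mx.test_utils.download(url, fname=zip_path, overwrite=False)
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(path=data_dir)
```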
diff --git a/tests/nightly/download.sh b/tests/nightly/download.sh
deleted file mode 100644
index d07fc6f4ab8a..000000000000
--- a/tests/nightly/download.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-
-dmlc_download() {
-    url=http://data.mxnet.io/mxnet/datasets/
-    dir=$1
-    file=$2
-    if [ ! -e data/${dir}/$file ]; then
-        wget ${url}/${dir}/${file} -P data/${dir}/ || exit -1
-    else
-        echo "data/${dir}/$file already exits"
-    fi
-}
-
-dmlc_download mnist t10k-images-idx3-ubyte
-dmlc_download mnist t10k-labels-idx1-ubyte
-dmlc_download mnist train-images-idx3-ubyte
-dmlc_download mnist train-labels-idx1-ubyte
-
-dmlc_download cifar10 train.rec
-dmlc_download cifar10 test.rec
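The deleted nightly script's only behavior, fetching a dataset file unless it already exists, is what `mx.test_utils.download` provides out of the box, which is presumably why the shell helper could go. A sketch of the equivalent in Python; the base URL is copied from the deleted script for illustration, and since retiring `data.mxnet.io` is the point of this PR, a real replacement would substitute the new `repo.mxnet.io` locations:

```python
import os

import mxnet as mx

# Old host, kept from the deleted script for illustration only; this PR
# exists because data.mxnet.io is being retired.
BASE_URL = 'http://data.mxnet.io/mxnet/datasets'


def dmlc_download(dirname, filename):
    """Fetch a dataset file unless it is already present, mirroring the
    deleted dmlc_download shell function."""
    target = os.path.join('data', dirname, filename)
    # overwrite=False makes download() a no-op when the target exists.
    return mx.test_utils.download('{}/{}/{}'.format(BASE_URL, dirname, filename),
                                  fname=target, overwrite=False)


for name in ('t10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte',
             'train-images-idx3-ubyte', 'train-labels-idx1-ubyte'):
    dmlc_download('mnist', name)
for name in ('train.rec', 'test.rec'):
    dmlc_download('cifar10', name)
```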
"https://repo.mxnet.io/gluon/dataset/test/test_images-9cebe48a.tar.gz" def setUp(self): self.IMAGES_DIR = tempfile.mkdtemp()