This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Merge branch 'master' into cpp
piiswrong committed Apr 16, 2017
2 parents 96eb4f5 + 1722b08 commit 2e66d77
Showing 27 changed files with 432 additions and 162 deletions.
12 changes: 6 additions & 6 deletions R-package/R/callback.R
@@ -12,7 +12,7 @@ mx.callback.log.train.metric <- function(period, logger=NULL) {
if (nbatch %% period == 0 && !is.null(env$metric)) {
result <- env$metric$get(env$train.metric)
if (nbatch != 0 & verbose)
- cat(paste0("Batch [", nbatch, "] Train-", result$name, "=", result$value, "\n"))
+ message(paste0("Batch [", nbatch, "] Train-", result$name, "=", result$value))
if (!is.null(logger)) {
if (class(logger) != "mx.metric.logger") {
stop("Invalid mx.metric.logger.")
@@ -21,7 +21,7 @@ mx.callback.log.train.metric <- function(period, logger=NULL) {
if (!is.null(env$eval.metric)) {
result <- env$metric$get(env$eval.metric)
if (nbatch != 0 & verbose)
- cat(paste0("Batch [", nbatch, "] Validation-", result$name, "=", result$value, "\n"))
+ message(paste0("Batch [", nbatch, "] Validation-", result$name, "=", result$value))
logger$eval <- c(logger$eval, result$value)
}
}
@@ -49,8 +49,8 @@ mx.callback.log.speedometer <- function(batch.size, frequency=50){
speed <- frequency*batch.size/time
result <- env$metric$get(env$train.metric)
if (nbatch != 0 & verbose)
- cat(paste0("Batch [", nbatch, "] Speed: ", speed, " samples/sec Train-",
- result$name, "=", result$value, "\n"))
+ message(paste0("Batch [", nbatch, "] Speed: ", speed, " samples/sec Train-",
+ result$name, "=", result$value))
env$tic = Sys.time()
}
} else {
@@ -69,7 +69,7 @@ mx.callback.save.checkpoint <- function(prefix, period=1) {
function(iteration, nbatch, env, verbose=TRUE) {
if (iteration %% period == 0) {
mx.model.save(env$model, prefix, iteration)
- if(verbose) cat(sprintf("Model checkpoint saved to %s-%04d.params\n", prefix, iteration))
+ if(verbose) message(sprintf("Model checkpoint saved to %s-%04d.params\n", prefix, iteration))
}
return(TRUE)
}
@@ -139,7 +139,7 @@ mx.callback.early.stop <- function(train.metric = NULL, eval.metric = NULL, bad.

if (mx.best.iter == bad.steps) {
if (verbose) {
- cat(paste0("Best score=", mx.best.score, ", iteration [", iteration - bad.steps, "] \n"))
+ message(paste0("Best score=", mx.best.score, ", iteration [", iteration - bad.steps, "]"))
}
return(FALSE)
} else {
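Throughout callback.R the change is the same: progress reporting moves from cat() to message(). message() routes through R's condition system on stderr, so callers can silence or capture the training log, and it appends its own newline, which is why the trailing "\n" disappears from the new calls. A minimal sketch of the difference, using a hypothetical helper rather than the package's callback code:

```r
# log_batch is a made-up stand-in for the callbacks above; it only illustrates
# how message()-based logging behaves for the caller.
log_batch <- function(nbatch, name, value) {
  message(paste0("Batch [", nbatch, "] Train-", name, "=", value))
}

log_batch(100, "accuracy", 0.97)                     # Batch [100] Train-accuracy=0.97
suppressMessages(log_batch(100, "accuracy", 0.97))   # emits nothing
capture.output(log_batch(100, "accuracy", 0.97), type = "message")  # captured as text
```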
16 changes: 8 additions & 8 deletions R-package/R/lr_scheduler.R
@@ -19,12 +19,12 @@ mx.lr_scheduler.FactorScheduler <- function(step, factor_val, stop_factor_lr=1e-
lr <- lr * factor_val
if(lr < stop_factor_lr){
lr <- stop_factor_lr
- if(verbose) cat(paste0("Update[", num_update,
+ if(verbose) message(paste0("Update[", num_update,
"]: now learning rate arrived at ", lr,
- "will not change in the future\n"))
+ "will not change in the future"))
} else{
- if(verbose) cat(paste0("Update[", num_update,
- "]: learning rate is changed to ", lr, "\n"))
+ if(verbose) message(paste0("Update[", num_update,
+ "]: learning rate is changed to ", lr))
}
optimizerEnv$lr <- lr
optimizerEnv$count <- count
@@ -62,12 +62,12 @@ mx.lr_scheduler.MultiFactorScheduler <- function(step, factor_val, stop_factor_l
lr <- lr * factor_val
if(lr < stop_factor_lr){
lr <- stop_factor_lr
- if(verbose) cat(paste0("Update[", num_update,
+ if(verbose) message(paste0("Update[", num_update,
"]: now learning rate arrived at ", lr,
- "will not change in the future\n"))
+ "will not change in the future"))
} else{
- if(verbose) cat(paste0("Update[", num_update,
- "]: learning rate is changed to ", lr, "\n"))
+ if(verbose) message(paste0("Update[", num_update,
+ "]: learning rate is changed to ", lr))

}
optimizerEnv$lr <- lr
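Both schedulers apply the same rule: every `step` updates the learning rate is multiplied by `factor_val`, and once it drops below `stop_factor_lr` it is clamped there for good. The package implementation is stateful (it tracks `count` in the optimizer environment); the closed-form sketch below is only an illustration of the resulting schedule, with illustrative names and numbers:

```r
# Rough closed-form view of mx.lr_scheduler.FactorScheduler's decay rule.
lr_at <- function(base_lr, num_update, step, factor_val, stop_factor_lr = 1e-8) {
  max(base_lr * factor_val ^ (num_update %/% step), stop_factor_lr)
}

lr_at(0.1, num_update = 2500, step = 1000, factor_val = 0.5)  # 0.1 * 0.5^2 = 0.025
```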
8 changes: 4 additions & 4 deletions R-package/R/model.R
@@ -85,7 +85,7 @@ mx.model.create.kvstore <- function(kvstore, arg.params, ndevice, verbose=TRUE)
} else {
kvstore <- 'local_allreduce_cpu'
}
- if(verbose) cat(paste0("Auto-select kvstore type = ", kvstore, "\n"))
+ if(verbose) message(paste0("Auto-select kvstore type = ", kvstore))
}
return(mx.kv.create(kvstore))
}
@@ -101,7 +101,7 @@ mx.model.train <- function(symbol, ctx, input.shape,
kvstore,
verbose=TRUE) {
ndevice <- length(ctx)
- if(verbose) cat(paste0("Start training with ", ndevice, " devices\n"))
+ if(verbose) message(paste0("Start training with ", ndevice, " devices"))
# create the executors
sliceinfo <- mx.model.slice.shape(input.shape, ndevice)
train.execs <- lapply(1:ndevice, function(i) {
@@ -204,7 +204,7 @@ mx.model.train <- function(symbol, ctx, input.shape,
train.data$reset()
if (!is.null(metric)) {
result <- metric$get(train.metric)
- if(verbose) cat(paste0("[", iteration, "] Train-", result$name, "=", result$value, "\n"))
+ if(verbose) message(paste0("[", iteration, "] Train-", result$name, "=", result$value))
}
if (!is.null(eval.data)) {
if (!is.null(metric)) {
@@ -238,7 +238,7 @@ mx.model.train <- function(symbol, ctx, input.shape,
eval.data$reset()
if (!is.null(metric)) {
result <- metric$get(eval.metric)
- if(verbose) cat(paste0("[", iteration, "] Validation-", result$name, "=", result$value, "\n"))
+ if(verbose) message(paste0("[", iteration, "] Validation-", result$name, "=", result$value))
}
} else {
eval.metric <- NULL
14 changes: 7 additions & 7 deletions R-package/R/rnn_model.R
@@ -174,18 +174,18 @@ train.rnn <- function (model, train.data, eval.data,

nbatch <- nbatch + seq.len
if ((epoch.counter %% log.period) == 0) {
- cat(paste0("Epoch [", epoch.counter,
+ message(paste0("Epoch [", epoch.counter,
"] Train: NLL=", train.nll / nbatch,
- ", Perp=", exp(train.nll / nbatch), "\n"))
+ ", Perp=", exp(train.nll / nbatch)))
}
}
train.data$reset()
# end of training loop
toc <- Sys.time()
- cat(paste0("Iter [", iteration,
+ message(paste0("Iter [", iteration,
"] Train: Time: ", as.numeric(toc - tic, units="secs"),
" sec, NLL=", train.nll / nbatch,
- ", Perp=", exp(train.nll / nbatch), "\n"))
+ ", Perp=", exp(train.nll / nbatch)))

if (!is.null(eval.data)) {
val.nll <- 0.0
@@ -216,9 +216,9 @@
}
eval.data$reset()
perp <- exp(val.nll / nbatch)
- cat(paste0("Iter [", iteration,
+ message(paste0("Iter [", iteration,
"] Val: NLL=", val.nll / nbatch,
- ", Perp=", exp(val.nll / nbatch), "\n"))
+ ", Perp=", exp(val.nll / nbatch)))
}
}

@@ -241,4 +241,4 @@ check.data <- function(data, batch.size, is.train) {
if (!data$iter.next()) stop("Empty input")
}
return (data)
}
}
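The perplexity reported in this file is simply the exponential of the average negative log-likelihood accumulated over the epoch, exactly as the updated message() calls compute it. A tiny illustration with made-up numbers:

```r
# Perplexity as logged above: exp of the mean NLL per predicted position.
perplexity <- function(nll, nbatch) exp(nll / nbatch)
perplexity(nll = 3500, nbatch = 1000)  # exp(3.5), roughly 33.1
```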
17 changes: 15 additions & 2 deletions R-package/R/zzz.R
@@ -25,9 +25,22 @@ NULL
}

.onUnload <- function(libpath) {
- print("Start unload")
+ message("Start unload")
mx.internal.notify.shutdown()
unloadModule("mxnet")
- library.dynam.unload("mxnet", libpath)
+ library.dynam.unload("libmxnet", libpath)
+ message("MXNet shutdown")
}

+ .onAttach <- function(...) {
+   if (!interactive() || stats::runif(1) > 0.1) return()
+
+   tips <- c(
+     "Need help? Feel free to open an issue on https://github.com/dmlc/mxnet/issues",
+     "For more documents, please visit http://mxnet.io",
+     "Use suppressPackageStartupMessages() to eliminate package startup messages."
+   )
+
+   tip <- sample(tips, 1)
+   packageStartupMessage(paste(strwrap(tip), collapse = "\n"))
+ }
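The new .onAttach hook only fires in interactive sessions, and even then for roughly one session in ten (otherwise the early return on stats::runif(1) > 0.1 skips it). Because the tip goes through packageStartupMessage(), it can be silenced exactly as the third tip suggests:

```r
# Attach the package without the startup tip; packageStartupMessage() output
# is suppressed, while errors and warnings still surface normally.
suppressPackageStartupMessages(library(mxnet))
```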
4 changes: 2 additions & 2 deletions R-package/src/mxnet.cc
@@ -19,7 +19,7 @@ void SetSeed(int seed) {
MX_CALL(MXRandomSeed(seed));
}

- void NotifyShutdown(int seed) {
+ void NotifyShutdown() {
MX_CALL(MXNotifyShutdown());
}

@@ -35,7 +35,7 @@ void ProfilerSetState(int state) {
void InitRcppModule() {
using namespace Rcpp; // NOLINT(*)
function("mx.internal.set.seed", &SetSeed);
- function("mx.internal.notify.shudown", &NotifyShutdown);
+ function("mx.internal.notify.shutdown", &NotifyShutdown);
function("mx.internal.profiler.config", &ProfilerSetConfig);
function("mx.internal.profiler.state", &ProfilerSetState);
}
1 change: 0 additions & 1 deletion R-package/src/symbol.cc
@@ -324,7 +324,6 @@ SEXP SymbolFunction::operator() (SEXP* args) {
}

void Symbol::InitRcppModule() {
- RLOG_INFO << "Init Rcpp";
using namespace Rcpp; // NOLINT(*)
class_<Symbol>("MXSymbol")
.method("debug.str", &Symbol::DebugStr,
2 changes: 1 addition & 1 deletion docs/tutorials/r/fiveMinutesNeuralNetwork.md
@@ -87,7 +87,7 @@ Note that `mx.set.seed` controls the random process in `mxnet`. You can see the
To get an idea of what is happening, view the computation graph from R:
```{r}
- graph.viz(model$symbol$as.json())
+ graph.viz(model$symbol)
```
[<img src="https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/knitr/graph.computation.png">](https://github.com/dmlc/mxnet)
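The tutorial now hands graph.viz() the symbol object itself instead of its JSON serialization. A hedged sketch of how it fits the surrounding tutorial code; train.x and train.y are assumed to exist from the tutorial's data-preparation step, and the mx.mlp() arguments beyond those visible in the hunk header are illustrative, not quoted from the page:

```r
# Sketch only: hidden_node/out_node/out_activation match the tutorial's call,
# the remaining arguments are assumed reasonable defaults.
model <- mx.mlp(train.x, train.y, hidden_node = 10, out_node = 2,
                out_activation = "softmax", num.round = 20,
                array.batch.size = 15, learning.rate = 0.07)

graph.viz(model$symbol)             # new form: pass the symbol object
# graph.viz(model$symbol$as.json()) # old form, replaced by this commit
```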
2 changes: 1 addition & 1 deletion docs/zh/api/r/fiveMinutesNeuralNetwork.md
@@ -94,7 +94,7 @@ model <- mx.mlp(train.x, train.y, hidden_node=10, out_node=2, out_activation="so
To see exactly what is happening, we can easily view the computation graph in R:

```{r}
- graph.viz(model$symbol$as.json())
+ graph.viz(model$symbol)
```

[![](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/knitr/graph.computation.png)](https://github.com/dmlc/mxnet)
1 change: 1 addition & 0 deletions example/README.md
@@ -85,6 +85,7 @@ If you want to contribute to this list and the examples, please open a new pull
* [Multi task tutorial](https://github.com/haria/mxnet-multi-task-example/blob/master/multi-task.ipynb) - A demo of how to train and predict multi-task network on both MNIST and your own dataset.
* [class active maps](https://github.com/dmlc/mxnet-notebooks/blob/master/python/moved-from-mxnet/class_active_maps.ipynb) - A demo of how to localize the discriminative regions in an image using global average pooling (GAP) in CNNs.
* [DMLC MXNet Notebooks](https://github.com/dmlc/mxnet-notebooks) DMLC's repo for various notebooks ranging from basic usages of MXNet to state-of-the-art deep learning applications.
+ * [AWS Seoul Summit 2017 Demos](https://github.com/sxjscience/aws-summit-2017-seoul) The demo codes and ipython notebooks in AWS Seoul Summit 2017.

### <a name="mobile-apps-examples"></a>Mobile App Examples
-------------------
32 changes: 30 additions & 2 deletions example/image-classification/train_mnist.R
@@ -95,6 +95,8 @@ parse_args <- function() {
help='the batch size')
parser$add_argument('--lr', type='double', default=.05,
help='the initial learning rate')
+ parser$add_argument('--mom', type='double', default=.9,
+ help='momentum for sgd')
parser$add_argument('--model-prefix', type='character',
help='the prefix of the model to load/save')
parser$add_argument('--num-round', type='integer', default=10,
@@ -113,6 +115,32 @@ if (args$network == 'mlp') {
data_shape <- c(28, 28, 1)
net <- get_lenet()
}

# train
- source("train_model.R")
- train_model.fit(args, net, get_iterator(data_shape))
+ data_loader <- get_iterator(data_shape)
+ data <- data_loader(args)
+ train <- data$train
+ val <- data$value
+
+ if (is.null(args$gpus)) {
+   devs <- mx.cpu()
+ } else {
+   devs <- lapply(unlist(strsplit(args$gpus, ",")), function(i) {
+     mx.gpu(as.integer(i))
+   })
+ }
+
+ mx.set.seed(0)
+
+ model <- mx.model.FeedForward.create(
+   X = train,
+   eval.data = val,
+   ctx = devs,
+   symbol = net,
+   num.round = args$num_round,
+   array.batch.size = args$batch_size,
+   learning.rate = args$lr,
+   momentum = args$mom,
+   eval.metric = mx.metric.accuracy,
+   initializer = mx.init.uniform(0.07),
+   batch.end.callback = mx.callback.log.train.metric(100))
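The inlined script resolves devices from the --gpus flag: a comma-separated list such as "0,1" becomes a list of mx.gpu() contexts, and omitting the flag falls back to mx.cpu(). A small standalone sketch of that mapping, with a hand-built args list standing in for the parsed command line:

```r
# Hypothetical parsed arguments; the real script builds these with argparse.
args <- list(gpus = "0,1")

devs <- if (is.null(args$gpus)) {
  mx.cpu()
} else {
  lapply(unlist(strsplit(args$gpus, ",")), function(i) mx.gpu(as.integer(i)))
}
length(devs)  # 2 contexts for --gpus "0,1"
```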
4 changes: 2 additions & 2 deletions example/image-classification/train_model.R
@@ -33,9 +33,9 @@ train_model.fit <- function(args, network, data_loader) {
}

# data
- data <- data_loader
+ data <- data_loader(args)
train <- data$train
val <- data$value
val <- data$value

# devices
if (is.null(args$gpus)) {
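The fix treats data_loader as what train_mnist.R now passes in: the function returned by get_iterator(data_shape), which must be called with the parsed args to build the iterators, rather than an already-built list. The sketch below shows the contract the fixed line relies on; the iterator construction and file paths are assumptions for illustration, not the repository's actual get_iterator:

```r
# Illustrative loader in the shape train_model.fit expects after the fix:
# get_iterator(data_shape) returns a function of `args` that yields
# list(train = ..., value = ...).
get_iterator_sketch <- function(data_shape) {
  function(args) {
    train <- mx.io.MNISTIter(image = "data/train-images-idx3-ubyte",
                             label = "data/train-labels-idx1-ubyte",
                             data.shape = data_shape,
                             batch.size = args$batch_size,
                             shuffle = TRUE)
    val <- mx.io.MNISTIter(image = "data/t10k-images-idx3-ubyte",
                           label = "data/t10k-labels-idx1-ubyte",
                           data.shape = data_shape,
                           batch.size = args$batch_size)
    list(train = train, value = val)
  }
}

data_loader <- get_iterator_sketch(c(28, 28, 1))
# data <- data_loader(args); train <- data$train; val <- data$value
```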