Merge pull request apache#10 from precedenceguo/simple
based on dmlc/mxnet:20161011
winstywang committed Nov 17, 2016
2 parents 2f3ada1 + 426f1e4 commit af4012b
Showing 333 changed files with 15,687 additions and 2,986 deletions.
9 changes: 9 additions & 0 deletions .gitignore
@@ -115,3 +115,12 @@ scala-package/*/*/target/
.project
.cproject
.pydevproject
CMakeFiles
cmake_install.cmake
dmlc-core
ps-lite
nnvm
lib

# Visual Studio Code
.vscode
54 changes: 50 additions & 4 deletions CMakeLists.txt
@@ -37,8 +37,15 @@ else(MSVC)
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag("-std=c++11" SUPPORT_CXX11)
check_cxx_compiler_flag("-msse2" SUPPORT_MSSE2)
set(CMAKE_C_FLAGS "-O3 -Wall -msse2 -Wno-unknown-pragmas -fPIC")
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_C_FLAGS "-Wall -msse2 -Wno-unknown-pragmas -fPIC")
if(NDEBUG)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3")
else(NDEBUG)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O0 -ggdb3")
endif(NDEBUG)
if(SUPPORT_CXX11)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
endif()
endif(MSVC)

if(USE_OPENCV)
@@ -72,7 +79,7 @@ if(USE_CUDNN)
add_definitions(-DUSE_CUDNN)
include_directories(SYSTEM ${CUDNN_INCLUDE})
list(APPEND mxnet_LINKER_LIBS ${CUDNN_LIBRARY})
add_definitions(-DMSHADOW_USE_CUDNN=1)
add_definitions(-DMSHADOW_USE_CUDNN=1)
endif()
endif()

@@ -135,6 +142,37 @@ if(USE_PLUGINS_WARPCTC)
list(APPEND CUDA ${PLUGINS_CUSRC})
endif()

if(USE_PLUGIN_CAFFE)
if(NOT DEFINED CAFFE_PATH)
if(EXISTS ${PROJECT_SOURCE_DIR}/caffe)
# Need newer FindCUDA.cmake that correctly handles -std=c++11
cmake_minimum_required(VERSION 3.3)
set(CAFFE_PATH ${PROJECT_SOURCE_DIR}/caffe)
endif()
endif()
list(APPEND CMAKE_MODULE_PATH ${CAFFE_PATH}/cmake)
include_directories(${CAFFE_PATH}/include)
include_directories(${CAFFE_PATH}/build/src)
include_directories(${CMAKE_BINARY_DIR}/caffe/include)
link_directories(${CAFFE_PATH}/build/lib)
if(NOT DEFINED CAFFE_PATH)
message(FATAL_ERROR "Please set CAFFE_PATH to point to the caffe source installation")
endif()
mxnet_source_group("Include\\plugin\\caffe" GLOB "plugin/caffe/*.h")
mxnet_source_group("Source\\plugin\\caffe" GLOB "plugin/caffe/*.cc")
mxnet_source_group("Cuda\\plugin\\caffe" GLOB "plugin/caffe/*.cu")
FILE(GLOB_RECURSE PLUGINS_SOURCE "plugin/caffe/*.cc" "plugin/caffe/*.h")
FILE(GLOB_RECURSE PLUGINS_CUSRC "plugin/caffe/*.cu")
list(APPEND SOURCE ${PLUGINS_SOURCE})
list(APPEND CUDA ${PLUGINS_CUSRC})
include_directories(${CMAKE_BINARY_DIR}/include)
list(APPEND mxnet_LINKER_LIBS
protobuf boost_system boost_thread boost_filesystem
gflags glog caffe
${Caffe_LINKER_LIBS}
)
endif()

if (NOT (EXTRA_OPERATORS STREQUAL ""))
mxnet_source_group("Extra" GLOB_RECURSE "${EXTRA_OPERATORS}/*.cc")
mxnet_source_group("Extra\\Cuda" GLOB_RECURSE "${EXTRA_OPERATORS}/*.cu")
@@ -163,15 +201,23 @@ if(USE_CUDA)
list(APPEND mxnet_LINKER_LIBS ${CUDA_cuda_LIBRARY})
else(MSVC)
list(APPEND mxnet_LINKER_LIBS nvrtc cuda)
link_directories("${CUDA_TOOLKIT_ROOT_DIR}/lib64")
endif()
list(APPEND SOURCE ${cuda_objs} ${CUDA})
endif()

# unsupported: if caffe is a subdirectory of mxnet, load its CMakeLists.txt as well
if(USE_PLUGIN_CAFFE)
if(EXISTS ${PROJECT_SOURCE_DIR}/caffe)
add_subdirectory(caffe)
endif()
endif()

if(NOT MSVC)
# Only add c++11 flags and definitions after cuda compiling
add_definitions(-DDMLC_USE_CXX11)
add_definitions(-DMSHADOW_IN_CXX11)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c++0x")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHsc")
3 changes: 2 additions & 1 deletion CONTRIBUTORS.md
@@ -28,6 +28,7 @@ The committers are the granted write access to the project.
- Chuntao is the initiator and provides the initial design of engine.
* [Chiyuan Zhang](https://github.com/pluskid)
- Chiyuan is the creator of MXNet Julia Package.
* [Junyuan Xie](https://github.com/piiswrong)
* [Qiang Kou](https://github.com/thirdwing)
- KK is a R ninja, he makes mxnet available for R users.
* [Tong He](https://github.com/hetong007)
@@ -64,7 +65,6 @@ List of Contributors
* [Shuzhe Wu](https://github.com/II-Matto)
* [Xiaodong](https://github.com/XD-DENG)
* [Nan Xiao](https://github.com/road2stat)
* [Junyuan Xie](https://github.com/piiswrong)
* [Wei Wu](https://github.com/tornadomeet)
* [Michaël Benesty](https://github.com/pommedeterresautee)
-Michaël contributes the R visualization module of mxnet
@@ -110,3 +110,4 @@ List of Contributors
* [Yang Bo](https://github.com/Atry)
* [Jonas Amaro](https://github.com/jonasrla)
* [Yan Li](https://github.com/Godricly)
* [Yuance Li](https://github.com/liyuance)
43 changes: 23 additions & 20 deletions Makefile
@@ -100,10 +100,10 @@ CUOBJ = $(patsubst %.cu, build/%_gpu.o, $(CUSRC))

# extra operators
ifneq ($(EXTRA_OPERATORS),)
EXTRA_SRC = $(wildcard $(EXTRA_OPERATORS)/*.cc $(EXTRA_OPERATORS)/*/*.cc)
EXTRA_OBJ = $(patsubst $(EXTRA_OPERATORS)/%.cc, $(EXTRA_OPERATORS)/build/%.o, $(EXTRA_SRC))
EXTRA_CUSRC = $(wildcard $(EXTRA_OPERATORS)/*.cu $(EXTRA_OPERATORS)/*/*.cu)
EXTRA_CUOBJ = $(patsubst $(EXTRA_OPERATORS)/%.cu, $(EXTRA_OPERATORS)/build/%_gpu.o, $(EXTRA_CUSRC))
EXTRA_SRC = $(wildcard $(patsubst %, %/*.cc %/*/*.cc, $(EXTRA_OPERATORS)))
EXTRA_OBJ = $(patsubst %.cc, %.o, $(EXTRA_SRC))
EXTRA_CUSRC = $(wildcard $(patsubst %, %/*.cu %/*/*.cu, $(EXTRA_OPERATORS)))
EXTRA_CUOBJ = $(patsubst %.cu, %_gpu.o, $(EXTRA_CUSRC))
else
EXTRA_SRC =
EXTRA_OBJ =
@@ -157,28 +157,28 @@ build/src/%_gpu.o: src/%.cu
$(NVCC) $(NVCCFLAGS) -Xcompiler "$(CFLAGS)" -M -MT build/src/$*_gpu.o $< >build/src/$*_gpu.d
$(NVCC) -c -o $@ $(NVCCFLAGS) -Xcompiler "$(CFLAGS)" $<

build/plugin/%.o: plugin/%.cc
@mkdir -p $(@D)
$(CXX) -std=c++11 $(CFLAGS) -MM -MT build/plugin/$*.o $< >build/plugin/$*.d
$(CXX) -std=c++11 -c $(CFLAGS) -c $< -o $@

# An nvcc bug causes it to generate "generic/xxx.h" dependencies from torch headers.
# Use CXX to generate dependency instead.
build/plugin/%_gpu.o: plugin/%.cu
@mkdir -p $(@D)
$(CXX) -std=c++11 $(CFLAGS) -MM -MT build/plugin/$*_gpu.o $< >build/plugin/$*_gpu.d
$(NVCC) -c -o $@ $(NVCCFLAGS) -Xcompiler "$(CFLAGS)" $<

$(EXTRA_OPERATORS)/build/%.o: $(EXTRA_OPERATORS)/%.cc
build/plugin/%.o: plugin/%.cc
@mkdir -p $(@D)
$(CXX) -std=c++11 $(CFLAGS) -Isrc/operator -MM -MT $(EXTRA_OPERATORS)/build/$*.o $< >$(EXTRA_OPERATORS)/build/$*.d
$(CXX) -std=c++11 -c $(CFLAGS) -Isrc/operator -c $< -o $@
$(CXX) -std=c++11 $(CFLAGS) -MM -MT build/plugin/$*.o $< >build/plugin/$*.d
$(CXX) -std=c++11 -c $(CFLAGS) -c $< -o $@

$(EXTRA_OPERATORS)/build/%_gpu.o: $(EXTRA_OPERATORS)/%.cu
%_gpu.o: %.cu
@mkdir -p $(@D)
$(NVCC) $(NVCCFLAGS) -Xcompiler "$(CFLAGS) -Isrc/operator" -M -MT $(EXTRA_OPERATORS)/build/$*_gpu.o $< >$(EXTRA_OPERATORS)/build/$*_gpu.d
$(NVCC) $(NVCCFLAGS) -Xcompiler "$(CFLAGS) -Isrc/operator" -M -MT $*_gpu.o $< >$*_gpu.d
$(NVCC) -c -o $@ $(NVCCFLAGS) -Xcompiler "$(CFLAGS) -Isrc/operator" $<

%.o: %.cc
@mkdir -p $(@D)
$(CXX) -std=c++11 $(CFLAGS) -Isrc/operator -MM -MT $*.o $< >$*.d
$(CXX) -std=c++11 -c $(CFLAGS) -Isrc/operator -c $< -o $@

# NOTE: to statically link libmxnet.a we need the option
# --Wl,--whole-archive -lmxnet --Wl,--no-whole-archive
lib/libmxnet.a: $(ALL_DEP)
@@ -189,12 +189,15 @@ lib/libmxnet.so: $(ALL_DEP)
@mkdir -p $(@D)
$(CXX) $(CFLAGS) -shared -o $@ $(filter %.o %.a, $^) $(LDFLAGS)

$(PS_PATH)/build/libps.a:
$(PS_PATH)/build/libps.a: PSLITE

PSLITE:
$(MAKE) CXX=$(CXX) DEPS_PATH=$(DEPS_PATH) -C $(PS_PATH) ps
ln -fs $(PS_PATH)/tracker .

$(DMLC_CORE)/libdmlc.a:
+ cd $(DMLC_CORE); make libdmlc.a config=$(ROOTDIR)/$(config); cd $(ROOTDIR)
$(DMLC_CORE)/libdmlc.a: DMLCCORE

DMLCCORE:
+ cd $(DMLC_CORE); make libdmlc.a USE_SSE=$(USE_SSE) config=$(ROOTDIR)/$(config); cd $(ROOTDIR)

bin/im2rec: tools/im2rec.cc $(ALL_DEP)

@@ -265,7 +268,7 @@ clean:
$(RM) -r build lib bin *~ */*~ */*/*~ */*/*/*~
cd $(DMLC_CORE); make clean; cd -
cd $(PS_PATH); make clean; cd -
$(RM) -r $(EXTRA_OPERATORS)/build
$(RM) -r $(patsubst %, %/*.d %/*/*.d %/*.o %/*/*.o, $(EXTRA_OPERATORS))
else
clean:
$(RM) -r build lib bin *~ */*~ */*/*~ */*/*/*~
@@ -279,5 +282,5 @@ clean_all: clean
-include build/*/*.d
-include build/*/*/*.d
ifneq ($(EXTRA_OPERATORS),)
-include $(EXTRA_OPERATORS)/build/*.d
-include $(patsubst %, %/*.d %/*/*.d, $(EXTRA_OPERATORS))
endif
6 changes: 3 additions & 3 deletions R-package/R/context.R
@@ -1,6 +1,6 @@
# Initialize the global context
init.context.default <- function() {
.GlobalEnv$mx.ctx.internal.default.value <- mx.cpu()
assign("mx.ctx.internal.default.value", mx.cpu(), envir = .MXNetEnv)
}

#' Set/Get default context for array creation.
Expand All @@ -11,9 +11,9 @@ init.context.default <- function() {
#' @export
mx.ctx.default <- function(new = NULL) {
if (!is.null(new)) {
mx.ctx.internal.default.value <<- new
assign("mx.ctx.internal.default.value", new, envir = .MXNetEnv)
}
return (mx.ctx.internal.default.value)
return (.MXNetEnv$mx.ctx.internal.default.value)
}

#' Check if the type is mxnet context.
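Note on the context.R change above: it moves the package's default-context state out of the user's global environment (previously written with `<<-` and `.GlobalEnv`) into a package-level environment named `.MXNetEnv`. A minimal sketch of that pattern follows; `.MXNetEnv` comes from the diff, while the two helper functions are hypothetical and only illustrate the get/set idiom.

# Package-scoped storage for mutable state, mirroring the pattern in context.R.
.MXNetEnv <- new.env(parent = emptyenv())

# Hypothetical setter: write into the package environment, not .GlobalEnv.
set.default.ctx <- function(ctx) {
  assign("mx.ctx.internal.default.value", ctx, envir = .MXNetEnv)
}

# Hypothetical getter: read the value back from the same environment.
get.default.ctx <- function() {
  .MXNetEnv$mx.ctx.internal.default.value
}

Keeping mutable state in a private environment avoids touching the user's workspace and the global-environment assignments that package checks typically flag.
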
6 changes: 4 additions & 2 deletions R-package/R/model.R
@@ -352,6 +352,8 @@ mx.model.select.layout.predict <- function(X, model) {
#' This is only used when X is R array.
#' @param ctx mx.context or list of mx.context, optional
#' The devices used to perform training.
#' @param begin.round integer (default=1)
#' The initial iteration over the training data to train the model.
#' @param num.round integer (default=10)
#' The number of iterations over training data to train the model.
#' @param optimizer string, default="sgd"
@@ -387,7 +389,7 @@ mx.model.select.layout.predict <- function(X, model) {
#' @export

mx.model.FeedForward.create <-
function(symbol, X, y=NULL, ctx=NULL,
function(symbol, X, y=NULL, ctx=NULL, begin.round=1,
num.round=10, optimizer="sgd",
initializer=mx.init.uniform(0.01),
eval.data=NULL, eval.metric=NULL,
@@ -444,7 +446,7 @@ function(symbol, X, y=NULL, ctx=NULL,
kvstore <- mx.model.create.kvstore(kvstore, params$arg.params, length(ctx), verbose=verbose)
model <- mx.model.train(symbol, ctx, input.shape,
params$arg.params, params$aux.params,
1, num.round, optimizer=optimizer,
begin.round, num.round, optimizer=optimizer,
train.data=X, eval.data=eval.data,
metric=eval.metric,
epoch.end.callback=epoch.end.callback,
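Note on the model.R change above: the new `begin.round` argument is forwarded from `mx.model.FeedForward.create` to `mx.model.train`, so a continued run can count epochs from an arbitrary round instead of restarting at 1. A usage sketch under stated assumptions: `net`, `train.x`, and `train.y` are placeholders for a symbol and training data defined elsewhere, not part of this diff.

library(mxnet)

# Continue training for rounds 11..20; epoch-end callbacks and logging
# count epochs from begin.round rather than from 1.
model <- mx.model.FeedForward.create(
  net, X = train.x, y = train.y,
  ctx = mx.cpu(),
  begin.round = 11,
  num.round   = 20,
  optimizer   = "sgd",
  eval.metric = mx.metric.accuracy
)
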
(The remaining changed files in this commit are not shown.)