From 740996baa650431b75e38865902e1119c28e2c1f Mon Sep 17 00:00:00 2001
From: kjw0612
Date: Fri, 9 Oct 2015 05:28:46 +0900
Subject: [PATCH 1/4] Deeply Supervised RCNN

---
 rcn_dag.m      | 18 +++++++++---------
 rcn_init_dag.m | 17 ++++++++++++++++-
 2 files changed, 25 insertions(+), 10 deletions(-)

diff --git a/rcn_dag.m b/rcn_dag.m
index 53b9fbb..37d0970 100644
--- a/rcn_dag.m
+++ b/rcn_dag.m
@@ -27,9 +27,9 @@
 opts.gpus = 2;
 opts.resid = 1;
 opts.recursive = 1;
-opts.dropout = 1;
+opts.dropout = 0;
 opts.depth = 10; % 10 optimal5
-opts.filterSize = 512;
+opts.filterSize = 64;
 opts.pad = 0;
 opts.useBnorm = false;
 exp_name = 'exp';
@@ -47,7 +47,7 @@
     exp_name = strcat(exp_name, '_N', num2str(problem.v));
   end
 end
-exp_name = sprintf('%s_resid%d_depth%d', exp_name, opts.resid, opts.depth);
+exp_name = sprintf('multi_obj_%s_resid%d_depth%d', exp_name, opts.resid, opts.depth);
 opts.expDir = fullfile('data','exp',exp_name);
 opts.dataDir = fullfile('data', '91');
 opts.imdbPath = fullfile(opts.expDir, 'imdb.mat');
@@ -57,7 +57,7 @@
 if opts.dropout, rep = rep * 5; end
 opts.train.learningRate = [0.1*ones(1,rep) 0.01*ones(1,rep) 0.001*ones(1,rep) 0.0001*ones(1,rep)];%*0.99 .^ (0:500);
 opts.train.numEpochs = numel(opts.train.learningRate);
-opts.train.continue = 1;
+opts.train.continue = 0;
 opts.train.gradRange = 1e-4;
 opts.train.sync = true;
 opts.train.expDir = opts.expDir;
@@ -73,15 +73,15 @@
 % Prepare data
 % --------------------------------------------------------------------

-if exist(opts.imdbPath, 'file')
+if exist(opts.imdbPath, 'file') & 0
   imdb = load(opts.imdbPath) ;
 else
   imdb = getRcnImdb(opts.dataDir, opts.problems, opts.depth, opts.pad, opts.resid);
   mkdir(opts.expDir) ;
-  save(opts.imdbPath, '-struct', 'imdb') ;
+  %save(opts.imdbPath, '-struct', 'imdb') ;
 end

-net = rcn_init_dag(opts);
+[net, opts.train.derOutputs] = rcn_init_dag(opts);
 net.initParams();
 %net = dagnn.DagNN.fromSimpleNN(net) ;
 %net.addLayer('error', dagnn.Loss('loss', 'classerror'), ...
@@ -108,8 +108,8 @@

 function imdb = getRcnImdb(dataDir, problems, depth, pad, diff)
 f_lst = dir(fullfile(dataDir, '*.*'));
-ps = (2*depth+1); % patch size
-stride = ps;%ps - 2*pad;
+ps = 2*depth+1; % patch size
+stride = ps;%31;%ps - 2*pad;
 nPatches = 0;
 for f_iter = 1:numel(f_lst)

diff --git a/rcn_init_dag.m b/rcn_init_dag.m
index 25d6819..72223c0 100644
--- a/rcn_init_dag.m
+++ b/rcn_init_dag.m
@@ -1,4 +1,4 @@
-function net = rcn_init_dag(opts)
+function [net, derOutputs] = rcn_init_dag(opts)

 % define net
 net = dagnn.DagNN() ;
@@ -28,6 +28,15 @@
     net.addLayer(['bnorm',num2str(i)], dagnn.BatchNorm(), {['x',num2str(x)]}, {['x',num2str(x+1)]}, {}) ;
     x = x + 1;
   end
+
+  if i < opts.depth - 1
+    init = [0.001, 0.5];
+    if opts.resid, init(2)=0; end
+    convBlock = dagnn.Conv('size', [3,3,opts.filterSize,1], 'hasBias', true, 'init', init, 'pad', 1);
+    net.addLayer(sprintf('conv_out%d',i), convBlock, {sprintf('x%d',x)}, {sprintf('prediction%d',i)}, {['filters',num2str(opts.depth)], ['biases',num2str(opts.depth)]});
+    net.addLayer(sprintf('objective%d',i), dagnn.EuclidLoss(), ...
+      {sprintf('prediction%d',i),'label'}, sprintf('objective%d',i)) ;
+  end
 end
 init = [0.001, 0.5];
 if opts.resid, init(2)=0; end
@@ -36,3 +45,9 @@
 net.addLayer('objective', dagnn.EuclidLoss(), ...
   {'prediction','label'}, 'objective') ;
+
+derOutputs = {'objective', 1};
+for i=2:opts.depth-2
+  derOutputs{end+1}=sprintf('objective%d',i);
+  derOutputs{end+1}=i/100;
+end
\ No newline at end of file
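Note on the derOutputs tail added to rcn_init_dag.m above: MatConvNet's dagnn.DagNN takes derivative seeds as {name, value, ...} pairs, so the cell array returned here weights the final EuclidLoss by 1 and each intermediate objective_i by i/100. A minimal sketch of how a training step consumes these pairs, assuming the network's input variable is named 'input' and that im/label hold a training patch and its target (illustrative names, not taken from this repo):

% Minimal sketch: one weighted, deeply supervised forward/backward pass.
[net, derOutputs] = rcn_init_dag(opts);   % e.g. {'objective',1,'objective2',0.02,...}
net.initParams();
net.eval({'input', im, 'label', label}, derOutputs);
% Every loss listed in derOutputs is seeded with its scalar weight, so the
% shared layers accumulate the gradient of
%   1*objective + sum_i (i/100)*objective_i.

Note that the weighting loop starts at i = 2, so an objective1 layer, if the layer loop above creates one, receives no derivative seed here.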
From fc0368d6ee9c8a33d1e690dabb70da6442376116 Mon Sep 17 00:00:00 2001
From: kjw0612
Date: Sun, 11 Oct 2015 04:48:43 +0900
Subject: [PATCH 2/4] Switch to test mode when evaluating

---
 rcn_train_dag.m | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/rcn_train_dag.m b/rcn_train_dag.m
index a287330..5d2156a 100644
--- a/rcn_train_dag.m
+++ b/rcn_train_dag.m
@@ -110,7 +110,10 @@
 end

 if numel(opts.gpus)>0, net.move('gpu'); end
+backupmode = net.mode;
+net.mode = 'test';
 [baseline_psnr, stats.test(epoch)] = evalTest(epoch, opts, net);
+net.mode = backupmode;
 net.reset();
 if numel(opts.gpus)>0, net.move('cpu'); end

From 28a860871ec3d35304cc9e9130006a5a17af4578 Mon Sep 17 00:00:00 2001
From: kjw0612
Date: Sun, 11 Oct 2015 04:50:50 +0900
Subject: [PATCH 3/4] Enlarge capacity when dropout is on

---
 rcn_dag.m | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/rcn_dag.m b/rcn_dag.m
index 37d0970..c3b66d5 100644
--- a/rcn_dag.m
+++ b/rcn_dag.m
@@ -27,9 +27,10 @@
 opts.gpus = 2;
 opts.resid = 1;
 opts.recursive = 1;
-opts.dropout = 0;
+opts.dropout = 1;
 opts.depth = 10; % 10 optimal5
 opts.filterSize = 64;
+if opts.dropout, opts.filterSize = opts.filterSize * 8; end
 opts.pad = 0;
 opts.useBnorm = false;
 exp_name = 'exp';
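Context for the two patches above: dagnn.DagNN exposes a mode property, and in 'test' mode dropout layers pass activations through unchanged while batch normalization uses its accumulated moments. Switching modes around evalTest therefore keeps the reported PSNR deterministic, which matters once dropout is enabled; the 8x filter widening in patch 3 then compensates for the capacity reduced by dropout at training time. A minimal sketch of the save/switch/restore pattern, with evalTest standing for the function defined in rcn_train_dag.m:

% Minimal sketch: deterministic mid-training evaluation.
backupmode = net.mode;    % 'normal' while training
net.mode = 'test';        % dropout disabled, bnorm uses moving moments
psnr = evalTest(epoch, opts, net);
net.mode = backupmode;    % restore stochastic training behaviour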
From 10f7339d3a93dda1f4c8c8a7028df75f95d90ffb Mon Sep 17 00:00:00 2001
From: kjw0612
Date: Sun, 11 Oct 2015 20:35:16 +0900
Subject: [PATCH 4/4] [code] Add RCN Experiment Framework

---
 rcn_dag.m         |  2 +-
 rcn_experiments.m | 47 +++++++++++++++++++++--------------------------
 2 files changed, 22 insertions(+), 27 deletions(-)

diff --git a/rcn_dag.m b/rcn_dag.m
index c3b66d5..34881bf 100644
--- a/rcn_dag.m
+++ b/rcn_dag.m
@@ -27,7 +27,7 @@
 opts.gpus = 2;
 opts.resid = 1;
 opts.recursive = 1;
-opts.dropout = 1;
+opts.dropout = 0;
 opts.depth = 10; % 10 optimal5
 opts.filterSize = 64;
 if opts.dropout, opts.filterSize = opts.filterSize * 8; end

diff --git a/rcn_experiments.m b/rcn_experiments.m
index 78f5fc7..1f942fd 100644
--- a/rcn_experiments.m
+++ b/rcn_experiments.m
@@ -1,38 +1,33 @@
-%% Batch normalization effect experiment
+%% Experiment Framework

-[net_bn, info_bn] = rcn(...
-    'useBnorm', true);
-
-[net_fc, info_fc] = rcn(...
-    'useBnorm', false);
+net = {};
+info = {};
+exp_name = {};
+for i = 1:9
+  [net{end+1}, info{end+1}] = rcn_dag('filterSize', 2^i);
+  exp_name{end+1} = sprintf('filterSize %d (no dropout)', 2^i);
+end

 %%
 figure(1) ; clf ;
 subplot(1,2,1) ;
-semilogy(info_fc.val.objective, 'k') ; hold on ;
-semilogy(info_bn.val.objective, 'b') ;
-xlabel('Training samples [x10^3]'); ylabel('energy') ;
+for i = 1:numel(net)
+  val = zeros(1,numel(info{1}.val));
+  val(:) = info{i}.val.objective;
+  plot(val) ; hold on ;
+end
+xlabel('Training samples [x10^3]'); ylabel('objective (val)') ;
 grid on ;
-h=legend('BASE', 'BNORM') ;
-set(h,'color','none');
+h=legend(exp_name) ;
 title('objective') ;

-subplot(1,2,2) ;
-nProblem = numel(info_fc.test.error);
-base = info_fc.test.error{1}.ours;
-bnorm = info_bn.test.error{1}.ours;
-for problem_iter = 2:nProblem
-  base = base + info_fc.test.error{problem_iter}.ours;
-  bnorm = bnorm + info_bn.test.error{problem_iter}.ours;
-end
-base = base / nProblem;
-bnorm = bnorm / nProblem;
+subplot(1,2,2);

-plot(base, 'k') ; hold on ; % first row for top1e
-plot(bnorm, 'b') ;
-h=legend('BASE','BNORM') ;
+for i = 1:numel(net)
+  plot(info{i}.test) ; hold on ;
+end
+h=legend(exp_name, 'location', 'southeast') ;
 grid on ;
 xlabel('Training samples [x10^3]'); ylabel('error') ;
-set(h,'color','none') ;
-title('error') ;
+title('PSNR') ;
 drawnow ;
\ No newline at end of file
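The framework in patch 4 fixes one hyper-parameter grid (filterSize = 2^1..2^9) and overlays the validation objective and test curves of every run. The same collect-and-plot loop extends to any option that rcn_dag accepts as a name/value override; below is a sketch for a depth sweep, assuming 'depth' is forwarded to opts the same way 'filterSize' is above and that info.test holds numeric per-epoch values (both assumptions about code not shown in this series):

% Hypothetical depth sweep reusing the patch 4 plotting pattern.
net = {}; info = {}; exp_name = {};
for d = [5 10 15 20]
  [net{end+1}, info{end+1}] = rcn_dag('depth', d);
  exp_name{end+1} = sprintf('depth %d', d);
end
figure(2); clf;
for i = 1:numel(net)
  plot(info{i}.test); hold on;   % per-epoch test PSNR, as plotted above
end
h = legend(exp_name, 'location', 'southeast'); grid on;
xlabel('epoch'); ylabel('PSNR (dB)'); title('depth sweep');
drawnow;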