Skip to content

Commit 3832713

Browse files
author
Hiroyuki KASAI
committed
Version 1.0.2
1 parent ae46680 commit 3832713

File tree

6 files changed

+421
-33
lines changed

6 files changed

+421
-33
lines changed

README.md

+5-2
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
11
# RSOpt (Riemannian stochastic optimization algorithms)
22

33
Authors: [Hiroyuki Kasai](http://www.kasailab.com/kasai_e.htm)
4+
Collaborators: [Bamdev Mishra](https://bamdevmishra.in/) and [Hiroyuki Sato](https://sites.google.com/site/hiroyukisatoeng/)
45

5-
Last page update: July 20, 2018
6+
Last page update: September 13, 2018
67

7-
Latest version: 1.0.1 (see Release notes for more info)
8+
Latest version: 1.0.2 (see Release notes for more info)
89

910
<br />
1011

@@ -132,6 +133,8 @@ If you have any problems or questions, please contact the author: [Hiroyuki Kasa
132133

133134
Release Notes
134135
--------------
136+
* Version 1.0.2 (Sep. 13, 2018)
137+
- MC problem (with Jester dataset) example is added.
135138
* Version 1.0.1 (July 20, 2018)
136139
- Initial codes are available.
137140
* Version 1.0.0 (July 12, 2018)

dataset/jester/jester_mat.mat

4.74 MB
Binary file not shown.

demo.m

+12-9
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,18 @@
11
function demo()
2+
% This file is part of RSOpt package.
3+
%
4+
% Created by H.Kasai and B.Mishra on July 20, 2018
25

36
clc; close all; clear
47

58

6-
%% Define parameters
9+
%% define parameters
710
N = 500;
811
d = 3;
912

1013

1114

12-
%% Read dataset
15+
%% read dataset
1316
input_data = load('./dataset/psd/psd_mean_3_500_5.mat');
1417
A = zeros(d, d, N);
1518
A = input_data.x_sample{1};
@@ -18,13 +21,13 @@ function demo()
1821

1922

2023

21-
%% Set manifold
24+
%% set manifold
2225
problem.M = sympositivedefinitefactory_mod(d);
2326
problem.ncostterms = N;
2427

2528

26-
27-
%% Cost function
29+
%% define problem
30+
% cost function
2831
problem.cost = @cost;
2932
function f = cost(X)
3033
f=0;
@@ -46,7 +49,7 @@ function demo()
4649

4750

4851

49-
%% Riemannian gradient of the cost function
52+
% Riemannian gradient of the cost function
5053
problem.rgrad = @rgrad;
5154
function g = rgrad(X)
5255

@@ -64,7 +67,7 @@ function demo()
6467

6568

6669

67-
%% Riemannian stochastic gradient of the cost function
70+
% Riemannian stochastic gradient of the cost function
6871
problem.partialgrad = @partialgrad;
6972
function g = partialgrad(X, idx_batchsize)
7073

@@ -88,7 +91,7 @@ function demo()
8891

8992

9093

91-
%% Run SRG algorithms
94+
%% run SRG algorithm
9295

9396
Init = problem.M.rand();
9497

@@ -107,7 +110,7 @@ function demo()
107110

108111

109112

110-
%% Plots
113+
%% plots
111114
fs = 20;
112115

113116
% Optimality gap (Train loss - optimum) versus #grads/N

show_centroid_plots.m

+27-21
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,12 @@
11
function show_centroid_plots()
2+
% This file is part of RSOpt package.
3+
%
4+
% Created by H.Kasai and B.Mishra on July 20, 2018
5+
% Modified by H.Kasai on Sep. 13, 2018
26

37
clc; close all; clear
48

5-
%% Define parameters
9+
%% define parameters
610
tolgradnorm = 1e-8;
711
inner_repeat = 1;
812

@@ -22,28 +26,31 @@ function show_centroid_plots()
2226
cn = 5;
2327
srg_varpi = 0.05;
2428
end
25-
2629

30+
31+
32+
33+
%% generate dataset
2734
input_filename = sprintf('./dataset/psd/psd_mean_%d_%d_%d.mat', d, N, cn);
2835
fprintf('Reading file %s with (d:%d N:%d cn:%d) .... ', input_filename, d, N, cn);
2936
input_data = load(input_filename);
30-
3137
A = zeros(d, d, N);
32-
3338
A = input_data.x_sample{1};
3439
fprintf('done\n');
35-
3640
f_sol = input_data.f_sol{1};
3741
fprintf('f_sol: %.16e\n', f_sol);
3842

3943

40-
%% Set manifold
44+
45+
46+
47+
%% set manifold
4148
problem.M = sympositivedefinitefactory_mod(d);
4249
problem.ncostterms = N;
4350

4451

4552

46-
% Cost function
53+
%% define problem
4754
problem.cost = @cost;
4855
function f = cost(X)
4956
f=0;
@@ -60,11 +67,9 @@ function show_centroid_plots()
6067
f = f + norm(logm(arg),'fro')^2;
6168
end
6269

63-
f = f/(N);
70+
f = f/(2*N);
6471
end
6572

66-
67-
6873
% Riemannian gradient of the cost function
6974
problem.rgrad = @rgrad;
7075
function g = rgrad(X)
@@ -78,11 +83,10 @@ function show_centroid_plots()
7883

7984
g = 2*X*logsum;
8085
g = (g+g')/2;
81-
g = g/N;
86+
g = g/(2*N);
8287
end
8388

8489

85-
8690
% Riemannian stochastic gradient of the cost function
8791
problem.partialgrad = @partialgrad;
8892
function g = partialgrad(X, idx_batchsize)
@@ -106,27 +110,29 @@ function show_centroid_plots()
106110

107111

108112

109-
%% Run algorithms
110113

111-
% Initialize
114+
115+
%% run algorithms
116+
117+
% initialize
112118
Uinit = problem.M.rand();
113119

114120

115-
% Run R-SD
121+
% R-SD
116122
clear options;
117123
options.maxiter = maxepoch;
118124
options.tolgradnorm = tolgradnorm;
119125
[~, ~, infos_sd, options_sd] = steepestdescent(problem, Uinit, options);
120126

121127

122-
% Run R-CG
128+
% R-CG
123129
clear options;
124130
options.maxiter = maxepoch;
125131
options.tolgradnorm = tolgradnorm;
126132
[~, ~, infos_cg, options_cg] = conjugategradient(problem, Uinit, options);
127133

128134

129-
% Run SGD with decay step-size
135+
% R-SGD with decay step-size
130136
clear options;
131137
options.verbosity = 1;
132138
options.batchsize = 10;
@@ -141,7 +147,7 @@ function show_centroid_plots()
141147
[~, ~, infos_sgd, options_sgd] = Riemannian_svrg(problem, Uinit, options);
142148

143149

144-
% Run SVRG
150+
% R-SVRG
145151
clear options;
146152
options.verbosity = 1;
147153
options.batchsize = 10;
@@ -158,7 +164,7 @@ function show_centroid_plots()
158164
[~, ~, infos_svrg, options_svrg] = Riemannian_svrg(problem, Uinit, options);
159165

160166

161-
% Run SRG
167+
% R-SRG
162168
clear options;
163169
options.verbosity = 1;
164170
options.batchsize = 10;
@@ -187,7 +193,7 @@ function show_centroid_plots()
187193
[~, ~, infos_srg_plus, options_srg_plus] = Riemannian_srg(problem, Uinit, options);
188194

189195

190-
% Calculate # of gradient evaluations
196+
% calculate # of gradient evaluations
191197
num_grads_sd = (1:length([infos_sd.cost])) - 1; % N*options_sd.maxiter;
192198
num_grads_cg = (1:length([infos_cg.cost])) - 1; % N*options_sd.maxiter;
193199
num_grads_sgd = ceil(options_sgd.maxinneriter/N)*((1:length([infos_sgd.cost])) - 1); % options.maxepoch*(options_sgd.maxinneriter);
@@ -206,7 +212,7 @@ function show_centroid_plots()
206212

207213

208214

209-
%% Plots
215+
%% plots
210216
fs = 20;
211217

212218
% Optimality gap (Train loss - optimum) versus #grads/N

0 commit comments

Comments (0)