Commit af0e242: "Fixed some bit rot"
1 parent a773c98

9 files changed, +43 -59 lines
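
The files below all get the same mechanical refresh for DyNet's current C++ API: Model becomes ParameterCollection, the separate dynet::expr namespace is folded into dynet, the deprecated trainer.update_epoch(1.0) call is dropped, data paths are resolved from the repository root instead of "../", and the Makefile links a single DYNET_LIB for both CPU and GPU targets. A minimal sketch of the post-migration training pattern (toy model; names and dimensions are illustrative, not from this commit):

// Sketch: DyNet 2.x training step after the migration in this commit.
// The tiny linear model here is a placeholder, not code from the repo.
#include <dynet/dynet.h>
#include <dynet/expr.h>
#include <dynet/training.h>

using namespace dynet;  // expressions now live directly in dynet

int main(int argc, char** argv) {
  dynet::initialize(argc, argv);
  ParameterCollection model;                 // formerly: Model model;
  AdamTrainer trainer(model, 0.001);
  Parameter p_W = model.add_parameters({1, 3});

  ComputationGraph cg;
  Expression W = parameter(cg, p_W);
  Expression x = input(cg, Dim({3}), std::vector<float>{1.f, 2.f, 3.f});
  Expression loss = squared_norm(W * x);
  cg.forward(loss);
  cg.backward(loss);
  trainer.update();                          // per-update only; no update_epoch
  return 0;
}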

dynet-cpp/Makefile: +18 -19

@@ -1,11 +1,10 @@
 
 
-CUDA_PATH=/usr/local/cuda-8.0/targets/x86_64-linux/lib
+CUDA_PATH=/usr/local/cuda/targets/x86_64-linux/lib
 EIGEN_PATH=${HOME}/usr/local/eigen
 DYNET_PATH=${HOME}/work/dynet
 
-DYNET_LIB_CPU=-ldynet
-DYNET_LIB_GPU=-lgdynet -L${CUDA_PATH} -lcublas -lcudart
+DYNET_LIB=-ldynet
 
 # *** Mac or linux
 UNAME_S := $(shell uname -s)

@@ -26,49 +25,49 @@ clean:
 	rm -f rnnlm-batch treenn treenn-bulk bilstm-tagger bilstm-tagger-bulk bilstm-tagger-withchar bilstm-tagger-withchar-bulk rnnlm-batch-gpu treenn-gpu treenn-bulk-gpu bilstm-tagger-gpu bilstm-tagger-bulk-gpu bilstm-tagger-withchar-gpu bilstm-tagger-withchar-bulk-gpu
 
 rnnlm-batch: rnnlm-batch.cc
-	${CC} -o rnnlm-batch rnnlm-batch.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o rnnlm-batch rnnlm-batch.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 rnnlm-seq: rnnlm-seq.cc
-	${CC} -o rnnlm-seq rnnlm-seq.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o rnnlm-seq rnnlm-seq.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 treenn: treenn.cc
-	${CC} -o treenn treenn.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o treenn treenn.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 treenn-bulk: treenn-bulk.cc
-	${CC} -o treenn-bulk treenn-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o treenn-bulk treenn-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger: bilstm-tagger.cc
-	${CC} -o bilstm-tagger bilstm-tagger.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o bilstm-tagger bilstm-tagger.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-bulk: bilstm-tagger-bulk.cc
-	${CC} -o bilstm-tagger-bulk bilstm-tagger-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o bilstm-tagger-bulk bilstm-tagger-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-withchar: bilstm-tagger-withchar.cc
-	${CC} -o bilstm-tagger-withchar bilstm-tagger-withchar.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o bilstm-tagger-withchar bilstm-tagger-withchar.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-withchar-bulk: bilstm-tagger-withchar-bulk.cc
-	${CC} -o bilstm-tagger-withchar-bulk bilstm-tagger-withchar-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_CPU}
+	${CC} -o bilstm-tagger-withchar-bulk bilstm-tagger-withchar-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 rnnlm-batch-gpu: rnnlm-batch.cc
-	${CC} -o rnnlm-batch-gpu rnnlm-batch.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o rnnlm-batch-gpu rnnlm-batch.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 rnnlm-seq-gpu: rnnlm-seq.cc
-	${CC} -o rnnlm-seq-gpu rnnlm-seq.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o rnnlm-seq-gpu rnnlm-seq.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 treenn-gpu: treenn.cc
-	${CC} -o treenn-gpu treenn.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o treenn-gpu treenn.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 treenn-bulk-gpu: treenn-bulk.cc
-	${CC} -o treenn-bulk-gpu treenn-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o treenn-bulk-gpu treenn-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-gpu: bilstm-tagger.cc
-	${CC} -o bilstm-tagger-gpu bilstm-tagger.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o bilstm-tagger-gpu bilstm-tagger.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-bulk-gpu: bilstm-tagger-bulk.cc
-	${CC} -o bilstm-tagger-bulk-gpu bilstm-tagger-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o bilstm-tagger-bulk-gpu bilstm-tagger-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-withchar-gpu: bilstm-tagger-withchar.cc
-	${CC} -o bilstm-tagger-withchar-gpu bilstm-tagger-withchar.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o bilstm-tagger-withchar-gpu bilstm-tagger-withchar.cc ${CXX_FLAGS} ${DYNET_LIB}
 
 bilstm-tagger-withchar-bulk-gpu: bilstm-tagger-withchar-bulk.cc
-	${CC} -o bilstm-tagger-withchar-bulk-gpu bilstm-tagger-withchar-bulk.cc ${CXX_FLAGS} ${DYNET_LIB_GPU}
+	${CC} -o bilstm-tagger-withchar-bulk-gpu bilstm-tagger-withchar-bulk.cc ${CXX_FLAGS} ${DYNET_LIB}

dynet-cpp/bilstm-tagger-bulk.cc: +4 -6

@@ -17,7 +17,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1|tag1 word2|tag2 ..."
 // Yields pairs of lists of the form < [word1, word2, ...], [tag1, tag2, ...] >

@@ -43,7 +42,7 @@ vector<pair<vector<string>, vector<string> > > read(const string & fname) {
 class BiLSTMTagger {
 public:
 
-  BiLSTMTagger(unsigned layers, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, Model & model, Dict & wv, Dict & tv, unordered_map<string,int> & wc)
+  BiLSTMTagger(unsigned layers, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, ParameterCollection & model, Dict & wv, Dict & tv, unordered_map<string,int> & wc)
     : wv(wv), tv(tv), wc(wc) {
     unsigned nwords = wv.size();
     unsigned ntags = tv.size();

@@ -129,8 +128,8 @@ int main(int argc, char**argv) {
 
   time_point<system_clock> start = system_clock::now();
 
-  vector<pair<vector<string>, vector<string> > > train = read("../data/tags/train.txt");
-  vector<pair<vector<string>, vector<string> > > dev = read("../data/tags/dev.txt");
+  vector<pair<vector<string>, vector<string> > > train = read("data/tags/train.txt");
+  vector<pair<vector<string>, vector<string> > > dev = read("data/tags/dev.txt");
   Dict word_voc, tag_voc;
   unordered_map<string, int> word_cnt;
   for(auto & sent : train) {

@@ -146,7 +145,7 @@ int main(int argc, char**argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
   AdamTrainer trainer(model);
   trainer.clipping_enabled = false;
 

@@ -224,7 +223,6 @@ int main(int argc, char**argv) {
         trainer.update();
       }
     }
-    trainer.update_epoch(1.0);
   }
   return 0;
 }
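
The trainer.update_epoch(1.0) call removed here (and in every other file below) was deprecated and then dropped from DyNet; the trainer no longer tracks epochs at all. Where per-epoch learning-rate decay is still wanted, one option is scaling the trainer's learning rate by hand. A minimal sketch, assuming a DyNet 2.x build that exposes the trainer's public learning_rate field; the decay factor is an illustrative choice, not something this commit introduces:

// Sketch: replacing the removed trainer.update_epoch(1.0).
// Assumption: Trainer exposes a public learning_rate member (DyNet 2.x).
#include <dynet/dynet.h>
#include <dynet/training.h>

using namespace dynet;

int main(int argc, char** argv) {
  dynet::initialize(argc, argv);
  ParameterCollection model;
  AdamTrainer trainer(model, 0.001);
  for (int epoch = 0; epoch < 10; ++epoch) {
    // ... per minibatch: build a ComputationGraph, cg.backward(loss),
    //     then trainer.update() ...
    trainer.learning_rate *= 0.9f;  // optional manual per-epoch decay
  }
  return 0;
}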

dynet-cpp/bilstm-tagger-withchar-bulk.cc: +4 -6

@@ -17,7 +17,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1|tag1 word2|tag2 ..."
 // Yields pairs of lists of the form < [word1, word2, ...], [tag1, tag2, ...] >

@@ -43,7 +42,7 @@ vector<pair<vector<string>, vector<string> > > read(const string & fname) {
 class BiLSTMTagger {
 public:
 
-  BiLSTMTagger(unsigned layers, unsigned cembed_dim, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, Model & model, Dict & wv, Dict & cv, Dict & tv, unordered_map<string,int> & wc)
+  BiLSTMTagger(unsigned layers, unsigned cembed_dim, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, ParameterCollection & model, Dict & wv, Dict & cv, Dict & tv, unordered_map<string,int> & wc)
     : wv(wv), cv(cv), tv(tv), wc(wc) {
     unsigned nwords = wv.size();
     unsigned ntags = tv.size();

@@ -143,7 +142,7 @@ int main(int argc, char**argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
   AdamTrainer trainer(model, 0.001);
   trainer.clipping_enabled = false;
 

@@ -160,8 +159,8 @@ int main(int argc, char**argv) {
   int LAST_STEP = atoi(argv[7]);
   int TIMEOUT = atoi(argv[8]);
 
-  vector<pair<vector<string>, vector<string> > > train = read("../data/tags/train.txt");
-  vector<pair<vector<string>, vector<string> > > dev = read("../data/tags/dev.txt");
+  vector<pair<vector<string>, vector<string> > > train = read("data/tags/train.txt");
+  vector<pair<vector<string>, vector<string> > > dev = read("data/tags/dev.txt");
   Dict word_voc, tag_voc, char_voc;
   unordered_map<string, int> word_cnt;
   for(auto & sent : train) {

@@ -237,7 +236,6 @@ int main(int argc, char**argv) {
         trainer.update();
       }
     }
-    trainer.update_epoch(1.0);
   }
   return 0;
 }

dynet-cpp/bilstm-tagger-withchar.cc: +2 -4

@@ -17,7 +17,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1|tag1 word2|tag2 ..."
 // Yields pairs of lists of the form < [word1, word2, ...], [tag1, tag2, ...] >

@@ -43,7 +42,7 @@ vector<pair<vector<string>, vector<string> > > read(const string & fname) {
 class BiLSTMTagger {
 public:
 
-  BiLSTMTagger(unsigned layers, unsigned cembed_dim, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, Model & model, Dict & wv, Dict & cv, Dict & tv, unordered_map<string,int> & wc)
+  BiLSTMTagger(unsigned layers, unsigned cembed_dim, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, ParameterCollection & model, Dict & wv, Dict & cv, Dict & tv, unordered_map<string,int> & wc)
     : wv(wv), cv(cv), tv(tv), wc(wc) {
     unsigned nwords = wv.size();
     unsigned ntags = tv.size();

@@ -161,7 +160,7 @@ int main(int argc, char**argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
   AdamTrainer trainer(model, 0.001);
   trainer.clipping_enabled = false;
 

@@ -225,7 +224,6 @@ int main(int argc, char**argv) {
       cg.backward(loss_exp);
       trainer.update();
     }
-    trainer.update_epoch(1.0);
   }
   return 0;
 }

dynet-cpp/bilstm-tagger.cc: +2 -4

@@ -17,7 +17,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1|tag1 word2|tag2 ..."
 // Yields pairs of lists of the form < [word1, word2, ...], [tag1, tag2, ...] >

@@ -43,7 +42,7 @@ vector<pair<vector<string>, vector<string> > > read(const string & fname) {
 class BiLSTMTagger {
 public:
 
-  BiLSTMTagger(unsigned layers, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, Model & model, Dict & wv, Dict & tv, unordered_map<string,int> & wc)
+  BiLSTMTagger(unsigned layers, unsigned wembed_dim, unsigned hidden_dim, unsigned mlp_dim, ParameterCollection & model, Dict & wv, Dict & tv, unordered_map<string,int> & wc)
     : wv(wv), tv(tv), wc(wc) {
     unsigned nwords = wv.size();
     unsigned ntags = tv.size();

@@ -140,7 +139,7 @@ int main(int argc, char**argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
   AdamTrainer trainer(model);
   trainer.clipping_enabled = false;
 

@@ -204,7 +203,6 @@ int main(int argc, char**argv) {
      cg.backward(loss_exp);
       trainer.update();
     }
-    trainer.update_epoch(1.0);
   }
   return 0;
 }

dynet-cpp/rnnlm-batch.cc: +3 -4

@@ -11,11 +11,11 @@
 #include <dynet/expr.h>
 #include <dynet/lstm.h>
 #include <dynet/training.h>
+#include <dynet/param-init.h>
 
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1 word2 ..."
 // Yields lists of the form [word1, word2, ...]

@@ -40,7 +40,7 @@ struct RNNLanguageModel {
   Parameter W_sm;
   Parameter b_sm;
   VanillaLSTMBuilder builder;
-  explicit RNNLanguageModel(unsigned layers, unsigned input_dim, unsigned hidden_dim, unsigned vocab_size, Model& model) : builder(layers, input_dim, hidden_dim, model) {
+  explicit RNNLanguageModel(unsigned layers, unsigned input_dim, unsigned hidden_dim, unsigned vocab_size, ParameterCollection& model) : builder(layers, input_dim, hidden_dim, model) {
     p_c = model.add_lookup_parameters(vocab_size, {input_dim}, ParameterInitUniform(0.1));
     W_sm = model.add_parameters({vocab_size, hidden_dim}, ParameterInitUniform(0.5));
     b_sm = model.add_parameters({vocab_size}, ParameterInitUniform(0.5));

@@ -115,7 +115,7 @@ int main(int argc, char** argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
 
   if(argc != 6) {
     cerr << "Usage: " << argv[0] << " MB_SIZE EMBED_SIZE HIDDEN_SIZE SPARSE TIMEOUT" << endl;

@@ -188,6 +188,5 @@ int main(int argc, char** argv) {
       cg.backward(loss_exp);
       trainer.update();
     }
-    trainer.update_epoch(1.0);
   }
 }
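
rnnlm-batch.cc is the one file that needed a new include: ParameterInitUniform and the other initializers now come from dynet/param-init.h instead of arriving transitively through other headers. A minimal sketch of the initializer calls the file relies on (vocabulary size and dimensions here are placeholders, not the benchmark's real values):

// Sketch: ParameterInitUniform usage, which now requires dynet/param-init.h.
// Sizes are placeholders; see rnnlm-batch.cc for the real ones.
#include <dynet/dynet.h>
#include <dynet/param-init.h>

using namespace dynet;

int main(int argc, char** argv) {
  dynet::initialize(argc, argv);
  ParameterCollection model;
  // ParameterInitUniform(s) draws from Uniform(-s, s)
  LookupParameter p_c = model.add_lookup_parameters(1000, {64}, ParameterInitUniform(0.1));
  Parameter W_sm = model.add_parameters({32, 64}, ParameterInitUniform(0.5));
  Parameter b_sm = model.add_parameters({32}, ParameterInitUniform(0.5));
  return 0;
}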

dynet-cpp/rnnlm-seq.cc: +2 -4

@@ -15,7 +15,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 // Read a file where each line is of the form "word1 word2 ..."
 // Yields lists of the form [word1, word2, ...]

@@ -40,7 +39,7 @@ struct RNNLanguageModel {
   Parameter W_sm;
   Parameter b_sm;
   VanillaLSTMBuilder builder;
-  explicit RNNLanguageModel(unsigned layers, unsigned input_dim, unsigned hidden_dim, unsigned vocab_size, Model& model) : builder(layers, input_dim, hidden_dim, model) {
+  explicit RNNLanguageModel(unsigned layers, unsigned input_dim, unsigned hidden_dim, unsigned vocab_size, ParameterCollection& model) : builder(layers, input_dim, hidden_dim, model) {
     p_c = model.add_lookup_parameters(vocab_size, {input_dim}, ParameterInitUniform(0.1));
     W_sm = model.add_parameters({vocab_size, hidden_dim}, ParameterInitUniform(0.5));
     b_sm = model.add_parameters({vocab_size}, ParameterInitUniform(0.5));

@@ -100,7 +99,7 @@ int main(int argc, char** argv) {
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
 
   if(argc != 6) {
     cerr << "Usage: " << argv[0] << " MB_SIZE EMBED_SIZE HIDDEN_SIZE SPARSE TIMEOUT" << endl;

@@ -173,6 +172,5 @@ int main(int argc, char** argv) {
       cg.backward(loss_exp);
       trainer.update();
     }
-    trainer.update_epoch(1.0);
   }
 }

dynet-cpp/treenn-bulk.cc: +5 -7

@@ -16,7 +16,6 @@
 using namespace std;
 using namespace std::chrono;
 using namespace dynet;
-using namespace dynet::expr;
 
 class Tree {
 public:

@@ -103,7 +102,7 @@ vector<Tree*> read_dataset(const string & filename) {
 
 class TreeLSTMBuilder {
 public:
-  TreeLSTMBuilder(Model & model, Dict & word_vocab, unsigned wdim, unsigned hdim) :
+  TreeLSTMBuilder(ParameterCollection & model, Dict & word_vocab, unsigned wdim, unsigned hdim) :
     model(model), word_vocab(word_vocab), wdim(wdim), hdim(hdim) {
     WS = {model.add_parameters({hdim, wdim}), // 0: Wi
           model.add_parameters({hdim, wdim}), // 1: Wo

@@ -158,7 +157,7 @@ class TreeLSTMBuilder {
     return hc_ret;
   }
 
-  Model & model;
+  ParameterCollection & model;
   Dict & word_vocab;
   unsigned wdim, hdim;
   vector<Parameter> WS;

@@ -173,16 +172,16 @@ int main(int argc, char**argv) {
 
   time_point<system_clock> start = system_clock::now();
 
-  vector<Tree*> train = read_dataset("../data/trees/train.txt");
-  vector<Tree*> dev = read_dataset("../data/trees/dev.txt");
+  vector<Tree*> train = read_dataset("data/trees/train.txt");
+  vector<Tree*> dev = read_dataset("data/trees/dev.txt");
   Dict nonterm_voc, term_voc;
   for(auto tree : train) tree->make_vocab(nonterm_voc, term_voc);
   nonterm_voc.freeze();
   term_voc.convert("<unk>"); term_voc.freeze(); term_voc.set_unk("<unk>");
 
   // DyNet Starts
   dynet::initialize(argc, argv);
-  Model model;
+  ParameterCollection model;
   AdamTrainer trainer(model, 0.001);
   trainer.clipping_enabled = false;
 

@@ -244,7 +243,6 @@ int main(int argc, char**argv) {
     }
     std::chrono::duration<float> fs = (system_clock::now() - start);
     all_time += duration_cast<milliseconds>(fs).count() / float(1000);
-    trainer.update_epoch(1.0);
     int good = 0, bad = 0;
     for(auto tree : dev) {
       ComputationGraph cg;
