Skip to content

Commit a800c8f

Browse files
committed
Implement the XOR example: losses, optimizers, datasets, training, etc.
1 parent b5fb456 commit a800c8f

File tree

15 files changed

+242
-5
lines changed

15 files changed

+242
-5
lines changed

Diff for: .gitignore

+1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
11
.vscode/
2+
.vs/
23
build/

Diff for: CMakeLists.txt

+5
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@ cmake_minimum_required(VERSION 3.0.0)
22

33
project(Tipousi VERSION 0.0.1)
44

5+
set(CMAKE_CXX_STANDARD 17)
6+
set(CMAKE_CXX_STANDARD_REQUIRED ON)
7+
58
# Set the default build type to Release if not specified
69
if(NOT CMAKE_BUILD_TYPE)
710
set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build" FORCE)
@@ -20,6 +23,8 @@ include_directories(
2023
${PROJECT_SOURCE_DIR}/include/layer
2124
${PROJECT_SOURCE_DIR}/include/graph
2225
${PROJECT_SOURCE_DIR}/include/base
26+
${PROJECT_SOURCE_DIR}/include/optimizer
27+
${PROJECT_SOURCE_DIR}/include/data
2328
)
2429

2530
# Source files

Diff for: CMakeSettings.json

+15
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
{
2+
"configurations": [
3+
{
4+
"name": "x64-Debug",
5+
"generator": "Ninja",
6+
"configurationType": "Debug",
7+
"inheritEnvironments": [ "msvc_x64_x64" ],
8+
"buildRoot": "${projectDir}\\out\\build\\${name}",
9+
"installRoot": "${projectDir}\\out\\install\\${name}",
10+
"cmakeCommandArgs": "",
11+
"buildCommandArgs": "",
12+
"ctestCommandArgs": ""
13+
}
14+
]
15+
}

Diff for: include/data/dataset.hpp

+40
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
#pragma once
#include <Eigen/Dense>
#include <cstddef> // FIX: std::size_t used below; was relying on transitive includes
#include <utility> // FIX: std::pair used below; was relying on transitive includes
#include <vector>

namespace Tipousi
{
    namespace Data
    {
        /// In-memory supervised dataset. Owns double-precision copies of the
        /// inputs X and targets Y (one sample per row) and exposes row-wise
        /// iteration, yielding each sample as a pair of float matrices.
        class Dataset
        {
        public:
            /// Copies X and Y into the dataset. Rows are samples; X and Y
            /// are expected to have the same number of rows (not checked).
            Dataset(const Eigen::MatrixXd &X, const Eigen::MatrixXd &Y);
            ~Dataset() = default;

            /// One (input, target) sample, converted to single precision.
            using DataPair = std::pair<Eigen::MatrixXf, Eigen::MatrixXf>;

            /// Minimal forward iterator over the dataset's rows.
            class Iterator
            {
            public:
                Iterator(const Eigen::MatrixXd &X, const Eigen::MatrixXd &Y,
                         size_t index);
                Iterator &operator++();
                bool      operator!=(const Iterator &other) const;
                /// Returns the current row of X and Y, cast to float.
                DataPair operator*() const;

            private:
                // Non-owning references: only valid while the owning
                // Dataset is alive.
                const Eigen::MatrixXd &m_X;
                const Eigen::MatrixXd &m_y;
                size_t                 m_index; // current row
            };

            Iterator begin() const;
            Iterator end() const;

        private:
            Eigen::MatrixXd m_X; // inputs, one sample per row
            Eigen::MatrixXd m_y; // targets, one sample per row
        };
    } // namespace Data
} // namespace Tipousi

Diff for: include/graph/sequential.hpp

+7-1
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
#pragma once
22

33
#include "graph/node.hpp"
4+
#include "graph/trainable.hpp"
45
#include <vector>
56

67
namespace Tipousi
78
{
89
namespace Graph
910
{
10-
class Sequential
11+
class Sequential : public Trainable
1112
{
1213

1314
public:
@@ -27,6 +28,11 @@ namespace Tipousi
2728
void forward(const Eigen::MatrixXf &in, Eigen::MatrixXf &out);
2829
void backward();
2930

31+
void train(const Data::Dataset &dataset,
32+
const Optimizer::OptimizerBase &optimizer,
33+
const Loss::LossBase &loss,
34+
const uint32_t n_epochs) override;
35+
3036
private:
3137
Node *m_input_node = nullptr;
3238
Node *m_output_node = nullptr;

Diff for: include/graph/trainable.hpp

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
#pragma once

#include "data/dataset.hpp"
#include "loss/base.hpp"
#include "optimizer/base.hpp"

#include <cstdint> // FIX: uint32_t used below; was relying on transitive includes

namespace Tipousi
{
    namespace Graph
    {
        /// Interface for graph types that can be fitted on a Dataset with
        /// an optimizer and a loss function.
        class Trainable
        {
        public:
            /// Run n_epochs of training over the whole dataset.
            virtual void train(const Data::Dataset &dataset,
                               const Optimizer::OptimizerBase &optimizer,
                               const Loss::LossBase &loss,
                               const uint32_t n_epochs) = 0;

        protected:
            // Protected special members: Trainable is an interface and must
            // not be constructed or deleted through a Trainable pointer.
            Trainable() = default;
            virtual ~Trainable() = default;
        };

    } // namespace Graph
} // namespace Tipousi

Diff for: include/loss/base.hpp

+22
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
#pragma once
#include <Eigen/Dense>

namespace Tipousi
{
    namespace Loss
    {
        /// Abstract interface for loss functions over float matrices.
        /// Implementations provide both the scalar loss value and its
        /// gradient with respect to the prediction.
        class LossBase
        {
        public:
            LossBase() = default;
            virtual ~LossBase() = default;

            /// Scalar loss for targets y against predictions y_pred.
            virtual float compute(const Eigen::MatrixXf &y,
                                  const Eigen::MatrixXf &y_pred) const = 0;

            /// Writes d(loss)/d(y_pred) into out_grad (same shape as y_pred).
            virtual void grad(Eigen::MatrixXf &out_grad,
                              const Eigen::MatrixXf &y,
                              const Eigen::MatrixXf &y_pred) const = 0;
        };
    }; // namespace Loss
}; // namespace Tipousi

Diff for: include/loss/mse.hpp

+10-1
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,20 @@
11
#pragma once
#include "loss/base.hpp"

namespace Tipousi
{
    namespace Loss
    {
        /// Mean-squared-error loss: compute() returns the mean of the
        /// squared element-wise differences between y and y_pred.
        class MSE : public LossBase
        {
            // FIX: members were private (class default access), which made
            // MSE unconstructible by users and its API uncallable directly.
        public:
            MSE() = default;
            ~MSE() = default;

            float compute(const Eigen::MatrixXf &y,
                          const Eigen::MatrixXf &y_pred) const override;

            void grad(Eigen::MatrixXf &out_grad, const Eigen::MatrixXf &y,
                      const Eigen::MatrixXf &y_pred) const override;
        };
    }; // namespace Loss
}; // namespace Tipousi

Diff for: include/loss/template.hpp renamed to include/optimizer/base.hpp

+3-3
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,10 @@
22

33
namespace Tipousi
{
    namespace Optimizer
    {
        /// Base class for optimizers. Currently an empty tag type used only
        /// so training code can take any optimizer by reference; concrete
        /// optimizers (e.g. SGD) derive from it.
        class OptimizerBase
        {
        };
    }; // namespace Optimizer
}; // namespace Tipousi

Diff for: include/optimizer/sgd.hpp

+15
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
#pragma once
#include "optimizer/base.hpp"

namespace Tipousi
{
    namespace Optimizer
    {
        /// Stochastic gradient descent optimizer.
        /// Placeholder in this revision: it carries no state yet (no
        /// learning rate, no parameter references) — see sgd.cpp.
        class SGD : public OptimizerBase
        {
        public:
            SGD();
            ~SGD() = default;
        };
    }; // namespace Optimizer
}; // namespace Tipousi

Diff for: src/data/dataset.cpp

+46
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
#include "data/dataset.hpp"

namespace Tipousi
{
    namespace Data
    {
        /// Takes private copies of the input and target matrices.
        Dataset::Dataset(const Eigen::MatrixXd &X, const Eigen::MatrixXd &Y)
            : m_X(X), m_y(Y)
        {
        }

        Dataset::Iterator::Iterator(const Eigen::MatrixXd &X,
                                    const Eigen::MatrixXd &Y, size_t index)
            : m_X(X), m_y(Y), m_index(index)
        {
        }

        /// Advances to the next sample row.
        Dataset::Iterator &Dataset::Iterator::operator++()
        {
            m_index += 1;
            return *this;
        }

        /// Iterators differ when they point at different rows; the matrix
        /// references are assumed to come from the same Dataset.
        bool Dataset::Iterator::operator!=(const Dataset::Iterator &rhs) const
        {
            return !(m_index == rhs.m_index);
        }

        /// Yields the current (input, target) rows, cast to float.
        Dataset::DataPair Dataset::Iterator::operator*() const
        {
            return {m_X.row(m_index).cast<float>(),
                    m_y.row(m_index).cast<float>()};
        }

        Dataset::Iterator Dataset::begin() const { return {m_X, m_y, 0}; }

        Dataset::Iterator Dataset::end() const
        {
            return {m_X, m_y, static_cast<size_t>(m_X.rows())};
        }
    } // namespace Data
} // namespace Tipousi

Diff for: src/graph/sequential.cpp

+25
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
#include "graph/sequential.hpp"
2+
#include <iostream>
23

34
namespace Tipousi
45
{
@@ -58,5 +59,29 @@ namespace Tipousi
5859
//
5960
}
6061

62+
/// Trains the network for n_epochs full passes over the dataset, printing
/// the mean per-sample loss after each epoch.
void Sequential::train(const Data::Dataset &dataset,
                       const Optimizer::OptimizerBase &optimizer,
                       const Loss::LossBase &loss_func,
                       const uint32_t n_epochs)
{
    for (uint32_t i{0}; i < n_epochs; i++)
    {
        float    total_loss = 0.0f;
        uint32_t counter{0};
        for (const auto &[x, y] : dataset)
        {
            Eigen::MatrixXf output;
            Eigen::MatrixXf out_grad;
            forward(x, output);
            total_loss += loss_func.compute(y, output);
            // NOTE(review): out_grad is filled but never handed to
            // backward() — backward() takes no arguments in this revision,
            // so no loss gradient actually reaches the layers, and the
            // optimizer parameter is unused. TODO: plumb out_grad (and the
            // optimizer) into the backward pass.
            loss_func.grad(out_grad, y, output);
            backward();
            counter++;
        }
        // FIX: guard against an empty dataset — previously this divided by
        // zero (counter == 0) and printed inf/NaN.
        if (counter > 0)
        {
            std::cout << "Epoch: " << i
                      << ", Loss: " << total_loss / counter << std::endl;
        }
    }
}
85+
6186
} // namespace Graph
6287
} // namespace Tipousi

Diff for: src/graph/trainable.cpp

Whitespace-only changes.

Diff for: src/loss/mse.cpp

+19
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
#include "loss/mse.hpp"

namespace Tipousi
{
    namespace Loss
    {
        /// Mean of the squared element-wise differences over ALL
        /// coefficients of y / y_pred.
        float MSE::compute(const Eigen::MatrixXf &y,
                           const Eigen::MatrixXf &y_pred) const
        {
            return (y - y_pred).array().square().mean();
        }

        /// Gradient of compute() with respect to y_pred.
        /// FIX: compute() averages over all coefficients (rows * cols), so
        /// the gradient must be scaled by y.size(), not y.rows(); the two
        /// were only equivalent for single-column targets.
        void MSE::grad(Eigen::MatrixXf &out_grad, const Eigen::MatrixXf &y,
                       const Eigen::MatrixXf &y_pred) const
        {
            out_grad = 2.0f * (y_pred - y) / static_cast<float>(y.size());
        }
    } // namespace Loss
} // namespace Tipousi

Diff for: src/optimizer/sgd.cpp

+9
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
#include "optimizer/sgd.hpp"

namespace Tipousi
{
    namespace Optimizer
    {
        // Placeholder constructor: SGD holds no state in this revision (no
        // learning rate or parameter references yet).
        SGD::SGD() {}
    } // namespace Optimizer
} // namespace Tipousi

0 commit comments

Comments
 (0)