# run_gradient_checking.py
# Gradient-checking script for a small multilayer perceptron built from the
# project's nnlayers / neural_network modules.
from nnlayers import InputLayer, FullyConnectedLayer, LinearOutput, Activation, SoftmaxOutput
from neural_network import NeuralNetwork
import numpy as np
# Network and data dimensions: a batch of 5 examples with 10 features each,
# classified into 6 labels.
input_shape = (5, 10)
n_labels = 6

# Stack the layers: input -> ReLU(15) -> sigmoid(6) -> linear(n_labels),
# each hidden layer wired to the previous one, topped with a softmax output.
layers = [InputLayer(input_shape)]
for n_units, act in ((15, Activation('relu')),
                     (6, Activation('sigmoid')),
                     (n_labels, None)):
    layers.append(FullyConnectedLayer(
        layers[-1],
        num_units=n_units,
        init_stddev=0.1,
        activation_fun=act,
    ))
layers.append(SoftmaxOutput(layers[-1]))
nn = NeuralNetwork(layers)

# Random Gaussian inputs.
X = np.random.normal(size=input_shape)
# Random one-hot targets: pick one label per example, set that column to 1.
chosen = np.random.randint(n_labels, size=input_shape[0])
Y = np.zeros((input_shape[0], n_labels))
Y[np.arange(input_shape[0]), chosen] = 1.

# Compare analytic gradients against finite differences.
nn.check_gradients(X, Y)