layers.py
import tensorflow as tf


class Dense():
    """Fully-connected layer"""
    def __init__(self, scope="dense_layer", size=None, dropout=1.,
                 nonlinearity=tf.identity):
        # (str, int, (float | tf.Tensor), tf.op)
        assert size, "Must specify layer size (num nodes)"
        self.scope = scope
        self.size = size
        self.dropout = dropout  # keep_prob
        self.nonlinearity = nonlinearity

    def __call__(self, x):
        """Dense layer currying, to apply layer to any input tensor `x`"""
        # tf.Tensor -> tf.Tensor
        with tf.name_scope(self.scope):
            while True:
                try:  # reuse weights if already initialized
                    return self.nonlinearity(tf.matmul(x, self.w) + self.b)
                except AttributeError:  # first call: build w & b, then retry
                    self.w, self.b = self.wbVars(x.get_shape()[1].value, self.size)
                    self.w = tf.nn.dropout(self.w, self.dropout)
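                    # NB: dropout masks the weight matrix itself
                    # (DropConnect-style) rather than the activations; with
                    # the default keep_prob of 1., tf.nn.dropout is a no-op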

    @staticmethod
    def wbVars(fan_in: int, fan_out: int):
        """Helper to initialize weights and biases, via He's adaptation
        of Xavier init for ReLUs: https://arxiv.org/abs/1502.01852
        """
        # (int, int) -> (tf.Variable, tf.Variable)
        stddev = tf.cast((2 / fan_in)**0.5, tf.float32)
        initial_w = tf.random_normal([fan_in, fan_out], stddev=stddev)
        initial_b = tf.zeros([fan_out])
        return (tf.Variable(initial_w, trainable=True, name="weights"),
                tf.Variable(initial_b, trainable=True, name="biases"))
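

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original file: it assumes an
    # MNIST-sized input (784 features) and illustrative layer sizes, and
    # shows the currying pattern: weights are built lazily on the first
    # call, then reused on every subsequent call.
    import numpy as np

    x_in = tf.placeholder(tf.float32, [None, 784], name="x")
    hidden = Dense("hidden", size=128, dropout=0.9, nonlinearity=tf.nn.relu)
    readout = Dense("readout", size=10)  # nonlinearity defaults to tf.identity

    logits = readout(hidden(x_in))  # weights for both layers built here

    # He init above gives hidden-layer weights stddev = sqrt(2 / 784) ~= 0.0505
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        batch = np.random.rand(32, 784).astype(np.float32)
        print(sess.run(logits, {x_in: batch}).shape)  # -> (32, 10)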