 #! /usr/bin/python
-# -*- coding: utf8 -*-
+# -*- coding: utf-8 -*-
 
+import tensorflow as tf
+from tensorflow.python.util.deprecation import deprecated
 
+__all__ = [
+    'identity',
+    'ramp',
+    'leaky_relu',
+    'swish',
+    'pixel_wise_softmax',
+    'linear',
+    'lrelu',
+]
 
-import tensorflow as tf
 
-def identity(x, name=None):
-    """The identity activation function, Shortcut is ``linear``.
+@deprecated("2018-06-30", "This API will be deprecated soon as tf.identity can do the same thing.")
+def identity(x):
+    """The identity activation function.
+    Shortcut is ``linear``.
 
     Parameters
     ----------
-    x : a tensor input
-        input(s)
-
+    x : Tensor
+        input.
 
     Returns
-    --------
-    A `Tensor` with the same type as `x`.
+    -------
+    Tensor
+        A ``Tensor`` with the same type as ``x``.
+
     """
     return x
 
-# Shortcut
-linear = identity
 
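The deprecation notice points at `tf.identity` as the replacement; a minimal sketch of the swap (assuming TensorFlow 1.x graph mode, as this module uses):

```python
import tensorflow as tf

x = tf.constant([1.0, -2.0])
y = tf.identity(x)  # behaves like tl.act.identity / tl.act.linear

with tf.Session() as sess:
    print(sess.run(y))  # [ 1. -2.]
```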
-def ramp(x=None, v_min=0, v_max=1, name=None):
+def ramp(x, v_min=0, v_max=1, name=None):
     """The ramp activation function.
 
     Parameters
     ----------
-    x : a tensor input
-        input(s)
+    x : Tensor
+        input.
     v_min : float
-        if input(s) smaller than v_min, change inputs to v_min
+        cap input to v_min as a lower bound.
     v_max : float
-        if input(s) greater than v_max, change inputs to v_max
-    name : a string or None
-        An optional name to attach to this activation function.
-
+        cap input to v_max as an upper bound.
+    name : str
+        The function name (optional).
 
     Returns
-    --------
-    A `Tensor` with the same type as `x`.
+    -------
+    Tensor
+        A ``Tensor`` with the same type as ``x``.
+
     """
     return tf.clip_by_value(x, clip_value_min=v_min, clip_value_max=v_max, name=name)
 
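Since `ramp` is a thin wrapper over `tf.clip_by_value`, a quick sanity check of the clipping bounds (a sketch, TensorFlow 1.x session assumed):

```python
import tensorflow as tf

x = tf.constant([-2.0, 0.5, 3.0])
y = tf.clip_by_value(x, clip_value_min=0, clip_value_max=1)  # what ramp(x) computes

with tf.Session() as sess:
    print(sess.run(y))  # [0.  0.5 1. ]
```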
-def leaky_relu(x=None, alpha=0.1, name="LeakyReLU"):
+
+def leaky_relu(x, alpha=0.1, name="lrelu"):
     """The LeakyReLU, Shortcut is ``lrelu``.
 
-    Modified version of ReLU, introducing a nonzero gradient for negative
-    input.
+    Modified version of ReLU, introducing a nonzero gradient for negative input.
 
     Parameters
     ----------
-    x : A `Tensor` with type `float`, `double`, `int32`, `int64`, `uint8`,
-        `int16`, or `int8`.
-    alpha : `float`. slope.
-    name : a string or None
-        An optional name to attach to this activation function.
+    x : Tensor
+        Supported input types: ``float``, ``double``, ``int32``, ``int64``, ``uint8``,
+        ``int16``, or ``int8``.
+    alpha : float
+        Slope.
+    name : str
+        The function name (optional).
 
     Examples
-    ---------
-    >>> network = tl.layers.DenseLayer(network, n_units=100, name = 'dense_lrelu',
-    ...                 act= lambda x : tl.act.lrelu(x, 0.2))
+    --------
+    >>> net = tl.layers.DenseLayer(net, 100, act=lambda x : tl.act.lrelu(x, 0.2), name='dense')
+
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` with the same type as ``x``.
 
     References
     ------------
-    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`_
+    - `Rectifier Nonlinearities Improve Neural Network Acoustic Models, Maas et al. (2013) <http://web.stanford.edu/~awni/papers/relu_hybrid_icml2013_final.pdf>`__
+
     """
-    with tf.name_scope(name) as scope:
-        # x = tf.nn.relu(x)
-        # m_x = tf.nn.relu(-x)
-        # x -= alpha * m_x
-        x = tf.maximum(x, alpha * x)
+    # with tf.name_scope(name) as scope:
+    #     x = tf.nn.relu(x)
+    #     m_x = tf.nn.relu(-x)
+    #     x -= alpha * m_x
+    x = tf.maximum(x, alpha * x, name=name)
     return x
 
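The rewrite drops the name scope and passes `name` straight to `tf.maximum`; a quick numeric check of the `max(x, alpha * x)` formulation (sketch, TensorFlow 1.x assumed):

```python
import tensorflow as tf

x = tf.constant([-1.0, 2.0])
y = tf.maximum(x, 0.1 * x, name='lrelu')  # leaky_relu with alpha=0.1

with tf.Session() as sess:
    print(sess.run(y))  # [-0.1  2. ]
```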
-#Shortcut
-lrelu = leaky_relu
 
-def pixel_wise_softmax(output, name='pixel_wise_softmax'):
+def swish(x, name='swish'):
+    """The Swish function.
+    See `Swish: a Self-Gated Activation Function <https://arxiv.org/abs/1710.05941>`__.
+
+    Parameters
+    ----------
+    x : Tensor
+        input.
+    name : str
+        function name (optional).
+
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` with the same type as ``x``.
+
+    """
+    with tf.name_scope(name):
+        x = tf.nn.sigmoid(x) * x
+    return x
+
+
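Swish is simply `x * sigmoid(x)`; a minimal check of the values it produces (sketch, TensorFlow 1.x assumed):

```python
import tensorflow as tf

x = tf.constant([-1.0, 0.0, 1.0])
y = tf.nn.sigmoid(x) * x  # what swish(x) computes

with tf.Session() as sess:
    print(sess.run(y))  # approx [-0.269  0.     0.731]
```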
+@deprecated("2018-06-30", "This API will be deprecated soon as tf.nn.softmax can do the same thing.")
+def pixel_wise_softmax(x, name='pixel_wise_softmax'):
     """Return the softmax outputs of images; every pixel has a distribution over multiple labels, and each pixel's outputs sum to 1.
     Usually used for image segmentation.
 
     Parameters
-    ------------
-    output : tensor
-        - For 2d image, 4D tensor [batch_size, height, weight, channel], channel >= 2.
-        - For 3d image, 5D tensor [batch_size, depth, height, weight, channel], channel >= 2.
+    ----------
+    x : Tensor
+        input.
+        - For a 2D image, a 4D tensor (batch_size, height, width, channel), where channel >= 2.
+        - For a 3D image, a 5D tensor (batch_size, depth, height, width, channel), where channel >= 2.
+    name : str
+        function name (optional).
+
+    Returns
+    -------
+    Tensor
+        A ``Tensor`` with the same type as ``x``.
+
89 | 137 |
|
90 | 138 | Examples
|
91 |
| - --------- |
| 139 | + -------- |
92 | 140 | >>> outputs = pixel_wise_softmax(network.outputs)
|
93 | 141 | >>> dice_loss = 1 - dice_coe(outputs, y_, epsilon=1e-5)
|
94 | 142 |
|
95 | 143 | References
|
96 |
| - ----------- |
97 |
| - - `tf.reverse <https://www.tensorflow.org/versions/master/api_docs/python/array_ops.html#reverse>`_ |
| 144 | + ---------- |
| 145 | + - `tf.reverse <https://www.tensorflow.org/versions/master/api_docs/python/array_ops.html#reverse>`__ |
| 146 | +
|
98 | 147 | """
-    with tf.name_scope(name) as scope:
-        return tf.nn.softmax(output)
-    ## old implementation
-    # exp_map = tf.exp(output)
-    # if output.get_shape().ndims == 4:  # 2d image
-    #     evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, True]))
-    # elif output.get_shape().ndims == 5:  # 3d image
-    #     evidence = tf.add(exp_map, tf.reverse(exp_map, [False, False, False, False, True]))
-    # else:
-    #     raise Exception("output parameters should be 2d or 3d image, not %s" % str(output._shape))
-    # return tf.div(exp_map, evidence)
+    with tf.name_scope(name):
+        return tf.nn.softmax(x)
+
+
+# Alias
+linear = identity
+lrelu = leaky_relu
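As the deprecation message notes, the body is now plain `tf.nn.softmax` over the last axis; a small check that each pixel's probabilities sum to 1 (sketch, TensorFlow 1.x and NumPy assumed):

```python
import numpy as np
import tensorflow as tf

# Fake segmentation logits: (batch, height, width, channel).
logits = tf.constant(np.random.randn(1, 2, 2, 3), dtype=tf.float32)
probs = tf.nn.softmax(logits)  # what pixel_wise_softmax(x) returns

with tf.Session() as sess:
    print(sess.run(probs).sum(axis=-1))  # all ones: each pixel sums to 1
```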