Commit e7353b7

output layer activation, add fc2 in call (#358)

Softmax was being applied to the output layer even during the training phase, and the fc2 layer was never used in call(). This commit removes the softmax activation from the output layer (the cross-entropy loss expects raw logits) and wires fc2 into the forward pass. Co-authored-by: Aymeric Damien <[email protected]>
1 parent 2cf9bfd commit e7353b7

File tree

1 file changed: +2 -2 lines changed

tensorflow_v2/notebooks/3_NeuralNetworks/neural_network.ipynb

Lines changed: 2 additions & 2 deletions
@@ -111,12 +111,12 @@
     "        # First fully-connected hidden layer.\n",
     "        self.fc2 = layers.Dense(n_hidden_2, activation=tf.nn.relu)\n",
     "        # Second fully-connecter hidden layer.\n",
-    "        self.out = layers.Dense(num_classes, activation=tf.nn.softmax)\n",
+    "        self.out = layers.Dense(num_classes)\n",
     "\n",
     "    # Set forward pass.\n",
     "    def call(self, x, is_training=False):\n",
     "        x = self.fc1(x)\n",
-    "        x = self.fc1(x)\n",
+    "        x = self.fc2(x)\n",
     "        x = self.out(x)\n",
     "        if not is_training:\n",
     "            # tf cross entropy expect logits without softmax, so only\n",
