import numpy as np
import matplotlib.pyplot as plt

+# COSC 525 Project 1: Owen Queen and Sai Thatigotla
+
class Neuron:
    def __init__(self, num_inputs, w_0, activation='logistic', \
                 learning_rate=0.01):
@@ -99,7 +101,6 @@ def calcpartialderivative(self, l1_deltas_x_w):

        # Stores the vector of partial derivatives internally
        self.dE_dw = dE_dw
-        #print(self.dE_dw)

        # Return vector of delta * w
        # DON'T include bias
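For orientation, the hunk above shows only the tail of calcpartialderivative. A minimal sketch of the backprop step it appears to implement, consistent with the comments shown; the names self.inputs, self.output, and self.act_deriv are assumptions for illustration, not confirmed by this diff:

def calcpartialderivative(self, l1_deltas_x_w):
    # This neuron's delta: sum of the downstream delta*w terms, scaled by
    # the activation derivative at this neuron's output (assumed attributes)
    delta = np.sum(l1_deltas_x_w) * self.act_deriv(self.output)
    # dE/dw_i = delta * input_i; the bias weight sees a constant input of 1
    dE_dw = [delta * x_i for x_i in self.inputs] + [delta]
    # Stores the vector of partial derivatives internally
    self.dE_dw = dE_dw
    # Return vector of delta * w -- DON'T include the bias weight
    return [delta * w_i for w_i in self.w[:-1]]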
@@ -268,7 +269,6 @@ def __init__(self,numOfLayers,numOfNeurons, inputSize, activation='logistic', lo

        # set loss function
        if loss == 'binary':
-            #self.loss = lambda y, y_hat: np.sum(-(y*np.log(y_hat) + (1-y)*np.log(1-y_hat)))/self.n_n
            self.loss = lambda y_hat, y: np.sum([-(yt[0]*np.log(yh) + (1-yt[0])*np.log(1-yh)) for yh, yt in zip(y_hat, y)])/len(y)
            self.loss_deriv = lambda y_hat, y: -(y/y_hat) + ((1-y)/(1-y_hat))
        elif loss == 'square':
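A quick numerical spot-check of the 'binary' loss lambda above (the toy values are mine, not from the repo): yt[0] unpacks the one-element ground-truth rows, and the result matches the usual mean binary cross-entropy.

y_hat = np.array([0.9, 0.2])   # predicted probabilities
y = np.array([[1], [0]])       # ground-truth rows, indexed as yt[0]
loss = np.sum([-(yt[0]*np.log(yh) + (1-yt[0])*np.log(1-yh)) for yh, yt in zip(y_hat, y)])/len(y)
# = (-np.log(0.9) - np.log(0.8))/2 ≈ 0.1643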
@@ -448,9 +448,18 @@ def plot_one_loss_curve(losses, ep = None, title = 'Loss per Epochs'):
    nn.train(x, y)
    print('Calculated Outputs (after 1 epoch) =', nn.calculate(x))
    print("Weights in Network (please refer to in-class example for each weight's label):")
-    # Iterate over neuron 1:

-    # Iterate over neuron 2:
+    print('Weight \t Value')
+    # Iterate over neurons h1, h2:
+    count = 1
+    bcount = 1
+    for i in [0, 1]:
+        for j in [0, 1]:
+            print('w{} \t {}'.format(count, nn.network[i].neurons[j].w[0]))
+            print('w{} \t {}'.format(count + 1, nn.network[i].neurons[j].w[1]))
+            print('b{} \t {}'.format(bcount, nn.network[i].neurons[j].w[2]))
+            bcount += 1
+            count += 2

    print('Loss (MSE) after 1 Epoch =', nn.calculateloss(nn.calculate(x), y))

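The new printing loop hard-codes the 2-layer, 2-neuron indices from the in-class example. A layer-agnostic sketch of the same dump, assuming (as the loop above already does) that nn.network is the list of layers, each layer exposes .neurons, and each neuron's bias is the last entry of its .w vector:

count, bcount = 1, 1
for layer in nn.network:
    for neuron in layer.neurons:
        for w in neuron.w[:-1]:    # every weight except the trailing bias
            print('w{} \t {}'.format(count, w))
            count += 1
        print('b{} \t {}'.format(bcount, neuron.w[-1]))
        bcount += 1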
@@ -557,38 +566,4 @@ def plot_one_loss_curve(losses, ep = None, title = 'Loss per Epochs'):
    print('')

    # Plot the loss curve
-    plot_one_loss_curve(net_loss, ep=ep)
-    # ------------------------
-    # Neural net with 3 layers
-    nn = NeuralNetwork(3, [2, 2, 1], 2, lr=0.5, loss='binary')
-
-    net_loss = []
-    first = True
-
-    # Run for up to 10000 epochs
-    for i in range(0, 10000):
-        for j in range(0, len(x)):
-            nn.train(x[j], y[j])
-
-        y_hat = [nn.calculate(xi) for xi in x]
-
-        net_loss.append(nn.calculateloss(np.array(y_hat), y))
-
-        # Classify the predictions based on definition of sigmoid
-        y_hat_preds = np.array([0 if yh < 0.5 else 1 for yh in y_hat])
-
-        # Stop epochs early if predictions are correct:
-        if check_logical_predictions(y_hat_preds, [0, 1, 1, 0]) and first:
-            first = False
-            epn = i
-
-    # Printing the final predictions:
-    print('Running XOR Logic Data (Network with 3 Hidden Layers)')
-    print('Input \t Prediction \t Ground Truth')
-    for i in range(len(y_hat)):
-        print('{} \t {:.6f} \t {}'.format(x[i], y_hat[i][0], y[i][0]))
-    print('Epoch of Convergence', epn)
-    print('Final Loss (Binary Cross Entropy) =', net_loss[-1])
-
-    # Plot the loss curve
-    plot_one_loss_curve(net_loss, ep=epn)
+    plot_one_loss_curve(net_loss, ep=ep, title='1 Hidden Layer Loss vs. Epoch (XOR)')
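plot_one_loss_curve itself is not part of this diff; only its signature appears in the hunk headers. A plausible body, offered purely as an assumption about how losses, ep, and title are used:

def plot_one_loss_curve(losses, ep=None, title='Loss per Epochs'):
    plt.plot(range(len(losses)), losses)
    if ep is not None:
        # Mark the epoch at which the predictions first converged
        plt.axvline(x=ep, color='r', linestyle='--', label='convergence epoch')
        plt.legend()
    plt.title(title)
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.show()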