
Commit dd68543

Final edition
1 parent 61d3dc2 commit dd68543

1 file changed: +14 -39 lines changed

project1.py

@@ -2,6 +2,8 @@
 import numpy as np
 import matplotlib.pyplot as plt
 
+# COSC 525 Project 1: Owen Queen and Sai Thatigotla
+
 class Neuron:
     def __init__(self, num_inputs, w_0, activation = 'logistic', \
         learning_rate = 0.01):
@@ -99,7 +101,6 @@ def calcpartialderivative(self, l1_deltas_x_w):
 
         # Stores the vector of partial derivatives internally
         self.dE_dw = dE_dw
-        #print(self.dE_dw)
 
         # Return vector of delta * w
         # DON'T include bias
@@ -268,7 +269,6 @@ def __init__(self,numOfLayers,numOfNeurons, inputSize, activation='logistic', lo
 
         #set loss function
         if loss == 'binary':
-            #self.loss = lambda y, y_hat: np.sum(-(y*np.log(y_hat) + (1-y)*np.log(1-y_hat)))/self.n_n
             self.loss = lambda y_hat, y: np.sum([-(yt[0]*np.log(yh) + (1-yt[0])*np.log(1-yh)) for yh, yt in zip(y_hat, y)])/len(y)
             self.loss_deriv = lambda y_hat, y: -(y/y_hat) + ((1-y)/(1-y_hat))
         elif loss == 'square':
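
The reworked binary cross-entropy lambda above averages a per-sample loss over the batch. As a sanity check, here is a minimal standalone sketch (not part of the commit) that compares it against a vectorized NumPy computation; it assumes y_hat is a list of scalar predictions in (0, 1) and y is a list of one-element ground-truth vectors, which is how the lambda's yt[0] indexing reads. The example values are placeholders.

```python
import numpy as np

# Standalone check (not from the commit): per-sample binary cross-entropy as
# written in the diff vs. a vectorized NumPy reference. Values are placeholders.
loss = lambda y_hat, y: np.sum([-(yt[0]*np.log(yh) + (1-yt[0])*np.log(1-yh))
                                for yh, yt in zip(y_hat, y)])/len(y)

y_hat = [0.9, 0.2, 0.7, 0.4]   # predicted probabilities, one per sample
y = [[1], [0], [1], [0]]       # ground-truth labels, one-element vectors

p = np.array(y_hat)
t = np.array(y).ravel()
reference = np.mean(-(t*np.log(p) + (1-t)*np.log(1-p)))

print(loss(y_hat, y), reference)   # the two values agree (about 0.299 here)
```

The accompanying loss_deriv lambda is the elementwise derivative of that per-sample term with respect to y_hat, i.e. -(y/y_hat) + (1-y)/(1-y_hat).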
@@ -448,9 +448,18 @@ def plot_one_loss_curve(losses, ep = None, title = 'Loss per Epochs'):
 nn.train(x, y)
 print('Calculated Outputs (after 1 epoch) =', nn.calculate(x))
 print("Weights in Network (please refer to in-class example for each weight's label):")
-# Iterate over neuron 1:
 
-# Iterate over neuron 2:
+print('Weight \t Value')
+# Iterate over neuron h1, h2:
+count = 1
+bcount = 1
+for i in [0, 1]:
+    for j in [0, 1]:
+        print('w{} \t {}'.format(count, nn.network[i].neurons[j].w[0]))
+        print('w{} \t {}'.format(count + 1, nn.network[i].neurons[j].w[1]))
+        print('b{} \t {}'.format(bcount, nn.network[i].neurons[j].w[2]))
+        bcount += 1
+        count += 2
 
 print('Loss (MSE) after 1 Epoch =', nn.calculateloss(nn.calculate(x), y))
 
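For illustration only, a minimal standalone sketch of what the new printing loop produces, with a mock network list standing in for the repository's trained nn.network; each neuron's w is assumed to hold its two input weights followed by its bias, as the w[0], w[1], w[2] indexing suggests, and the numeric values are placeholders.

```python
from types import SimpleNamespace

# Mock two-layer network, two neurons per layer; w = [w_in1, w_in2, bias].
# Placeholder values, not the commit's trained weights.
network = [
    SimpleNamespace(neurons=[SimpleNamespace(w=[0.15, 0.20, 0.35]),
                             SimpleNamespace(w=[0.25, 0.30, 0.35])]),
    SimpleNamespace(neurons=[SimpleNamespace(w=[0.40, 0.45, 0.60]),
                             SimpleNamespace(w=[0.50, 0.55, 0.60])]),
]

print('Weight \t Value')
count, bcount = 1, 1
for i in [0, 1]:
    for j in [0, 1]:
        print('w{} \t {}'.format(count, network[i].neurons[j].w[0]))
        print('w{} \t {}'.format(count + 1, network[i].neurons[j].w[1]))
        print('b{} \t {}'.format(bcount, network[i].neurons[j].w[2]))
        bcount += 1
        count += 2
```

With this indexing the loop labels the weights w1 through w8 and the biases b1 through b4, layer by layer and neuron by neuron.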
@@ -557,38 +566,4 @@ def plot_one_loss_curve(losses, ep = None, title = 'Loss per Epochs'):
 print('')
 
 # Plot the loss curve
-plot_one_loss_curve(net_loss, ep = ep)
-# ------------------------
-# Neural net with 3 layers
-nn = NeuralNetwork(3, [2, 2, 1], 2, lr = 0.5, loss = 'binary')
-
-net_loss = []
-first = True
-
-# Run for 100 epochs
-for i in range(0, 10000):
-    for j in range(0, len(x)):
-        nn.train(x[j], y[j])
-
-    y_hat = [nn.calculate(xi) for xi in x]
-
-    net_loss.append(nn.calculateloss(np.array(y_hat), y))
-
-    # Classify the predictions based on definition of sigmoid
-    y_hat_preds = np.array([0 if yh < 0.5 else 1 for yh in y_hat])
-
-    # Stop epochs early if predictions are correct:
-    if (check_logical_predictions(y_hat_preds, [0, 1, 1, 0]) and first):
-        first = False
-        epn = i
-
-# Printing the final predictions:
-print('Running XOR Logic Data (Network with 3 Hidden Layers)')
-print('Input \t Prediction \t Ground Truth')
-for i in range(len(y_hat)):
-    print('{} \t {:.6f} \t {}'.format(x[i], y_hat[i][0], y[i][0]))
-print('Epoch of Convergence', epn)
-print('Final Loss (Binary Cross Entropy) =', net_loss[-1])
-
-# Plot the loss curve
-plot_one_loss_curve(net_loss, ep = epn)
+plot_one_loss_curve(net_loss, ep = ep, title = '1 Hidden Layer Loss vs. Epoch (XOR)')
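
The removed block trained a deeper network on XOR, thresholded the sigmoid outputs at 0.5, and recorded the first epoch at which all four predictions were correct. A minimal standalone sketch of that convergence-check pattern follows, with placeholder outputs standing in for the repository's NeuralNetwork predictions and a local stand-in for the check_logical_predictions helper the removed code calls.

```python
import numpy as np

# Standalone sketch (not from the commit) of the convergence check used in the
# removed XOR block: threshold outputs at 0.5 and remember the first epoch at
# which every prediction matches the XOR targets.
def check_logical_predictions(preds, targets):
    # Stand-in for the repository helper of the same name: True when all match.
    return all(int(p) == int(t) for p, t in zip(preds, targets))

rng = np.random.default_rng(0)
targets = [0, 1, 1, 0]
epoch_of_convergence = None

for epoch in range(10000):
    # A real run would train on the four XOR samples here; placeholder
    # outputs stand in for nn.calculate(x).
    y_hat = rng.random(4)
    preds = [0 if yh < 0.5 else 1 for yh in y_hat]
    if check_logical_predictions(preds, targets) and epoch_of_convergence is None:
        epoch_of_convergence = epoch

print('Epoch of Convergence', epoch_of_convergence)
```

As in the removed code, the loop keeps the first converged epoch rather than stopping, so the recorded epoch reflects when the predictions first became correct.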
