Skip to content
Snippets Groups Projects
Commit 1880d96d authored by Michael Mutote's avatar Michael Mutote
Browse files

22202956 - added plots and empty neural network file. Narender to fill up

parent 1df7f2ca
No related branches found
No related tags found
No related merge requests found
import numpy as np
import Training_data
import matplotlib.pyplot as plt

# Reproducible RNG shared by weight initialisation and training-order shuffles.
rng = np.random.default_rng(123)

TEACHDATA = 99999  # training iterations (samples drawn per digit class)
TESTDATA = 999     # test samples per digit class
T_NUMBER = 6       # Number to be detected 0-6 (reassigned per digit by the test script)
...@@ -31,6 +32,14 @@ class Neuron: ...@@ -31,6 +32,14 @@ class Neuron:
self.input_count = input_count self.input_count = input_count
self.activation = activation self.activation = activation
self.weights = rng.random(input_count + 1) self.weights = rng.random(input_count + 1)
self.errors = []
def plot_errors(self, titel):
    """Render a line chart of the per-iteration errors recorded during training.

    titel -- text displayed as the figure title.
    """
    plt.plot(self.errors)
    # Apply axis labels and title via a small dispatch table, then display.
    figure_text = (('xlabel', 'Iteration'), ('ylabel', 'Error'), ('title', titel))
    for method_name, text in figure_text:
        getattr(plt, method_name)(text)
    plt.show()
def test(self, data): def test(self, data):
ix = np.insert(data.ravel(), 0, 1) ix = np.insert(data.ravel(), 0, 1)
...@@ -49,12 +58,13 @@ class ThresholdPerceptron(Neuron): ...@@ -49,12 +58,13 @@ class ThresholdPerceptron(Neuron):
ix = np.insert(teach_data[j][i].ravel(), 0, 1) ix = np.insert(teach_data[j][i].ravel(), 0, 1)
RI = self.activation(ix.dot(self.weights)) RI = self.activation(ix.dot(self.weights))
if RI != T: if RI != T:
delta = ETA * \ err = T - self.activation(ix.dot(self.weights))
(T - self.activation(ix.dot(self.weights))) * ix delta = ETA * err * ix
self.weights = self.weights + delta self.weights = self.weights + delta
if np.linalg.norm(old_weights - self.weights) == 0.00: self.errors.append(abs(err))
return self.weights # if np.linalg.norm(old_weights - self.weights) == 0.00:
return self.weights # return
return
class SGDPerceptron(Neuron): class SGDPerceptron(Neuron):
...@@ -65,16 +75,17 @@ class SGDPerceptron(Neuron): ...@@ -65,16 +75,17 @@ class SGDPerceptron(Neuron):
for i in range(TEACHDATA): for i in range(TEACHDATA):
old_weights = np.copy(self.weights) old_weights = np.copy(self.weights)
delta = [0 for _ in range(len(old_weights))] delta = [0 for _ in range(len(old_weights))]
for j in rng.choice(rng.permutation(len(teach_data)), 3): for j in rng.choice(rng.permutation(len(teach_data)), 5):
T = (j == T_NUMBER) T = (j == T_NUMBER)
ix = np.insert(teach_data[j][i].ravel(), 0, 1) ix = np.insert(teach_data[j][i].ravel(), 0, 1)
z = ix.dot(self.weights) RI = self.activation(ix.dot(self.weights))
RI = self.activation(z) err = T - RI
delta = ETA * (T - RI) * RI * (1 - RI) * ix delta = ETA * err * RI * (1 - RI) * ix
self.errors.append(abs(err))
self.weights += delta self.weights += delta
# if np.linalg.norm(old_weights - self.weights) == 0.00: if np.linalg.norm(old_weights - self.weights) == 0.00:
# return self.weights return
return self.weights return
class LinearPerceptron(Neuron): class LinearPerceptron(Neuron):
...@@ -88,8 +99,10 @@ class LinearPerceptron(Neuron): ...@@ -88,8 +99,10 @@ class LinearPerceptron(Neuron):
for j in rng.permutation(len(teach_data)): for j in rng.permutation(len(teach_data)):
T = (j == T_NUMBER) T = (j == T_NUMBER)
ix = np.insert(teach_data[j][i].ravel(), 0, 1) ix = np.insert(teach_data[j][i].ravel(), 0, 1)
delta += ETA * (T - self.activation(ix.dot(self.weights))) * ix err = T - self.activation(ix.dot(self.weights))
delta += ETA * err * ix
self.errors.append(abs(err))
self.weights = self.weights + delta self.weights = self.weights + delta
# if np.linalg.norm(old_weights - self.weights) == 0.00: if np.linalg.norm(old_weights - self.weights) == 0.00:
# return self.weights return
return self.weights return
# README
# The threshold perceptron stagnates even though it is not very accurate, so the
# line that ended threshold training early was commented out.
# A plot call was added but left commented out, because showing plots too
# frequently causes an error.
import numpy as np import numpy as np
from prettytable import PrettyTable from prettytable import PrettyTable
import Perceptrons import Perceptrons
...@@ -10,7 +16,7 @@ trial_data = Perceptrons.test_data ...@@ -10,7 +16,7 @@ trial_data = Perceptrons.test_data
REPETION = 1 REPETION = 1
def run_test(neuron, learning_rate): def run_test(neuron, learning_rate, titel):
global table global table
table = PrettyTable() table = PrettyTable()
table.field_names = ["ETA", "0", "1", "2", "3", "4", "5", "6"] table.field_names = ["ETA", "0", "1", "2", "3", "4", "5", "6"]
...@@ -22,18 +28,19 @@ def run_test(neuron, learning_rate): ...@@ -22,18 +28,19 @@ def run_test(neuron, learning_rate):
res[i] = res[i] + round(abs(neuron.test(trial_array))) res[i] = res[i] + round(abs(neuron.test(trial_array)))
res = ["{:5d}".format(r) for r in res] res = ["{:5d}".format(r) for r in res]
table.add_row([ETA] + res) table.add_row([ETA] + res)
# neuron.plot_errors(titel + f"{ETA}")
print(table) print(table)
return neuron return neuron
# Evaluate every perceptron variant on each digit class.
# NOTE(review): the committed change used range(6), which silently skips digit 6
# even though T_NUMBER is documented as 0-6 and the result table has columns
# "0".."6" — restored to range(7) so all seven digits are tested.
E = np.array([0.05, 0.1, 0.2, 0.4, 0.75, 1, 2, 5])  # learning-rate sweep (loop-invariant, hoisted)
for i in range(7):
    Perceptrons.T_NUMBER = i
    print("Threshold Perceptron is looking for: ", Perceptrons.T_NUMBER)
    run_test(Perceptrons.ThresholdPerceptron(20), E/4, f"Threshold Perceptron digit {i} ")
    print("Linear Perceptron is looking for: ", Perceptrons.T_NUMBER)
    run_test(Perceptrons.LinearPerceptron(20), E / 160, f"Linear Perceptron digit {i} ")
    print("Sigmoid Gradient Descent Perceptron is looking for: ", Perceptrons.T_NUMBER)
    x = run_test(Perceptrons.SGDPerceptron(20), E, f"SGD Perceptron digit {i} ")
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment