Skip to content
Snippets Groups Projects
Commit 77bfb9d5 authored by Michael Mutote's avatar Michael Mutote
Browse files

22202956 - optimised training supervised training

parent d1c47221
No related branches found
No related tags found
No related merge requests found
import numpy as np

import Training_data

# Reproducible RNG shared by all perceptron classes.
rng = np.random.default_rng(123)

TEACHDATA = 100000  # training samples generated per digit class
TESTDATA = 1000     # test samples generated per digit class
# ETA = 0.5
T_NUMBER = 1  # Number to be detected 0-6
def sigmoid(val):
......@@ -13,6 +13,8 @@ def sigmoid(val):
def linear_act(val):
    """Identity activation: pass the net input through unchanged.

    NOTE: this could instead be made piecewise — linear inside the valid
    range and clamped to a constant outside it.
    """
    return val
def threshold(val):
    """Hard threshold activation: 1 for strictly positive input, else 0."""
    return 1 if val > 0 else 0
class Neuron:
    """Base class for the single-layer perceptron variants.

    Holds the data sets shared by all subclasses (built once as class
    attributes) plus the weight vector; subclasses implement train().
    """

    # Shared data sets, generated once for every perceptron variant.
    test_data = Training_data.make_testset(TESTDATA)
    teach_data = Training_data.make_testset(TEACHDATA)

    def __init__(self, input_count, activation):
        self.input_count = input_count
        self.activation = activation
        # input_count weights plus one bias weight at index 0.
        self.weights = rng.random(input_count + 1)

    def test(self):
        """Return, for each digit class, the activation summed over all test samples."""
        res = [0 for _ in range(len(Neuron.test_data))]
        for number in range(len(Neuron.test_data)):
            for sample in Neuron.test_data[number]:
                ix = np.insert(sample.ravel(), 0, 1)  # prepend the bias input
                res[number] = res[number] + (self.activation(ix.dot(self.weights)))
        return res
class ThresholdPerceptron(Neuron):
    """Perceptron with a hard threshold unit, trained by the classic perceptron rule."""

    def __init__(self, input_count, activation=threshold):
        super().__init__(input_count, activation)

    def train(self, ETA):
        """Train with learning rate ETA; stop early once an epoch changes no weight.

        Returns the final weight vector.
        """
        for i in range(TEACHDATA):
            old_weights = np.copy(self.weights)
            # Visit the digit classes in a fresh random order each epoch.
            for j in rng.permutation(len(Neuron.teach_data)):
                T = 1 if j == T_NUMBER else 0  # target is 1 only for the sought digit
                ix = np.insert(Neuron.teach_data[j][i].ravel(), 0, 1)
                RI = self.activation(ix.dot(self.weights))
                if RI != T:
                    # Reuse RI instead of recomputing the activation.
                    delta = ETA * (T - RI) * ix
                    self.weights = self.weights + delta
            # Converged: a whole epoch produced no update.
            if np.linalg.norm(old_weights - self.weights) == 0.00:
                return self.weights
        return self.weights
class SGDPerceptron(Neuron):
    """Perceptron with a sigmoid unit, trained by stochastic gradient descent."""

    def __init__(self, input_count, activation=sigmoid):
        super().__init__(input_count, activation)

    def train(self, ETA):
        """Apply one SGD step per sampled class, using the sigmoid derivative RI*(1-RI).

        Returns the final weight vector.
        """
        for i in range(TEACHDATA):
            # Pick 3 digit classes at random for this pass (choice over the
            # index range directly; the extra rng.permutation was redundant).
            for j in rng.choice(len(Neuron.teach_data), 3):
                T = (j == T_NUMBER)  # bool target, used as 0/1 in arithmetic
                ix = np.insert(Neuron.teach_data[j][i].ravel(), 0, 1)
                RI = self.activation(ix.dot(self.weights))
                delta = ETA * (T - RI) * RI * (1 - RI) * ix
                self.weights += delta
        return self.weights
class LinearPerceptron(Neuron):
    """Perceptron with a linear unit, trained with batched delta-rule updates."""

    def __init__(self, input_count, activation=linear_act):
        super().__init__(input_count, activation)

    def train(self, ETA):
        """Accumulate the delta-rule update over all classes, apply it once per epoch.

        Returns the final weight vector.
        """
        for i in range(TEACHDATA):
            old_weights = np.copy(self.weights)
            # BUG FIX: the accumulator must be an ndarray. Starting from a plain
            # Python list made `delta += <ndarray>` *extend* the list instead of
            # adding element-wise, so the weight update was broken.
            delta = np.zeros_like(self.weights)
            for j in rng.permutation(len(Neuron.teach_data)):
                T = (j == T_NUMBER)  # bool target, used as 0/1 in arithmetic
                ix = np.insert(Neuron.teach_data[j][i].ravel(), 0, 1)
                delta += ETA * (T - self.activation(ix.dot(self.weights))) * ix
            self.weights = self.weights + delta
            # Converged: a whole epoch produced no net update.
            if np.linalg.norm(old_weights - self.weights) == 0.00:
                return self.weights
        return self.weights
import numpy as np
import Perceptrons
import PerceptronsSGD
def test_function(ETA):
input_count = 20
def test_function(ETA, p):
results = []
p = Perceptrons.Perceptron(input_count)
p.train_thres(ETA)
output = p.test()
output = np.round(p.test())
results.append((ETA, output))
return results
# Sweep the learning rate and compare all three perceptron variants.
for ETA in ([0.05, 0.1, 0.2, 0.4, 0.75, 1, 2, 5]):  # the list of values for ETA
    w = Perceptrons.ThresholdPerceptron(20)
    w.train(ETA)
    x = Perceptrons.LinearPerceptron(20)
    # The linear rule needs a much smaller step than the threshold rule.
    x.train(ETA / 200)
    y = Perceptrons.SGDPerceptron(20)
    y.train(ETA)
    for i in range(10):
        res = test_function(ETA, w)
        print("Thres", res)  # print the results list
    for i in range(10):
        res = test_function(ETA / 200, x)
        print("Lin", res)  # print the results list
    for i in range(10):
        res = test_function(ETA, y)
        print("sgd", res)  # print the results list
    print("\n\n")
......@@ -53,6 +53,8 @@ def make_testset(set_size):
if set_size > 1:
data[number].append(value)
for _ in range(set_size):
# scale is the standard deviation affecting the "spread"; it plays a role in the results
# new_digit = ideal[number] + rng.normal(loc=0, scale=0.3, size=(5, 4))
new_digit = ideal[number] + rng.normal(loc=0, scale=0.1, size=(5, 4))
data[number].append(new_digit)
return data
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment