MLP.py
"""MLP Pandas."""
from gen_weights import gen_weights
from forward_prop import forward_prop
from backward_prop import backward_prop


class MLP:
    def __init__(self, X, y, hidden_layers):
        """
        Define the hyperparameters.

        These dictate the structure of the ANN. Most are
        derived automatically, but they may also be tuned here.
        """
        self.X = X
        self.y = y
        self.input_nodes = X.shape[1]
        self.hidden_neurons = X.shape[1] + 1
        self.hidden_layers = hidden_layers
        self.output_neuron = 1
        self.learning_rate = 1
        self.iterations = 500
        self.weights = {}
        self.check_hidden_input()
        gen_weights(self)

    def check_hidden_input(self):
        """
        Validate the hidden-layer count.

        If the number of hidden layers provided is not a
        positive integer, default to 1.
        """
        if not isinstance(self.hidden_layers, int) or self.hidden_layers < 1:
            print("Valid integer not provided. Defaulting to 1")
            self.hidden_layers = 1

    def train_mlp(self):
        """
        The entire training process in one loop: alternate forward
        and backward propagation, updating the weights/synapses
        on every pass.
        """
        for _ in range(self.iterations):
            forward_prop(self)
            backward_prop(self)
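A minimal usage sketch follows, assuming gen_weights, forward_prop and backward_prop mutate the MLP instance in place as the class above implies, and that X and y are pandas DataFrames. The toy dataset and the driver file name are hypothetical and not part of this repository.

# usage_example.py -- hypothetical driver script, not part of this repository
import pandas as pd

from MLP import MLP

# Toy illustrative dataset: two input features and one binary target (XOR).
X = pd.DataFrame({"x1": [0, 0, 1, 1], "x2": [0, 1, 0, 1]})
y = pd.DataFrame({"y": [0, 1, 1, 0]})

# Construct the network with 2 hidden layers; __init__ validates the
# hidden-layer count and calls gen_weights to initialise the synapses.
mlp = MLP(X, y, hidden_layers=2)

# Run the 500 alternating forward/backward passes configured in __init__.
mlp.train_mlp()

# After training, the learned synapses live in the weights dictionary.
print(mlp.weights.keys())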