function.py
import math
import numpy as np


def soft_max(inputs):
    """
    Apply the logistic sigmoid elementwise to keep all output values between 0 and 1.
    (Despite the name, this is the sigmoid function, not the softmax.)
    :param inputs: numpy array of numbers - size 1 x n
    :return: numpy array of floats - size 1 x n, after squashing each value
    """
    output = []
    for num in inputs:
        # sigmoid: 1 / (1 + e^(-x))
        out_num = 1 / (1 + math.exp(-num))
        output.append(out_num)
    return np.array(output)
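# e.g. soft_max(np.array([0.0, 2.0, -2.0])) gives roughly [0.5, 0.88, 0.12]
# (illustrative values, not part of the original file).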


def normalize(inputs):
    """
    Normalize the output of the final layer so that the values sum to 1.
    :param inputs: numpy array of numbers - size 1 x n
    :return: numpy array of floats - size 1 x n, after normalizing
    """
    output = []
    base = sum(inputs)
    for num in inputs:
        output.append(num / base)
    return np.array(output)
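# e.g. normalize(np.array([1.0, 1.0, 2.0])) gives [0.25, 0.25, 0.5]
# (illustrative values, not part of the original file).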


def mean_squared_error(outputs, target):
    """
    Calculate the mean squared error between the predicted values and the target values.
    :param outputs: np array of normalized values from the neural network
    :param target: np array of target values with the correct index marked with a 1
    :return: mean squared error as a float
    """
    if len(outputs) != len(target):
        raise IndexError("arrays must be the same length!")
    coeff = 1 / len(outputs)
    out_sum = 0
    for fi, yi in zip(target, outputs):
        out_sum += (fi - yi) ** 2
    return coeff * out_sum
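

# A minimal end-to-end sketch of how these helpers compose (assumed inputs,
# not part of the original file): squash raw scores, normalize them into a
# distribution, then score against a one-hot target.
if __name__ == "__main__":
    raw = np.array([2.0, -1.0, 0.5])
    squashed = soft_max(raw)            # elementwise sigmoid, values in (0, 1)
    probs = normalize(squashed)         # rescale so the values sum to 1
    target = np.array([1.0, 0.0, 0.0])  # correct class marked with a 1
    print("probs:", probs, "sum:", probs.sum())
    print("mse:", mean_squared_error(probs, target))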