-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path16-softmax.py
More file actions
70 lines (53 loc) · 1.69 KB
/
16-softmax.py
File metadata and controls
70 lines (53 loc) · 1.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import numpy as np
import nnfs
from nnfs.datasets import spiral_data
# NOTE(review): library init — presumably seeds numpy's RNG and sets a default
# float dtype so runs are reproducible; TODO confirm against the nnfs docs.
nnfs.init()
def mul(A, B):
    """Multiply two 2-D matrices (lists of rows) and return the product.

    Works with plain nested lists or anything row-indexable the same way
    (e.g. a 2-D numpy array for B).

    Raises:
        ValueError: if the inner dimensions disagree (cols of A != rows of B).
            The original code silently returned a list of ``None`` here,
            which only failed later, far from the real cause.
    """
    rows_A = len(A)
    cols_A = len(A[0]) if rows_A else 0  # guard: empty A has no A[0]
    rows_B = len(B)
    cols_B = len(B[0]) if rows_B else 0
    if cols_A != rows_B:
        raise ValueError(
            f"cannot multiply {rows_A}x{cols_A} by {rows_B}x{cols_B} matrices"
        )
    result = []
    for i in range(rows_A):
        row = [0] * cols_B
        for k in range(rows_B):
            a_ik = A[i][k]  # hoisted: invariant over the j loop
            for j in range(cols_B):
                row[j] += a_ik * B[k][j]
        result.append(row)
    return result
def sum(A, B):
    """Elementwise addition of two vectors, or of a matrix and a row vector.

    When A is a list of rows and B is a flat vector, B is added to every row
    of A (bias broadcasting). Otherwise A and B are zipped and added pairwise.

    NOTE: this intentionally shadows the builtin ``sum`` — the name is kept
    because other code in this file calls it.
    """
    matrix_plus_vector = (
        isinstance(B, list) and len(B) > 0 and not isinstance(B[0], list)
        and len(A) > 0 and isinstance(A[0], list)
    )
    if matrix_plus_vector:
        # broadcast the vector B across each row of the matrix A
        return [sum(row, B) for row in A]
    return [x + y for x, y in zip(A, B)]
class Layer_Dense:
    """A fully connected layer: output = inputs @ weights + biases.

    Weights are small random values; biases start at zero. The forward pass
    deliberately uses the hand-rolled ``mul``/``sum`` helpers rather than
    numpy, matching the rest of this file.
    """

    def __init__(self, n_inputs, n_neurons):
        # small random weights so early outputs stay near zero
        self.weights = 0.01 * np.random.randn(n_inputs, n_neurons)
        # one zero bias per neuron, kept as a plain Python list
        self.biases = [0.0 for _ in range(n_neurons)]

    def forward(self, inputs):
        """Compute the layer output for a batch and store it in self.output."""
        weighted = mul(inputs, self.weights)
        self.output = sum(weighted, self.biases)
class Activation_ReLu:
    """Rectified linear unit: clamps every negative value to zero."""

    def forward(self, inputs):
        """Store max(0, x) applied elementwise to the batch in self.output."""
        rectified = np.maximum(0, inputs)
        self.output = rectified
class Activation_Softmax:
    """Softmax activation: turns each row of scores into a probability
    distribution (non-negative, summing to 1)."""

    def forward(self, inputs):
        """Compute row-wise softmax of a 2-D batch and store it in self.output."""
        # subtract each row's max before exponentiating so exp() cannot overflow;
        # the shift cancels out in the normalisation
        shifted = inputs - np.max(inputs, axis=1, keepdims=True)
        exponentials = np.exp(shifted)
        row_totals = np.sum(exponentials, axis=1, keepdims=True)
        self.output = exponentials / row_totals
# Toy dataset: 2-D points arranged in 3 interleaved spiral arms.
X, y = spiral_data(samples=100, classes=3)

# Network: 2 inputs -> 3 hidden units (ReLU) -> 3 class scores (softmax).
# Construction order matters: each Layer_Dense draws from the shared RNG.
dense1 = Layer_Dense(2, 3)
activation1 = Activation_ReLu()
dense2 = Layer_Dense(3, 3)
activation2 = Activation_Softmax()

# Forward pass: thread the batch through each step in sequence.
data = X
for step in (dense1, activation1, dense2, activation2):
    step.forward(data)
    data = step.output

# First five rows of the class-probability output.
print(data[:5])