softmax
This commit is contained in:
parent
e7e9241d23
commit
ac5556f182
layer.py | 29
@@ -177,6 +177,35 @@ def relu(v):
    relu_f = lambda x: np.max([x,0])
    relu_diff = lambda x: 1 if x > 0 else 0
    return FunctionOp(relu_f,relu_diff,"Relu",v)

#row vector
def softmaxHelp(i):
    e = np.exp(i)
    # sum along the last axis, then restore a trailing axis of size 1
    # so the division below broadcasts across each row
    sumofe = np.sum(e,axis=e.ndim - 1)
    sumofe = sumofe.reshape(*sumofe.shape,1)
    return e / sumofe

class SoftmaxWithNegativeLogLikelihood(OpTree):
    #row vector
    def __init__(self, i, y):
        super().__init__()
        self.i = i
        self.y = y  # keep the labels for backprop
        self.s = softmaxHelp(i)
        self.v = -y*np.log(self.s)  # negative log likelihood of the softmax output

    def __str__(self):
        return "SoftmaxWithNegativeLogLikelihoodOp"

    def mermaid_graph(self,writer):
        self.i.mermaid_graph(writer)
        writer.write(f'{id(self.i)}-->{id(self)}[SoftmaxWithNegativeLogLikelihoodOp]\n')

    def numpy(self):
        return self.v

    def softmax_numpy(self):
        return self.s

    def backprop(self,seed):
        # for softmax followed by NLL, the gradient w.r.t. the logits is (s - y)
        self.i.backprop(seed * (self.s-self.y))

class Variable(OpTree):
    def __init__(self,x):
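
Quick sanity check (not part of the commit): a minimal numpy-only sketch confirming that softmaxHelp normalizes along the last axis and that the (s - y) term used in backprop matches a finite-difference gradient of -sum(y*log(s)).

import numpy as np

def softmaxHelp(i):
    e = np.exp(i)
    sumofe = np.sum(e, axis=e.ndim - 1)
    return e / sumofe.reshape(*sumofe.shape, 1)

logits = np.array([[1.0, 2.0, 3.0]])
y = np.array([[0.0, 0.0, 1.0]])          # one-hot label
s = softmaxHelp(logits)
print(s.sum(axis=-1))                    # each row sums to 1

eps = 1e-6
nll = lambda l: -(y * np.log(softmaxHelp(l))).sum()
grad = np.zeros_like(logits)
for k in range(logits.shape[1]):
    bumped = logits.copy()
    bumped[0, k] += eps
    grad[0, k] = (nll(bumped) - nll(logits)) / eps
print(np.allclose(grad, s - y, atol=1e-4))   # True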
p4.py | 10 (new file)
@@ -0,0 +1,10 @@
from sklearn import datasets
import numpy as np
from layer import Variable

X, y = datasets.fetch_openml('mnist_784', version=1, return_X_y=True, cache=True, as_frame=False)
print(X,y)

gen:np.random.Generator = np.random.default_rng()

input_var = Variable(X)
weight = Variable(gen.normal(size=(100,784)))  # standard-normal (100, 784) weight matrix
bias = Variable(np.array([1]))
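
Not part of the commit, just a shape check: assuming the (100, 784) weight is meant to act on the 784-feature rows of X as X @ W.T, this setup yields (n, 100) pre-activations with the bias broadcast over all units. Stand-in arrays below use the same shapes as p4.py (mnist_784 gives X of shape (70000, 784)); the layout is an assumption, not something the commit states.

import numpy as np

gen = np.random.default_rng()
X = gen.normal(size=(70000, 784))   # stand-in for the MNIST design matrix
W = gen.normal(size=(100, 784))     # same shape as the commit's weight
b = np.array([1])                   # broadcasts over all 100 units

out = X @ W.T + b
print(out.shape)                    # (70000, 100)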