monoid 2021-02-13 13:20:59 +09:00
parent e7e9241d23
commit ac5556f182
2 changed files with 39 additions and 0 deletions

@@ -177,7 +177,36 @@ def relu(v):
    relu_f = lambda x: np.max([x,0])
    relu_diff = lambda x: 1 if x > 0 else 0
    return FunctionOp(relu_f,relu_diff,"Relu",v)
# row vector: softmax over the last axis
def softmaxHelp(i):
    e = np.exp(i)
    sumofe = np.sum(e, axis=e.ndim - 1)
    sumofe = sumofe.reshape(*sumofe.shape, 1)
    return e / sumofe
class SoftmaxWithNegativeLogLikelihood(OpTree):
    # row vector inputs; y is expected to be a one-hot label matrix
    def __init__(self, i, y):
        super().__init__()
        self.i = i
        self.y = y
        self.s = softmaxHelp(i)
        # negative log likelihood of the softmax probabilities
        self.v = -y * np.log(self.s)
    def __str__(self):
        return "SoftmaxWithNegativeLogLikelihoodOp"
    def mermaid_graph(self, writer):
        self.i.mermaid_graph(writer)
        writer.write(f'{id(self.i)}-->{id(self)}[SoftmaxWithNegativeLogLikelihoodOp]\n')
    def numpy(self):
        return self.v
    def softmax_numpy(self):
        return self.s
    def backprop(self, seed):
        # gradient of softmax + cross-entropy w.r.t. the input logits is (s - y)
        self.i.backprop(seed * (self.s - self.y))
class Variable(OpTree):
    def __init__(self,x):
        super().__init__()

p4.py Normal file

@@ -0,0 +1,10 @@
from sklearn import datasets
import numpy as np

# Variable is defined in the autodiff module changed in this commit
X, y = datasets.fetch_openml('mnist_784', version=1, return_X_y=True, cache=True, as_frame=False)
print(X, y)
gen: np.random.Generator = np.random.default_rng()
input_var = Variable(X)
# assumed shape: a (100, 784) weight matrix with standard-normal entries
weight = Variable(gen.normal(size=(100, 784)))
bias = Variable(np.array([1]))