From ac5556f18297b1c490f8c6b1b9134e5e6565bb2f Mon Sep 17 00:00:00 2001
From: monoid
Date: Sat, 13 Feb 2021 13:20:59 +0900
Subject: [PATCH] softmax

---
 layer.py | 32 ++++++++++++++++++++++++++++++++
 p4.py    | 15 +++++++++++++++
 2 files changed, 47 insertions(+)
 create mode 100644 p4.py

diff --git a/layer.py b/layer.py
index 94235f2..70c1f85 100644
--- a/layer.py
+++ b/layer.py
@@ -177,7 +177,39 @@ def relu(v):
     relu_f = lambda x: np.max([x,0])
     relu_diff = lambda x: 1 if x > 0 else 0
     return FunctionOp(relu_f,relu_diff,"Relu",v)
+# Softmax along the last axis; expects row vectors (class axis last).
+def softmaxHelp(i):
+    # Subtract the per-row max before exponentiating: same result
+    # mathematically, but avoids overflow for large logits.
+    e = np.exp(i - np.max(i, axis=i.ndim - 1, keepdims=True))
+    sumofe = np.sum(e, axis=e.ndim - 1, keepdims=True)
+    return e / sumofe
+# Fused softmax + negative-log-likelihood node of the op graph.
+class SoftmaxWithNegativeLogLikelihood(OpTree):
+    # i: logits (row vector / batch of rows), y: one-hot target of same shape.
+    def __init__(self, i, y):
+        super().__init__()
+        self.i = i
+        self.y = y  # keep the target: backprop needs it
+        self.s = softmaxHelp(i)
+        # NLL is -y*log(softmax), not -y*softmax.
+        self.v = -y * np.log(self.s)
+
+    def __str__(self):
+        return f"SoftmaxWithNegativeLogLikelihoodOp"
+
+    def mermaid_graph(self,writer):
+        self.i.mermaid_graph(writer)
+        writer.write(f'{id(self.i)}-->{id(self)}[SoftmaxWithNegativeLogLikelihoodOp]\n')
+
+    def numpy(self):
+        return self.v
+
+    def softmax_numpy(self):
+        return self.s
+
+    def backprop(self,seed):
+        # d(NLL)/d(logits) = softmax - target; use the stored self.y,
+        # not a module-level global.
+        self.i.backprop(seed * (self.s - self.y))
+
 class Variable(OpTree):
     def __init__(self,x):
         super().__init__()
diff --git a/p4.py b/p4.py
new file mode 100644
index 0000000..0935fe8
--- /dev/null
+++ b/p4.py
@@ -0,0 +1,15 @@
+from sklearn import datasets
+import numpy as np
+
+from layer import Variable
+
+X, y = datasets.fetch_openml('mnist_784', version=1, return_X_y=True, cache=True, as_frame= False)
+print(X,y)
+
+gen:np.random.Generator = np.random.default_rng()
+
+input_var = Variable(X)
+# gen.normal(100, 784) would draw a SINGLE scalar from N(loc=100, scale=784);
+# we want a (100, 784) matrix of standard-normal weights.
+weight = Variable(gen.normal(size=(100, 784)))
+bias = Variable(np.array([1]))