From 9c7b00c58ae0b4ece9f46a7226b59248b8b9eba6 Mon Sep 17 00:00:00 2001
From: Miguel
Date: Fri, 22 Mar 2019 23:03:41 +0100
Subject: getting nicer

---
 mnist/Neuronet.hs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

(limited to 'mnist/Neuronet.hs')

diff --git a/mnist/Neuronet.hs b/mnist/Neuronet.hs
index e3344c7..ece288a 100644
--- a/mnist/Neuronet.hs
+++ b/mnist/Neuronet.hs
@@ -24,7 +24,7 @@ module Neuronet
     ,backprop
     )where
 
-import Numeric.LinearAlgebra (Matrix,Vector,tr,scale,cmap,(#>),randn,toList,fromList,toLists,fromLists,Container)
+import Numeric.LinearAlgebra (Matrix,Vector,tr,scale,cmap,(#>),randn,toList,fromList,toLists,fromLists,outer)
 import Data.List
 
 -- | A layer of our network consists of a weight matrix with input weights
@@ -58,10 +58,9 @@ asknet net x = snd . last $ wghtact net x
 -- split in the weight and bias partial derivatives respectively).
 -- Keep the required assumptions about the cost function in mind!
 backprop :: Neuronet -> Vector Double -> Vector Double -> [(Matrix Double,Vector Double)]
-backprop net x y = zipWith (\a e->(wm a e,e)) (x:map snd wa) (go $ zip ws wa)
+backprop net x y = zipWith (\a e->(outer e a,e)) (x:map snd wa) (go $ zip ws wa)
   where ws = (++[fromLists []]) . tail . map fst $ net
         wa = wghtact net x
-        wm a e = fromLists $ map (\e->map (*e) (toList a)) (toList e)
         go [(w,(z,a))] = [cost_derivative a y * cmap sigmoid' z]
         go ((w,(z,a)):lx) =let r@(e:_)=go lx in tr w #> e * cmap sigmoid' z:r
 
-- 
cgit v1.2.3
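
For readers following along, below is a minimal standalone sketch (not part of the commit; it assumes the hmatrix package, which provides Numeric.LinearAlgebra, and uses made-up example values) illustrating that the removed wm helper and hmatrix's outer build the same matrix, namely the outer product of a layer's error vector with the previous layer's activations:

-- Sketch only: compares the hand-rolled outer product removed by this commit
-- with Numeric.LinearAlgebra's outer on a small example.
import Numeric.LinearAlgebra (Matrix, Vector, fromList, fromLists, toList, outer)

-- The helper removed by the commit: row i of the result is a scaled by e_i,
-- so (wm a e)_ij = e_i * a_j, the same entries as outer e a.
wm :: Vector Double -> Vector Double -> Matrix Double
wm a e = fromLists $ map (\ei -> map (* ei) (toList a)) (toList e)

main :: IO ()
main = do
  let a = fromList [1, 2, 3]    -- hypothetical activations of the previous layer
      e = fromList [0.5, -1]    -- hypothetical error of the current layer
  print (wm a e)                -- old construction
  print (outer e a)             -- new construction; prints the same 2x3 matrix

Swapping the explicit list construction for outer removes the intermediate lists and keeps the computation inside hmatrix's own vector operations, which is what makes the one-liner in the commit possible.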