author    Miguel <m.i@gmx.at>  2019-03-22 23:03:41 +0100
committer Miguel <m.i@gmx.at>  2019-03-22 23:03:41 +0100
commit    9c7b00c58ae0b4ece9f46a7226b59248b8b9eba6 (patch)
tree      330d6b5da1d87318628e2ca54242fb07382ee1ae /mnist/Neuronet.hs
parent    e1826a4c5975260c784d3f6c43fd53a7092d64e4 (diff)
getting nicer
Diffstat (limited to 'mnist/Neuronet.hs')
-rw-r--r--  mnist/Neuronet.hs  |  5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/mnist/Neuronet.hs b/mnist/Neuronet.hs
index e3344c7..ece288a 100644
--- a/mnist/Neuronet.hs
+++ b/mnist/Neuronet.hs
@@ -24,7 +24,7 @@ module Neuronet
     ,backprop
     )where
 
-import Numeric.LinearAlgebra (Matrix,Vector,tr,scale,cmap,(#>),randn,toList,fromList,toLists,fromLists,Container)
+import Numeric.LinearAlgebra (Matrix,Vector,tr,scale,cmap,(#>),randn,toList,fromList,toLists,fromLists,outer)
 import Data.List
 
 -- | A layer of our network consists of a weight matrix with input weights
@@ -58,10 +58,9 @@ asknet net x = snd . last $ wghtact net x
 -- split in the weight and bias partial derivatives respectively).
 -- Keep the required assumptions about the cost function in mind!
 backprop :: Neuronet -> Vector Double -> Vector Double -> [(Matrix Double,Vector Double)]
-backprop net x y = zipWith (\a e->(wm a e,e)) (x:map snd wa) (go $ zip ws wa)
+backprop net x y = zipWith (\a e->(outer e a,e)) (x:map snd wa) (go $ zip ws wa)
   where ws = (++[fromLists []]) . tail . map fst $ net
         wa = wghtact net x
-        wm a e = fromLists $ map (\e->map (*e) (toList a)) (toList e)
         go [(w,(z,a))] = [cost_derivative a y * cmap sigmoid' z]
         go ((w,(z,a)):lx) = let r@(e:_) = go lx in tr w #> e * cmap sigmoid' z : r
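
Note on the change: the removed wm helper built the weight-gradient matrix by hand, scaling the activation vector a by every component of the error vector e. That is exactly the outer product of e and a, which hmatrix already exposes as outer. A minimal standalone check (assuming the hmatrix package is installed; the vectors are made-up sample values, and main is just a demo harness):

import Numeric.LinearAlgebra (Matrix, Vector, fromList, fromLists, outer, toList, toLists)

main :: IO ()
main = do
  let e  = fromList [1, 2]       :: Vector Double  -- per-neuron errors (deltas)
      a  = fromList [10, 20, 30] :: Vector Double  -- previous-layer activations
      -- the removed helper, inlined: row i of the result is a scaled by e_i
      wm = fromLists (map (\ei -> map (* ei) (toList a)) (toList e)) :: Matrix Double
  print (outer e a)                          -- 2><3 matrix: rows [10,20,30] and [20,40,60]
  print (toLists (outer e a) == toLists wm)  -- True

Besides being shorter, outer stays inside hmatrix's matrix representation instead of round-tripping every entry through Haskell lists.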
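
The recursion in go also leans on two helpers that live outside this hunk: sigmoid' and cost_derivative. The comment's warning about cost-function assumptions matters because the base case multiplies cost_derivative elementwise with sigmoid'. A minimal sketch of what such helpers typically look like, assuming a logistic sigmoid and the quadratic cost C = ½‖a − y‖² (the module's actual definitions may differ):

import Numeric.LinearAlgebra (Vector)

sigmoid :: Double -> Double
sigmoid z = 1 / (1 + exp (-z))

-- derivative of the logistic function, written in terms of itself
sigmoid' :: Double -> Double
sigmoid' z = sigmoid z * (1 - sigmoid z)

-- gradient of the quadratic cost w.r.t. the output activations
-- (uses hmatrix's elementwise Num instance for Vector Double)
cost_derivative :: Vector Double -> Vector Double -> Vector Double
cost_derivative a y = a - y

Under these assumptions the base case of go is the textbook output-layer delta (a − y) elementwise-multiplied with σ'(z), and each earlier delta is tr w #> e times σ'(z), i.e. the transposed weights pulling the error back one layer.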