author     Miguel <m.i@gmx.at>    2019-03-23 13:15:09 +0100
committer  Miguel <m.i@gmx.at>    2019-03-23 13:15:09 +0100
commit     1afb966ff3f995b4ac08b9ad30a77caee85721fd (patch)
tree       93f968fb0868ed652d45dd2d1274c0cd15885a45
parent     8281304e3a7bea0cb1678f899e371f8d4776f34f (diff)
more cleaning
-rw-r--r--  mnist/Neuronet.hs  20
-rw-r--r--  mnist/main.hs      42
2 files changed, 23 insertions(+), 39 deletions(-)
diff --git a/mnist/Neuronet.hs b/mnist/Neuronet.hs
index 6c3ea32..8a77622 100644
--- a/mnist/Neuronet.hs
+++ b/mnist/Neuronet.hs
@@ -1,12 +1,8 @@
module Neuronet
( Neuronet -- the neuronet
- ,neuronet -- initalize neuronet
- ,train -- train with one sample
- ,trainBatch -- train with batch
+ ,newnet -- initialize neuronet
+ ,train -- train with batch
,asknet -- ask the neural net
-
- ,wghtact
- ,backprop
)where
import Data.List
@@ -23,8 +19,8 @@ type Neuronet = [Layer]
-- | Initialize a fresh neural network given the number of neurons in
-- each layer, as a list. Weights and biases are initialized randomly
-- using a Gaussian distribution with mean 0 and standard deviation 1.
-neuronet :: [Int] -> IO Neuronet
-neuronet l = mapM nl $ zip l (tail l)
+newnet :: [Int] -> IO Neuronet
+newnet l = mapM nl $ zip l (tail l)
where nl (i,l) = (,) <$> randn l i <*>
(randn 1 l >>= return.fromList.head.toLists)
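
Note: with the rename, a fresh net is still one call. A minimal usage sketch (demoInit is hypothetical; it assumes hmatrix's Numeric.LinearAlgebra in scope, whose randn fills a matrix with i.i.d. N(0,1) draws, matching the comment above):

    -- Hypothetical sketch: inspect the layer shapes newnet produces.
    demoInit :: IO ()
    demoInit = do
      net <- newnet [28*28, 30, 10]   -- one (weights, biases) pair per layer
      mapM_ (\(w, b) -> print (rows w, cols w, size b)) net
      -- expected: (30,784,30) then (10,30,10)
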
@@ -62,13 +58,9 @@ sigmoid' x = sigmoid x * (1-sigmoid x)
cost_derivative :: Vector Double -> Vector Double -> Vector Double
cost_derivative a y = a-y
--- | Train on one single sample
-train :: Double -> Neuronet -> Vector Double -> Vector Double -> Neuronet
-train r net x y = zipWith (upd r) net (backprop net x y)
-
-- | Train on a batch of samples
-trainBatch :: Double -> Neuronet -> [Vector Double] -> [Vector Double] -> Neuronet
-trainBatch r net xs ys = zipWith (upd r) net bp
+train :: Double -> Neuronet -> [Vector Double] -> [Vector Double] -> Neuronet
+train r net xs ys = zipWith (upd r) net bp
where bp = foldl1' fc $ map (uncurry $ backprop net) (zip xs ys)
fc v a = zipWith ff v a
ff (a,b) (c,d) = (a+c,b+d)
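
Note: train now takes the whole mini-batch, running backprop once per sample and summing the per-layer (weight, bias) gradients before a single upd. The summing step in isolation, as a hypothetical standalone sketch (sumGrads is not in the patch; foldl1' comes from Data.List):

    -- Hypothetical illustration of the foldl1' fc / zipWith ff pattern:
    -- element-wise sum of per-sample, per-layer gradient pairs.
    sumGrads :: Num a => [[(a, a)]] -> [(a, a)]
    sumGrads = foldl1' (zipWith (\(a, b) (c, d) -> (a + c, b + d)))

    -- ghci> sumGrads [[(1,2),(3,4)], [(10,20),(30,40)]]
    -- [(11,22),(33,44)]
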
diff --git a/mnist/main.hs b/mnist/main.hs
index 02ba6f7..d1b5471 100644
--- a/mnist/main.hs
+++ b/mnist/main.hs
@@ -8,6 +8,7 @@ import Data.Tuple.Extra
import System.Random.Shuffle
import qualified Data.ByteString as BS
import System.IO
+import Control.DeepSeq
-- a single data-sample with input and expected output
type Sample = (Vector Double,Vector Double)
@@ -29,7 +30,7 @@ epochs e n s = (:) <$> batches n s <*> epochs (e-1) n s
training :: Bool -> (Neuronet->String) -> Double -> Neuronet -> [[Samples]] -> IO Neuronet
training tst tstf r net s = foldlM f net (zip s [1..])
where f nt (v,i) = do putStr $ "Epoch "++ show i ++ "...."
- let n = foldl' (\n x->trainBatch r n (fst.unzip $ x) (snd.unzip $ x)) nt v
+ let n = foldl' (\n x->train r n (fst.unzip $ x) (snd.unzip $ x)) nt v
putStrLn $ tstf n
return n
@@ -37,11 +38,6 @@ training tst tstf r net s = foldlM f net (zip s [1..])
testing :: (Vector Double -> Vector Double -> Bool) -> Neuronet -> Samples -> Int
testing f net s = length . filter id $ map (\(x,y)->f y (asknet net x)) s
--- finally some learning and testing with MNIST
--- MNIST files from http://yann.lecun.com/exdb/mnist/
-main :: IO ()
-main = mainMNIST
-
-- create Samples given two MNIST files for images and labels
read_samples :: FilePath -> FilePath -> IO Samples
read_samples f1 f2 = do
@@ -55,29 +51,25 @@ read_samples f1 f2 = do
where zrs= take 9 $ repeat 0
val x= take x zrs ++ [1] ++ drop x zrs
--- MNIST main function
-mainMNIST :: IO ()
-mainMNIST =do
-
- let ep = 10 -- number of epochs
- let mbs = 10 -- mini-batch size
- let lr = 3 -- learning rate
- let cap = 999999 -- cap number of training samples
+-- finally some learning and testing with MNIST
+-- MNIST files from http://yann.lecun.com/exdb/mnist/
+main = do s <- read_samples "train-images-idx3-ubyte" "train-labels-idx1-ubyte"
+ print $ s `deepseq` length s
- putStrLn "= Init ="
- str "Initializing Net"
- nt <- neuronet [28*28,30,10]
- done
+main2 :: IO ()
+main2 = do
- str "Reading Samples"
+ let ep = 10 -- number of epochs
+ let mbs = 10 -- mini-batch size
+ let lr = 3 -- learning rate
+ let lay = [28*28,30,10] -- number of neurons by layer
+ let cap = 999999 -- cap number of training samples
+
+ nt <- newnet lay
smpl_train <- take cap <$> read_samples "train-images-idx3-ubyte" "train-labels-idx1-ubyte"
smpl_test <- read_samples "t10k-images-idx3-ubyte" "t10k-labels-idx1-ubyte"
- done
-
- putStrLn "= Training ="
- tr <- epochs ep mbs smpl_train >>= training True (tst smpl_test) (lr/fromIntegral mbs) nt
-
- putStrLn "= THE END ="
+ tr <- epochs ep mbs smpl_train >>= training True (tst smpl_test) (lr/fromIntegral mbs) nt
+ putStrLn "end"
where chk y1 y2 = val y1 == val y2
val x=snd . last . sort $ zip (toList x) [0..9]
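
Note: val is an argmax over the ten output activations; sorting the (value, index) pairs and taking the last entry picks the index of the largest value (ties resolve to the higher index). A hedged one-call alternative, assuming hmatrix's maxIndex is available in the version used (it resolves ties to the first maximum instead):

    -- Hypothetical replacement for val.
    val' :: Vector Double -> Int
    val' = maxIndex   -- index of the largest activation, 0..9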