1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
|
import Neuronet
import System.Random(randomRIO)
import Numeric.LinearAlgebra
import Data.List
import Data.List.Split
import Data.Tuple.Extra
import System.Random.Shuffle
import qualified Data.ByteString as BS
import System.IO
-- | A single training sample: (input vector, expected output vector).
type Sample = (Vector Double,Vector Double)
-- | A collection of training samples.
type Samples =[Sample]
-- | Shuffle the samples and split them into mini-batches of exactly
-- n elements; a trailing partial batch (fewer than n samples) is
-- discarded so every batch has uniform size.
batches :: Int -> Samples -> IO [Samples]
batches n xs = fullChunks <$> shuffleM xs
  where
    -- keep only the chunks of exactly n elements
    fullChunks = filter ((== n) . length) . chunksOf n
-- | Produce the given number of epochs, each a fresh random batching
-- of the samples into mini-batches of n elements.
--
-- Fixed: the original pattern-matched only on literal 0, so a
-- negative epoch count recursed forever; the guard now terminates
-- for any non-positive count (identical behavior for e >= 0).
epochs :: Int -> Int -> Samples -> IO [[Samples]]
epochs e n s
  | e <= 0    = return []
  | otherwise = (:) <$> batches n s <*> epochs (e-1) n s
-- | Train the network over every mini-batch of every epoch, threading
-- the updated network strictly through the batch list with foldl'.
--
-- Fixed: the original computed @unzip v@ twice per batch
-- (@fst.unzip@ and @snd.unzip@); the batch is now unzipped once.
training :: Double -> Neuronet -> [[Samples]] -> Neuronet
training r net s = foldl' step net (concat s)
  where
    -- one mini-batch gradient update
    step a batch = let (xs, ys) = unzip batch
                   in trainBatch r a xs ys
-- | Count the samples the network answers correctly: the predicate f
-- receives the expected output and the network's actual answer.
testing :: (Vector Double -> Vector Double -> Bool) -> Neuronet -> Samples -> Int
testing f net = length . filter correct
  where
    -- does the net's answer for input x match the expected output y?
    correct (x, y) = f y (asknet net x)
-- finally some learning and testing with MNIST
-- MNIST files from http://yann.lecun.com/exdb/mnist/
-- | Entry point: delegates to the MNIST training/testing demo.
main :: IO ()
main = mainMNIST
-- | Create Samples from a pair of MNIST IDX files: f1 holds the
-- images (16-byte header, then 28*28 bytes per image) and f2 the
-- labels (8-byte header, then one byte per label). Pixels are scaled
-- to [0,1]; labels are one-hot encoded into 10-element vectors.
--
-- Fixed: the original opened both files with openFile and never
-- closed the handles; BS.readFile reads strictly and closes the
-- handle itself. Also replaced @m >>= return . f@ with plain
-- bindings and @take 9 $ repeat 0@ with @replicate@.
read_samples :: FilePath -> FilePath -> IO Samples
read_samples f1 f2 = do
  imgBytes <- BS.readFile f1
  lblBytes <- BS.readFile f2
  let xs = map (fromList . map ((/255) . fromIntegral)) .
           chunksOf (28*28) . BS.unpack . BS.drop 16 $ imgBytes
      ys = map (fromList . onehot . fromIntegral) .
           BS.unpack . BS.drop 8 $ lblBytes
  return $ zip xs ys
  where
    -- one-hot encode a digit d (0..9) as a 10-element list
    onehot d = replicate d 0 ++ [1] ++ replicate (9 - d) 0
-- | MNIST main function: train a 784-30-10 network with mini-batch
-- SGD and report accuracy on the 10000-sample test set.
--
-- Fixed: the report string was emitted with @print@, which shows an
-- already-built String wrapped in quotation marks and with escapes;
-- @putStrLn@ prints it verbatim.
mainMNIST :: IO ()
mainMNIST = do
  let ep  = 1  -- number of epochs
      mbs = 10 -- mini-batch size
      lr  = 3  -- learning rate
  nt <- neuronet [28*28,30,10]
  smpl_train <- read_samples "train-images-idx3-ubyte" "train-labels-idx1-ubyte"
  smpl_test  <- read_samples "t10k-images-idx3-ubyte" "t10k-labels-idx1-ubyte"
  tr <- training (lr / fromIntegral mbs) nt <$> epochs ep mbs smpl_train
  let passed = testing chk tr smpl_test
  putStrLn $ show passed ++ "/10000 (" ++ show (fromIntegral passed/100) ++ "%)"
  where
    -- two outputs agree when their most-activated index matches
    chk y1 y2 = best y1 == best y2
    -- index (0..9) of the maximal element; ties resolve to the
    -- larger index, exactly as @last . sort@ on (value, index) pairs
    best x = snd . last . sort $ zip (toList x) [0..9]
-- | Just a quick and simple network created manually, used for
-- experimenting with the Neuronet primitives on fixed weights.
mainMANUAL :: IO ()
mainMANUAL = do
  -- a tiny 2-2-1 network: each layer is (weight matrix, bias vector)
  let nt =[ ((2><2)[0.2,0.3,0.4,0.5],fromList[0.6,-0.6]) -- L1
          ,((2><2)[-0.5,-0.4,-0.3,-0.2],fromList[0.4,0.5]) -- L2
          ,((1><2)[0.25,0.35],fromList[0.9]) -- L3
          ]
  print nt
  -- forward pass: weighted inputs and activations per layer
  print $ wghtact nt $ fromList [0.8,0.9]
  -- gradients for a single (input, target) pair
  print $ backprop nt (fromList [0.8,0.9]) (fromList [1])
  -- one single-sample update at learning rate 0.3
  print $ train 0.3 nt (fromList [0.8,0.9]) (fromList [1])
  -- one two-sample batch update at learning rate 0.15
  print $ trainBatch 0.15 nt [fromList [0.8,0.9],fromList [0.8,0.9]] [fromList [1],fromList [1]]
|