import Numeric.LinearAlgebra (Matrix,Vector,tr,scale,cmap,(#>),outer,randn,toList,fromList,toLists,fromLists,Container)
import System.Random(randomRIO)
import Data.List
-- | Our neural network is simply a list of layers, each consisting of
-- a weight matrix (one row per neuron, one column per input) and a
-- vector holding each neuron's bias.
type Layer = (Matrix Double,Vector Double)
type Neuronet = [Layer]
-- | Initialize a fresh neural network given the number of neurons on
-- each layer, as a list. Weights and biases are initialized randomly
-- from a Gaussian distribution with mean 0 and standard deviation 1.
neuronet :: [Int] -> IO Neuronet
neuronet l = mapM nl $ zip l (tail l)
  where nl (i,o) = (,) <$> randn o i <*> (fromList . head . toLists <$> randn 1 o)
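-- For example (matching 'main' below): neuronet [20,15,10] yields two layers,
-- a 15x20 weight matrix with a 15-element bias vector, and a 10x15 weight
-- matrix with a 10-element bias vector.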
-- | Given the input vector, calculate the `weighted inputs` and
-- `activations` for all layers of our network, ordered from the layer
-- after the input up to the output layer.
weightin :: Neuronet -> Vector Double -> [(Vector Double,Vector Double)]
weightin [] _ = []
weightin ((w,b):lx) x = (z,a):weightin lx a
  where z = w #> x + b     -- weighted input of this layer
        a = cmap sigmoid z -- activation, fed forward to the next layer
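-- The network's output is simply the activation of the last layer returned
-- by 'weightin'. A small convenience sketch (the name 'predict' is not part
-- of the original program; 'main' below inlines the same expression):
predict :: Neuronet -> Vector Double -> Vector Double
predict net = snd . last . weightin net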
-- | Given the input and output vectors, calculate the gradient of our
-- cost function using backpropagation. The result is a list with one
-- entry per layer, holding the partial derivatives with respect to the
-- weights and the biases respectively. Note the assumption about the
-- cost function: 'cost_derivative' must return its gradient with respect
-- to the output activations (here that of the quadratic cost, a-y).
backprop :: Neuronet -> Vector Double -> Vector Double -> [(Matrix Double,Vector Double)]
backprop net x y = zipWith (\a e->(e `outer` a,e)) (x:map snd wa) (go $ zip ws wa)
  where -- weights shifted by one layer: the error of layer l needs the
        -- weights of layer l+1 (the dummy matrix pads the output layer)
        ws = (++[fromLists []]) . tail . map fst $ net
        -- weighted inputs and activations of every layer
        wa = weightin net x
        -- walk backwards, collecting the error (delta) of every layer
        go [(_,(z,a))]    = [cost_derivative a y * cmap sigmoid' z]
        go ((w,(z,_)):lx) = let r@(e:_) = go lx in tr w #> e * cmap sigmoid' z : r
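-- For reference, the backpropagation equations the code above implements
-- (quadratic cost assumed, '*' taken element-wise):
--   delta_L = (a_L - y) * sigmoid'(z_L)                   -- output layer
--   delta_l = tr w_(l+1) #> delta_(l+1) * sigmoid'(z_l)   -- earlier layers
--   dC/db_l = delta_l                                     -- bias gradient
--   dC/dw_l = delta_l `outer` a_(l-1)                     -- weight gradient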
-- | Sigmoid function
sigmoid :: Double -> Double
sigmoid x = 1/(1+exp(-x))
-- | Derivative of sigmoid function
sigmoid' :: Double -> Double
sigmoid' x = sigmoid x * (1-sigmoid x)
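-- (since sigmoid x = 1/(1+exp(-x)), its derivative is
--  exp(-x)/(1+exp(-x))^2 = sigmoid x * (1 - sigmoid x))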
-- | Returns the vector of partial derivatives of the cost function with
-- respect to the output activations (quadratic cost: simply a-y)
cost_derivative :: Vector Double -> Vector Double -> Vector Double
cost_derivative a y = a-y
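-- For completeness, the cost itself (a sketch, not used anywhere else in
-- this program): the quadratic cost whose derivative is given above.
cost :: Vector Double -> Vector Double -> Double
cost a y = 0.5 * sum (map (^2) (toList (a-y)))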
-- | Train on one single sample: take one gradient-descent step with
-- learning rate r.
train :: Double -> Neuronet -> Vector Double -> Vector Double -> Neuronet
train r net x y = zipWith f net (backprop net x y)
  where f :: Layer -> Layer -> Layer
        f (a,b) (c,d) = (a-scale r c,b-scale r d)
-- | Train on a batch of samples. One possible implementation (unused by
-- 'main'): average the gradients over the batch and apply a single step.
trainBatch :: Double -> Neuronet -> [Vector Double] -> [Vector Double] -> Neuronet
trainBatch r net xs ys = zipWith upd net (foldl1 (zipWith add) (zipWith (backprop net) xs ys))
  where add (a,b) (c,d)   = (a+c,b+d)
        upd (w,b) (gw,gb) = let s = r / fromIntegral (length xs) in (w-scale s gw,b-scale s gb)
main = do
  let numtrain = 300000
  easynet <- neuronet [20,15,10] :: IO Neuronet
  samples <- filter ((<10).uncurry (+)) <$> gensamp numtrain
  let testsmp = filter ((<10).uncurry (+)) [(a,b) | a<-[0..10], b<-[0..10]]
  let trained = foldl' training easynet samples
  let res     = map (\(a,b) -> test trained (a,b) == a+b) testsmp
  let right   = length $ filter id res
  let total   = length res
  putStrLn $ "trained adding up to 10 with " ++ show numtrain ++ " random training samples"
  putStrLn $ "correct answers during testing: " ++ show right ++ "/" ++ show total ++ " (" ++ show (right*100`div`total) ++ "%)"
  putStrLn "what can I add for you?"
  str <- lines <$> getContents
  mapM_ (addit trained) str
  where -- encode a digit 0..9 as a 10-element vector: 0.99 at its index, 0.01 elsewhere
        zrs = replicate 9 0.01
        val x = take x zrs ++ [0.99] ++ drop x zrs
        -- decode a 10-element output: the index of the largest activation
        getVal x = snd . last . sort $ zip x [0..9]
        -- one gradient-descent step on a pair of digits and their sum
        training net (s1,s2) = train 0.25 net (fromList $ val s1++val s2) (fromList $ val $ s1+s2)
        -- run the network on two digits and decode its answer
        test net (s1,s2) = getVal . toList . snd . last $ weightin net (fromList $ val s1++val s2)
        -- n random pairs of digits 0..9
        gensamp :: Int -> IO [(Int,Int)]
        gensamp 0 = return []
        gensamp n = (\a b r -> (a,b):r) <$> randomRIO (0,9) <*> randomRIO (0,9) <*> gensamp (n-1)
        -- read two numbers from a line of input and let the network add them
        addit n x = do
          let w = words x
          let a = read (w!!0) :: Int
          let b = read (w!!1) :: Int
          putStrLn $ "I try to add " ++ show a ++ " and " ++ show b
          putStrLn $ "I think this is = " ++ show (test n (a,b))
          putStrLn "what two numbers do you want me to add master?"
{-
train_img="train-images-idx3-ubyte"
train_lbl="train-labels-idx1-ubyte"
sel=3333
main1 = do
h <- openFile train_img ReadMode
h2 <- openFile train_lbl ReadMode
BS.hGetContents h>>= showImg . BS.take (28*28) . BS.drop (28*28*sel) . BS.drop 16
BS.hGetContents h2>>= print. BS.unpack . BS.take 1 . BS.drop (sel) . BS.drop 8
showImg a | BS.null a = print "--"
| otherwise = do mapM_ putVal $ BS.unpack $ BS.take 28 a
putStr "\n"
showImg $ BS.drop 28 a
putVal v | v==0 = putStr " "
| v<50 = putStr "."
| v<128 = putStr "o"
| v<160 = putStr "O"
| otherwise = putStr "0"
main2 = do
-- 0 1 2 3 4 5 6 7 8 9
let m=(4><10)[ 0, 100,0, 100,0, 100,0, 100,0, 100
,0, 0, 100,100,0, 0, 100,100,0, 0
,0, 0, 0, 0, 100,100,100,100,0, 0
,0, 0, 0, 0, 0, 0, 0, 0, 100,100]
let b=vector . take 4 . repeat $ (-50)
let z= repeat 0.01
let vv x= vector $ take x z ++ [0.99] ++ take (9-x) z
let v = vv 6
let z= m #> v + b
let a= cmap sigmoid z
print m
print v
print z
print a
-}