chore(README): unnecessary space

This commit is contained in:
Mahdi Dibaiee 2016-10-17 01:59:48 +03:30
parent ace9b7f5f9
commit 44f2ae372a

View File

@@ -4,9 +4,9 @@ A simple Machine Learning library.
## Simple neural network
```haskell
import Numeric.Sibe
let a = (sigmoid, sigmoid') -- activation function
-- random network, seed 0, values between -1 and 1,
-- two inputs, two nodes in hidden layer and a single output
rnetwork = randomNetwork 0 (-1, 1) 2 [(2, a)] (1, a)
@@ -27,19 +27,19 @@ A simple Machine Learning library.
initialCost = crossEntropy session
-- run gradient descent
-- you can also use `sgd`, see the notmnist example
newsession <- run gd session
let results = map (`forward` newsession) inputs
    rounded = map (map round . toList) results
    cost = crossEntropy newsession
putStrLn $ "- initial cost (cross-entropy): " ++ show initialCost
putStrLn $ "- actual result: " ++ show results
putStrLn $ "- rounded result: " ++ show rounded
putStrLn $ "- cost (cross-entropy): " ++ show cost
```