rm(sin): remove sin example
fix(ignoreBiases): was ignoring nodes, lol
fix(w2v): better logging and implementation
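The ignoreBiases fix itself is not part of the hunks below (they only touch the examples). As a rough illustration of what the message describes -- freezing the bias weights rather than the node weights during an update -- here is a minimal sketch, not Sibe's actual code, using hmatrix, a hypothetical updateWeights helper, and an assumed bias-as-last-row weight layout:

import Numeric.LinearAlgebra

-- Hypothetical layout: an (n + 1) x m weight matrix whose last row holds the biases.
-- With ignoreBiases = True the bias row of the gradient is zeroed before the step,
-- so the biases stay fixed while the node weights keep learning
-- (per the message above, the old code froze the nodes instead).
updateWeights :: Bool -> Double -> Matrix Double -> Matrix Double -> Matrix Double
updateWeights ignoreBiases rate weights grad = weights - scale rate grad'
  where
    grad'
      | ignoreBiases = takeRows (rows grad - 1) grad === konst 0 (1, cols grad)
      | otherwise    = grad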
@@ -1,35 +0,0 @@
-module Main where
-  import Sibe
-  import Numeric.LinearAlgebra
-  import Data.List
-  import Debug.Trace
-
-  main = do
-    let alpha = 0.5
-        epochs = 1000
-        a = (sigmoid, sigmoid')
-        lo = (sigmoid, (\_ -> 1)) -- cross entropy
-        -- a = (relu, relu')
-        rnetwork = randomNetwork 0 (-1, 1) 1 [(50, a)] (1, lo)
-
-        inputs = map (\a -> vector [a]) (reverse [0, 30, 45, 60, 90])
-        labels = map (\deg -> vector $ [sin $ deg * pi/180]) (reverse [0, 30, 45, 60, 90])
-
-        initial_cost = zipWith crossEntropy (map (`forward` rnetwork) inputs) labels
-
-    network <- run session inputs rnetwork labels alpha epochs
-
-    let results = map (`forward` network) inputs
-        rounded = map (map round . toList) results
-
-        cost = zipWith crossEntropy (map (`forward` network) inputs) labels
-
-    putStrLn "parameters: "
-    putStrLn $ "- inputs: " ++ show inputs
-    putStrLn $ "- labels: " ++ show labels
-    putStrLn $ "- learning rate: " ++ show alpha
-    putStrLn $ "- epochs: " ++ show epochs
-    {-putStrLn $ "- initial cost (cross-entropy): " ++ show initial_cost-}
-    putStrLn "results: "
-    putStrLn $ "- actual result: " ++ show results
-    {-putStrLn $ "- cost (cross-entropy): " ++ show cost-}
@@ -15,20 +15,43 @@ module Main where
   import Data.List.Split
 
   main = do
-    {-ds <- do
-      content <- readFile "examples/doc-classifier-data/data-reuters"
-      let splitted = splitOn (replicate 10 '-' ++ "\n") content
-          d = concatMap (tail . lines) (take 100 splitted)
-      return d-}
-    let ds = ["I like deep learning", "I like NLP", "I enjoy flying"]
+    sws <- lines <$> readFile "examples/stopwords"
+    {-ds <- do-}
+      {-content <- readFile "examples/doc-classifier-data/data-reuters"-}
+      {-let splitted = splitOn (replicate 10 '-' ++ "\n") content-}
+          {-d = concatMap (tail . lines) (take 100 splitted)-}
+      {-return $ removeWords sws d-}
+    --let ds = ["I like deep learning", "I like NLP", "I enjoy flying"]
+    let ds = ["the king loves the queen", "the queen loves the king",
+              "the dwarf hates the king", "the queen hates the dwarf",
+              "the dwarf poisons the king", "the dwarf poisons the queen"]
 
-    let session = def { learningRate = 0.8
-                      , batchSize = 10
-                      , epochs = 1000
+    let session = def { learningRate = 0.1
+                      , batchSize = 16
+                      , epochs = 100
                       } :: Session
-        w2v = def { docs = ds }:: Word2Vec
+        w2v = def { docs = ds
+                  , dimensions = 50
+                  , method = SkipGram
+                  , window = 3
+                  } :: Word2Vec
 
 
-    r <- word2vec w2v session
-    {-print r-}
+    (computed, vocvec) <- word2vec w2v session
+
+    mapM_ (\(w, v) -> do
+      putStr $ w ++ ": "
+      let similarities = map (similarity v . snd) computed
+      let sorted = sortBy (compare `on` similarity v . snd) computed
+      print . take 2 . drop 1 . reverse $ map fst sorted
+      ) computed
 
     return ()
+
+  removeWords :: [String] -> [String] -> [String]
+  removeWords ws documents =
+    map (rm ws) documents
+    where
+      rm list text =
+        unwords $ filter (`notElem` list) (words text)
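For context on the ranking loop in the second hunk: sorting by `similarity v . snd` and reversing is a nearest-neighbour lookup over the learned word vectors. A self-contained sketch under the assumption that Sibe's `similarity` is cosine similarity over hmatrix vectors (the `cosine` and `neighbours` names are made up for illustration):

import Data.Function (on)
import Data.List (sortBy)
import Numeric.LinearAlgebra

-- Cosine similarity between two embedding vectors.
cosine :: Vector Double -> Vector Double -> Double
cosine a b = (a <.> b) / (norm_2 a * norm_2 b)

-- Words ranked by similarity to a query vector, most similar first;
-- `take 2 . drop 1` of this list gives the two nearest neighbours
-- after skipping the query word itself, like the loop in the diff.
neighbours :: Vector Double -> [(String, Vector Double)] -> [String]
neighbours v = map fst . sortBy (flip compare `on` cosine v . snd)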