
removed warnings

branch master · commit cf6d961e4c
4 changed files:
  1. src/Main.hs (13 lines changed)
  2. src/YML.hs (4 lines changed)
  3. src/YML/Dataset.hs (1 line changed)
  4. src/YML/LinearGradient.hs (23 lines changed)

src/Main.hs (13 lines changed)

@@ -1,7 +1,6 @@
 import Control.Monad (foldM)
 import System.Console.GetOpt
 import System.Environment (getArgs,getProgName)
-import System.IO (readFile)
 import YML.Dataset (parse, R)
 import YML.LinearGradient (cost, gradientDescent, nullF
@@ -11,19 +10,21 @@ import YML.LinearGradient (cost, gradientDescent, nullF
 -- and try to read it and put its values inside a dataset data structure
 main :: IO ()
 main = do
-  (options,file) <- parseArgs
+  (opts,file) <- parseArgs
   fileContent <- readFile file
   let trainingSet = parse fileContent
   -- print trainingSet
   putStr "Cost of training set using the null function: "
   print $ cost trainingSet (nullF trainingSet)
   putStr "Function minimizing the cost: "
-  print $ gradientDescent (optionsToParameters options) trainingSet
+  print $ gradientDescent (optionsToParameters opts) trainingSet
  where
   optionsToParameters :: Options -> Parameters
   optionsToParameters (Options valAlpha valThreshold) = Parameters valAlpha valThreshold

 data Options = Options { optAlpha :: R , optThreshold :: R} deriving Show

 defaultOptions :: Options
 defaultOptions = Options { optAlpha = 0.01, optThreshold = 10**(-10) }

 parseArgs :: IO (Options,String)
@@ -35,17 +36,17 @@ parseArgs = do
   case getOpt RequireOrder options argv of
     (opts, [file], []) ->
       case foldM (flip id) defaultOptions opts of
-        Right opts -> return (opts,file)
+        Right option -> return (option,file)
         Left errorMessage -> ioError (userError (errorMessage ++ "\n" ++ helpMessage))
     (_,_,errs) -> ioError (userError (concat errs ++ helpMessage))

 options :: [OptDescr (Options -> Either String Options)]
 options = [
-    Option ['a'] ["alpha"] (ReqArg (\a opts -> case reads a of
+    Option ['a'] ["alpha"] (ReqArg (\str opts -> case reads str of
       [(a, "")] | a>0 -> Right opts { optAlpha = a }
       _ -> Left "--alpha must be >0"
       ) "alpha") "set alpha value"
-  , Option ['t'] ["threshold"] (ReqArg (\a opts -> case reads a of
+  , Option ['t'] ["threshold"] (ReqArg (\str opts -> case reads str of
       [(t, "")] | t>0 -> Right opts { optThreshold = t }
       _ -> Left "--threshold must be >0"
       ) "threshold") "set threshold value"

src/YML.hs (4 lines changed)

@@ -1,3 +1,3 @@
 module YML where
-import YML.Dataset
-import YML.LinearGradient
+import YML.Dataset ()
+import YML.LinearGradient ()
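
Context for the change above: GHC's unused-import warning fires when an import contributes no used names. An empty import list asks for no names at all, yet still brings the module's type class instances into scope, which is all YML needs from its submodules. A hypothetical three-file sketch (all module names invented):

-- Types.hs: defines the type, no Show instance.
module Types where
data Thing = Thing

-- Orphans.hs: defines an orphan Show instance for Thing.
module Orphans where
import Types
instance Show Thing where show Thing = "a thing"

-- Main.hs: uses names from Types, but only the *instance* from
-- Orphans, so the instances-only form avoids the redundant-import
-- warning that a plain "import Orphans" would raise.
module Main where
import Types (Thing(..))
import Orphans ()

main :: IO ()
main = print Thing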

src/YML/Dataset.hs (1 line changed)

@@ -9,7 +9,6 @@ module YML.Dataset
   where
 import Data.List (intercalate)
-import Data.Vector.Unboxed ((!))
 import qualified Data.Vector.Unboxed as V

 type R = Double -- Just in case I want to change the precision later

src/YML/LinearGradient.hs (23 lines changed)

@@ -15,7 +15,9 @@ import YML.Dataset
 -- swap comments to show debug traces
 -- import Debug.Trace (trace)
 trace :: a -> b -> b
 trace _ x = x
 --

 data Parameters = Parameters { alpha :: R , threshold :: R} deriving Show
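
The block above keeps a no-op stand-in for Debug.Trace.trace so the tracing calls further down the file can stay in place; swapping the two comments turns real tracing back on. A runnable sketch of the same pattern:

-- import Debug.Trace (trace)   -- swap the comments to see trace output
trace :: a -> b -> b
trace _ x = x                   -- no-op: ignore the message, return the value

main :: IO ()
main = print (trace "computing..." (2 + 2 :: Int))
-- prints 4; with the real Debug.Trace import (and this stub deleted),
-- "computing..." would also be printed on stderr first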
@@ -38,7 +40,7 @@ h f v = V.foldl (\acc (x,t) -> acc + x*t) 0 (V.zip (xs v) (thetas f))
 -- | The function giving the cost of some linear function relatively
 -- to some dataset.
 cost :: Dataset -> LinearFunction -> Double
-cost (Dataset values) f = (sum (map ((^2).dist) values))/(2*m)
+cost (Dataset values) f = (sum (map ((**2).dist) values))/(2*m)
   where
     m = fromIntegral $ length values
     dist v = h f v - (y v)
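
The one-character change above, (^2) to (**2), is itself a warning fix: (^) takes an Integral exponent, so the literal 2 has to be defaulted (to Integer), which the type-defaults warning reports, while (**) is Floating exponentiation and keeps everything at type R = Double. A small standalone sketch of the two ways to silence it:

square1, square2 :: Double -> Double
square1 x = x ^ (2 :: Int)  -- annotating the exponent also avoids defaulting
square2 x = x ** 2          -- Floating exponent: nothing to default

main :: IO ()
main = print (square1 3.0, square2 3.0)  -- (9.0,9.0)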
@@ -59,25 +61,16 @@ oneStepGradient opts dataset f = if bad f then
         else
           trace ((show f) ++ ": " ++ show (cost dataset f)) $ LinearFunction newthetas
   where
-    bad f = V.any (\x -> isNaN x || isInfinite x) (thetas f)
+    bad phi = V.any (\x -> isNaN x || isInfinite x) (thetas phi)
     -- new_theta_j = theta_j - alpha * derive cost (theta0, theta1)
     newthetas = V.imap newcost (thetas f)
     newcost i x = x - (alpha opts) * cost' i dataset f

-coupleFilter :: [a] -> (a -> a -> Bool) -> [a]
-coupleFilter [] _ = []
-coupleFilter (x:[]) _ = []
-coupleFilter (x:y:xs) f = if f x y
-                          then
-                            x:coupleFilter (y:xs) f
-                          else
-                            coupleFilter (y:xs) f
-
 gradientDescent :: Parameters -> Dataset -> LinearFunction
-gradientDescent opts t = head $ coupleFilter gradients close
+gradientDescent opts t = snd $ head $ filter close $ zip gradients (tail gradients)
   where
-    close :: LinearFunction -> LinearFunction -> Bool
-    close (LinearFunction xs) (LinearFunction ys) = dist < threshold opts
+    close :: (LinearFunction,LinearFunction) -> Bool
+    close ((LinearFunction us),(LinearFunction vs)) = dist < threshold opts
       where
-        dist = V.foldl (\acc (x,y) -> acc + (x-y)^2 ) 0 (V.zip xs ys)
+        dist = V.foldl (\acc (u,v) -> acc + (u-v)**2 ) 0 (V.zip us vs)
     gradients = iterate (oneStepGradient opts t) (nullF t)
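
The rewrite above drops the hand-rolled coupleFilter in favor of a stock idiom: zip gradients (tail gradients) pairs each iterate with its successor, filter close keeps the pairs whose squared distance is under the threshold, and snd . head takes the later element of the first such pair. A minimal sketch of the same convergence test, with plain Doubles standing in for LinearFunction:

-- Stop an iteration once two consecutive values are within eps of
-- each other (squared distance), returning the later of the two.
converge :: Double -> [Double] -> Double
converge eps xs = snd $ head $ filter close $ zip xs (tail xs)
  where
    close (u, v) = (u - v) ** 2 < eps

main :: IO ()
main = print $ converge 1e-10 $ iterate (\x -> (x + 2 / x) / 2) 1.0
-- Newton's method for sqrt 2; prints roughly 1.4142135623746899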
