opto-0.1.0.0: General-purpose performant numeric optimization library

Copyright: (c) Justin Le 2019
License: BSD3
Maintainer: justin@jle.im
Stability: experimental
Portability: non-portable
Safe Haskell: None
Language: Haskell2010

Numeric.Opto.Optimizer

Description

Defines various numeric optimizers. Most of these implementations are taken directly from http://ruder.io/optimizing-gradient-descent/


Documentation

steepestDescent Source #

Arguments

:: LinearInPlace m c a 
=> c

learning rate

-> Grad m r a

gradient

-> Opto m r a 

Steepest descent, according to some learning rate. The simplest optimizer.
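
For reference, the update rule is simply x' = x - η ∇f(x). A minimal standalone scalar sketch of that rule (illustrative only; the actual optimizer operates in-place via LinearInPlace):

    -- One step of plain steepest descent: x' = x - eta * grad x.
    sgdStep :: Double -> (Double -> Double) -> Double -> Double
    sgdStep eta grad x = x - eta * grad x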

newtype Momentum c Source #

Hyperparameter for momentum

Constructors

Momentum 

Fields

Instances
Eq c => Eq (Momentum c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: Momentum c -> Momentum c -> Bool #

(/=) :: Momentum c -> Momentum c -> Bool #

Show c => Show (Momentum c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> Momentum c -> ShowS #

show :: Momentum c -> String #

showList :: [Momentum c] -> ShowS #

Fractional c => Default (Momentum c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: Momentum c #

momentum Source #

Arguments

:: LinearInPlace m c a 
=> Momentum c

configuration

-> c

learning rate

-> Grad m r a

gradient

-> Opto m r a 

Steepest descent with momentum (Qian, 1999).
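
The update rule carries a velocity term that decays by a constant factor each step. An illustrative standalone scalar sketch of the rule as given in the Ruder article, not the library's in-place implementation:

    -- v' = gamma * v + eta * grad x;  x' = x - v'
    momentumStep
        :: Double              -- decay factor gamma (commonly 0.9)
        -> Double              -- learning rate eta
        -> (Double -> Double)  -- gradient of the objective
        -> (Double, Double)    -- (parameter, previous velocity)
        -> (Double, Double)    -- (updated parameter, new velocity)
    momentumStep gamma eta grad (x, v) = (x - v', v')
      where
        v' = gamma * v + eta * grad x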

newtype Nesterov c Source #

Hyperparameter for nesterov

Constructors

Nesterov 

Fields

Instances
Eq c => Eq (Nesterov c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: Nesterov c -> Nesterov c -> Bool #

(/=) :: Nesterov c -> Nesterov c -> Bool #

Show c => Show (Nesterov c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> Nesterov c -> ShowS #

show :: Nesterov c -> String #

showList :: [Nesterov c] -> ShowS #

Fractional c => Default (Nesterov c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: Nesterov c #

nesterov Source #

Arguments

:: LinearInPlace m c a 
=> Nesterov c

configuration

-> c

learning rate

-> Grad m r a

gradient

-> Opto m r a 

Nesterov accelerated gradient (NAG) (Nesterov, 1983)
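
NAG is momentum with the gradient evaluated at a look-ahead point. An illustrative standalone scalar sketch (not the library's implementation):

    -- v' = gamma * v + eta * grad (x - gamma * v);  x' = x - v'
    nesterovStep
        :: Double -> Double -> (Double -> Double)
        -> (Double, Double)  -- (parameter, previous velocity)
        -> (Double, Double)
    nesterovStep gamma eta grad (x, v) = (x - v', v')
      where
        v' = gamma * v + eta * grad (x - gamma * v)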

data Adagrad c Source #

Hyperparameters for adagrad

Constructors

Adagrad 

Fields

Instances
Eq c => Eq (Adagrad c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: Adagrad c -> Adagrad c -> Bool #

(/=) :: Adagrad c -> Adagrad c -> Bool #

Show c => Show (Adagrad c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> Adagrad c -> ShowS #

show :: Adagrad c -> String #

showList :: [Adagrad c] -> ShowS #

Fractional c => Default (Adagrad c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: Adagrad c #

adagrad :: forall m r a c. (LinearInPlace m c a, Floating a, Real c) => Adagrad c -> Grad m r a -> Opto m r a Source #

Adaptive Gradient (Duchi, Hazan, Singer, 2011). Note that if the state is not reset periodically, updates tend to zero fairly quickly.
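
Adagrad divides the learning rate by the square root of the running sum of squared gradients, which is why updates shrink toward zero if that state is never reset. An illustrative standalone scalar sketch of the rule:

    adagradStep
        :: Double              -- learning rate eta
        -> Double              -- epsilon, for numerical stability
        -> (Double -> Double)  -- gradient
        -> (Double, Double)    -- (parameter, accumulated squared gradients)
        -> (Double, Double)
    adagradStep eta eps grad (x, hist) =
        (x - eta / sqrt (hist' + eps) * g, hist')
      where
        g     = grad x
        hist' = hist + g * g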

data Adadelta c Source #

Hyperparameters for adadelta

Constructors

Adadelta 

Fields

Instances
Eq c => Eq (Adadelta c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: Adadelta c -> Adadelta c -> Bool #

(/=) :: Adadelta c -> Adadelta c -> Bool #

Show c => Show (Adadelta c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> Adadelta c -> ShowS #

show :: Adadelta c -> String #

showList :: [Adadelta c] -> ShowS #

Fractional c => Default (Adadelta c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: Adadelta c #

adadelta :: forall m r a c. (LinearInPlace m c a, Floating a, Real c) => Adadelta c -> Grad m r a -> Opto m r a Source #

The Adadelta extension of Adagrad (Zeiler, 2012), which mitigates Adagrad's monotonically decreasing learning rate.
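
Adadelta swaps Adagrad's ever-growing sum for exponentially decaying averages of squared gradients and squared updates. An illustrative standalone scalar sketch (per the Ruder article, not the library's code):

    adadeltaStep
        :: Double                    -- decay rho
        -> Double                    -- epsilon
        -> (Double -> Double)        -- gradient
        -> (Double, Double, Double)  -- (param, avg sq. gradient, avg sq. update)
        -> (Double, Double, Double)
    adadeltaStep rho eps grad (x, eg, ex) = (x + dx, eg', ex')
      where
        g   = grad x
        eg' = rho * eg + (1 - rho) * g * g
        dx  = negate (sqrt (ex + eps) / sqrt (eg' + eps)) * g
        ex' = rho * ex + (1 - rho) * dx * dx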

data RMSProp c Source #

Hyperparameters for rmsProp

Constructors

RMSProp 

Fields

Instances
Eq c => Eq (RMSProp c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: RMSProp c -> RMSProp c -> Bool #

(/=) :: RMSProp c -> RMSProp c -> Bool #

Show c => Show (RMSProp c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> RMSProp c -> ShowS #

show :: RMSProp c -> String #

showList :: [RMSProp c] -> ShowS #

Fractional c => Default (RMSProp c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: RMSProp c #

rmsProp :: forall m r a c. (LinearInPlace m c a, Floating a, Real c) => RMSProp c -> Grad m r a -> Opto m r a Source #

RMSProp, the unpublished adaptive learning rate method described by Geoff Hinton in his Coursera course.
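
RMSProp keeps a decaying average of squared gradients and divides the learning rate by its square root; Hinton suggests a decay of 0.9 and a learning rate around 0.001. An illustrative standalone scalar sketch:

    rmsPropStep
        :: Double              -- decay gamma (suggested 0.9)
        -> Double              -- learning rate eta (suggested 0.001)
        -> Double              -- epsilon
        -> (Double -> Double)  -- gradient
        -> (Double, Double)    -- (parameter, avg squared gradient)
        -> (Double, Double)
    rmsPropStep gamma eta eps grad (x, eg) =
        (x - eta / sqrt (eg' + eps) * g, eg')
      where
        g   = grad x
        eg' = gamma * eg + (1 - gamma) * g * g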

data Adam c Source #

Hyperparameters for adam

Constructors

Adam 

Fields

Instances
Eq c => Eq (Adam c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: Adam c -> Adam c -> Bool #

(/=) :: Adam c -> Adam c -> Bool #

Show c => Show (Adam c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> Adam c -> ShowS #

show :: Adam c -> String #

showList :: [Adam c] -> ShowS #

Fractional c => Default (Adam c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: Adam c #

adam Source #

Arguments

:: (RealFloat c, Floating a, LinearInPlace m c a, Mutable m c) 
=> Adam c

configuration

-> Grad m r a

gradient

-> Opto m r a 

Adaptive Moment Estimation (Kingma, Ba, 2015)
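
Adam maintains bias-corrected decaying averages of both the gradient and its square. An illustrative standalone scalar sketch of the rule from the paper (not the library's in-place implementation); t is the 1-based step count:

    adamStep
        :: Double -> Double -> Double -> Double  -- eta, beta1, beta2, epsilon
        -> Int                                   -- step number t (>= 1)
        -> (Double -> Double)                    -- gradient
        -> (Double, Double, Double)              -- (param, 1st moment, 2nd moment)
        -> (Double, Double, Double)
    adamStep eta beta1 beta2 eps t grad (x, m, v) = (x', m', v')
      where
        g    = grad x
        m'   = beta1 * m + (1 - beta1) * g
        v'   = beta2 * v + (1 - beta2) * g * g
        mHat = m' / (1 - beta1 ^ t)   -- bias correction
        vHat = v' / (1 - beta2 ^ t)
        x'   = x - eta * mHat / (sqrt vHat + eps)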

data AdaMax c Source #

Hyperparameters for adaMax

Constructors

AdaMax 

Fields

Instances
Eq c => Eq (AdaMax c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

(==) :: AdaMax c -> AdaMax c -> Bool #

(/=) :: AdaMax c -> AdaMax c -> Bool #

Show c => Show (AdaMax c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

showsPrec :: Int -> AdaMax c -> ShowS #

show :: AdaMax c -> String #

showList :: [AdaMax c] -> ShowS #

Fractional c => Default (AdaMax c) Source # 
Instance details

Defined in Numeric.Opto.Optimizer

Methods

def :: AdaMax c #

adaMax Source #

Arguments

:: (RealFloat c, Metric c a, LinearInPlace m c a, Mutable m c) 
=> AdaMax c

configuration

-> Grad m r a

gradient

-> Opto m r a 

AdaMax, a variation of Adam based on the infinity norm (Kingma and Ba, 2015).
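
AdaMax replaces Adam's second-moment estimate with a running maximum of gradient magnitudes, which removes the need for an epsilon term. An illustrative standalone scalar sketch:

    adaMaxStep
        :: Double -> Double -> Double  -- eta, beta1, beta2
        -> Int                         -- step number t (>= 1)
        -> (Double -> Double)          -- gradient
        -> (Double, Double, Double)    -- (param, 1st moment, running max)
        -> (Double, Double, Double)
    adaMaxStep eta beta1 beta2 t grad (x, m, u) = (x', m', u')
      where
        g  = grad x
        m' = beta1 * m + (1 - beta1) * g
        u' = max (beta2 * u) (abs g)
        x' = x - (eta / (1 - beta1 ^ t)) * m' / u'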