neuralNetwork

package
v0.0.0-...-0705f78
Published: Apr 30, 2018 License: MIT Imports: 10 Imported by: 0

Documentation


Constants

This section is empty.

Variables

var Regressors = []base.Regressor{&MLPRegressor{}}

Regressors is the list of regressors in this package

var SupportedActivations = map[string]ActivationFunctions{
	"identity": identityActivation{},
	"logistic": logisticActivation{},
	"tanh":     tanhActivation{},
	"relu":     reluActivation{},
}

SupportedActivations is a map[string]ActivationFunctions for the supported activation functions (identity, logistic, tanh, relu)

var SupportedLoss = map[string]LossFunctions{
	"square":        squareLoss{},
	"log":           logLoss{},
	"cross-entropy": crossEntropyLoss{},
}

SupportedLoss is the map[string]LossFunctions of available loss function providers (square, log, cross-entropy)

Functions

This section is empty.

Types

type ActivationFunctions

type ActivationFunctions interface {
	Func(z, h *mat.Dense)
	Grad(z, h, grad *mat.Dense)
}

ActivationFunctions is the interface implemented by activation functions: Func computes the activation h from the pre-activation z, and Grad computes the gradient of the activation at z

func NewActivation

func NewActivation(name string) ActivationFunctions

NewActivation returns the ActivationFunctions (Func and Grad) for a given name (identity, logistic, tanh, relu)
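For instance, a minimal sketch in the style of the Example below (gonum's mat package); it assumes Func writes the element-wise activation of z into h:

act := NewActivation("logistic")
z := mat.NewDense(1, 3, []float64{-1, 0, 1})
h := mat.NewDense(1, 3, nil)
// assumed: Func fills h with the logistic of each element of z
act.Func(z, h)
fmt.Printf("%.3f\n", mat.Formatted(h))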

type Layer

type Layer struct {
	Activation                                string
	X1, Ytrue, Z, Ypred, NextX1, Ydiff, Hgrad *mat.Dense
	Theta, Grad, Update                       *mat.Dense
	Optimizer                                 Optimizer
}

Layer represents a layer in a neural network. It is mainly an Activation and a Theta weight matrix

func NewLayer

func NewLayer(inputs, outputs int, activation string, optimCreator base.OptimCreator, thetaSlice, gradSlice, updateSlice []float64, rnd func() float64) *Layer

NewLayer creates a randomly initialized layer
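A hedged construction sketch. The slice lengths assume Theta is an (inputs+1) x outputs matrix (one bias row), which is not stated here, and base.Solvers["adam"] is assumed to be a valid base.OptimCreator per the NewMLPClassifier doc below:

inputs, outputs := 4, 2
n := (inputs + 1) * outputs // assumed size of Theta, including a bias row
theta := make([]float64, n)
grad := make([]float64, n)
update := make([]float64, n)
// assumed: rnd is used to randomly initialize Theta
layer := NewLayer(inputs, outputs, "relu", base.Solvers["adam"], theta, grad, update, rand.NormFloat64)
fmt.Println(layer.Activation) // "relu"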

type LossFunctions

type LossFunctions interface {
	Loss(Ytrue, Ypred, Grad *mat.Dense) float64
}

LossFunctions is the interface for matrix loss functions (squareLoss, logLoss, crossEntropyLoss)

func NewLoss

func NewLoss(name string) LossFunctions

NewLoss creates a LossFunctions by its name (square, log, cross-entropy)
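A minimal sketch. The exact scaling of the returned loss (sum vs. mean) is not documented here, so only its sign is checked, and Loss is assumed to fill Grad with the gradient with respect to Ypred:

loss := NewLoss("square")
Ytrue := mat.NewDense(2, 1, []float64{1, 0})
Ypred := mat.NewDense(2, 1, []float64{.9, .2})
grad := mat.NewDense(2, 1, nil)
J := loss.Loss(Ytrue, Ypred, grad) // assumed: grad receives dJ/dYpred
fmt.Println(J > 0)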

type MLPClassifier

type MLPClassifier struct{ MLPRegressor }

MLPClassifier is a multilayer perceptron classifier. It embeds MLPRegressor

Example
// Load the breast cancer dataset and standardize its features.
ds := datasets.LoadBreastCancer()
fmt.Println("Dims", base.MatDimsString(ds.X, ds.Y))

scaler := preprocessing.NewStandardScaler()
X0, Y0 := scaler.Fit(ds.X, ds.Y).Transform(ds.X, ds.Y)
nSamples, nOutputs := Y0.Dims()

// Reduce dimensionality with PCA, keeping the smallest number of
// components whose cumulative explained-variance ratio reaches 99.5%.
pca := preprocessing.NewPCA()
X1, Y1 := pca.Fit(X0, Y0).Transform(X0, Y0)
thres := .995
ExplainedVarianceRatio := 0.
var nComponents int
for nComponents = 0; nComponents < len(pca.ExplainedVarianceRatio) && ExplainedVarianceRatio < thres; nComponents++ {
	ExplainedVarianceRatio += pca.ExplainedVarianceRatio[nComponents]
}
fmt.Printf("ExplainedVarianceRatio %.3f %.3f\n", ExplainedVarianceRatio, pca.ExplainedVarianceRatio[0:nComponents])
fmt.Printf("%d components explain %.2f%% of variance\n", nComponents, thres*100.)
X1 = base.MatDenseSlice(X1, 0, nSamples, 0, nComponents)

// Expand the retained components with degree-2 polynomial features.
poly := preprocessing.NewPolynomialFeatures(2)
poly.IncludeBias = false
X2, Y2 := poly.Fit(X1, Y1).Transform(X1, Y1)

// fmt.Println(base.MatStr(base.MatDenseSlice(X2, 0, 2, 0, 5)))
// fmt.Println(base.MatStr(base.MatDenseRowSlice(Y2, 0, 2)))

// Train a classifier with no hidden layer and cross-entropy loss,
// then check its accuracy on the training set.
m := NewMLPClassifier([]int{}, "relu", "adam", 0.)
m.Loss = "cross-entropy"

m.Epochs = 300
m.Fit(X2, Y2)
Ypred := mat.NewDense(nSamples, nOutputs, nil)
m.Predict(X2, Ypred)

accuracy := metrics.AccuracyScore(Y2, Ypred, true, nil)
fmt.Println("accuracy>0.994 ?", accuracy > 0.994)
if accuracy <= 0.994 {
	fmt.Println("accuracy:", accuracy)
}
Output:

Dims  569,30 569,1
ExplainedVarianceRatio 0.996 [0.443 0.190 0.094 0.066 0.055 0.040 0.023 0.016 0.014 0.012 0.010 0.009 0.008 0.005 0.003 0.003 0.002 0.002 0.002 0.001]
20 components explain 99.50% of variance
accuracy>0.994 ? true

func NewMLPClassifier

func NewMLPClassifier(hiddenLayerSizes []int, activation string, solver string, Alpha float64) *MLPClassifier

NewMLPClassifier returns a *MLPClassifier with defaults. activation is one of logistic, tanh, relu. solver is one of sgd, adagrad, rmsprop, adadelta, adam (one of the keys of base.Solvers); it defaults to "adam". Alpha is the regularization parameter. Loss is one of square, log, cross-entropy (one of the keys of SupportedLoss); it defaults to "log"
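Beyond the breast-cancer Example above, a minimal XOR sketch; the hidden-layer size 4 and "tanh" are illustrative choices, and convergence depends on the random initialization, so this is a sketch rather than a testable Example:

X := mat.NewDense(4, 2, []float64{0, 0, 0, 1, 1, 0, 1, 1})
Y := mat.NewDense(4, 1, []float64{0, 1, 1, 0})
m := NewMLPClassifier([]int{4}, "tanh", "adam", 0.)
m.Epochs = 1000
m.Fit(X, Y)
Ypred := mat.NewDense(4, 1, nil)
m.Predict(X, Ypred)
// mirrors the accuracy check in the Example above
fmt.Println(metrics.AccuracyScore(Y, Ypred, true, nil))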

func (*MLPClassifier) Predict

func (regr *MLPClassifier) Predict(X, Y *mat.Dense) base.Regressor

Predict returns the forward result for MLPClassifier

func (*MLPClassifier) Transform

func (regr *MLPClassifier) Transform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)

Transform for pipeline

type MLPRegressor

type MLPRegressor struct {
	Shuffle, UseBlas bool
	Optimizer        base.OptimCreator
	Activation       string
	Solver           string
	HiddenLayerSizes []int
	RandomState      *rand.Rand

	Layers                           []*Layer
	Alpha, L1Ratio, GradientClipping float64
	Epochs, MiniBatchSize            int

	Loss string

	// Loss value after Fit
	JFirst, J float64
	// contains filtered or unexported fields
}

MLPRegressor is a multilayer perceptron regressor

func NewMLPRegressor

func NewMLPRegressor(hiddenLayerSizes []int, activation string, solver string, Alpha float64) *MLPRegressor

NewMLPRegressor returns a *MLPRegressor with defaults. activation is one of identity, logistic, tanh, relu. solver is one of sgd, adagrad, rmsprop, adadelta, adam (one of the keys of base.Solvers); it defaults to "adam". Alpha is the regularization parameter. Loss is one of square, log, cross-entropy; it defaults to square for identity and to log for logistic, tanh, relu
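A minimal regression sketch: with no hidden layer and the identity activation the network reduces to a linear model, and Loss defaults to square, so Score (below) returns an R2 score. The score reached after 500 epochs is solver-dependent, so this is an illustration, not a guaranteed output:

X := mat.NewDense(100, 1, nil)
Y := mat.NewDense(100, 1, nil)
for i := 0; i < 100; i++ {
	x := float64(i) / 100.
	X.Set(i, 0, x)
	Y.Set(i, 0, 2.*x+1.) // target: y = 2x + 1
}
regr := NewMLPRegressor([]int{}, "identity", "adam", 0.)
regr.Epochs = 500
regr.Fit(X, Y)
fmt.Println(regr.Score(X, Y) > 0.99)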

func (*MLPRegressor) Fit

func (regr *MLPRegressor) Fit(X, Y *mat.Dense) base.Transformer

Fit fits an MLPRegressor

func (*MLPRegressor) FitTransform

func (regr *MLPRegressor) FitTransform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)

FitTransform is for Pipeline

func (*MLPRegressor) Predict

func (regr *MLPRegressor) Predict(X, Y *mat.Dense) base.Regressor

Predict returns the forward result

func (*MLPRegressor) Score

func (regr *MLPRegressor) Score(X, Y *mat.Dense) float64

Score returns the R2 score for square loss, else accuracy. See the metrics package for other scores

func (*MLPRegressor) SetOptimizer

func (regr *MLPRegressor) SetOptimizer(creator OptimCreator)

SetOptimizer changes the Optimizer creator

func (*MLPRegressor) Transform

func (regr *MLPRegressor) Transform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)

Transform is for Pipeline

type OptimCreator

type OptimCreator = base.OptimCreator

OptimCreator is an Optimizer creator function

type Optimizer

type Optimizer = base.Optimizer

Optimizer comes from base
