Documentation ¶
Index ¶
- Variables
- type ActivationFunctions
- type Layer
- type LossFunctions
- type MLPClassifier
- type MLPRegressor
- func (regr *MLPRegressor) Fit(X, Y *mat.Dense) base.Transformer
- func (regr *MLPRegressor) FitTransform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)
- func (regr *MLPRegressor) Predict(X, Y *mat.Dense) base.Regressor
- func (regr *MLPRegressor) Score(X, Y *mat.Dense) float64
- func (regr *MLPRegressor) SetOptimizer(creator OptimCreator)
- func (regr *MLPRegressor) Transform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)
- type OptimCreator
- type Optimizer
Examples ¶
Constants ¶
This section is empty.
Variables ¶
var Regressors = []base.Regressor{&MLPRegressor{}}
Regressors is the list of regressors in this package
var SupportedActivations = map[string]ActivationFunctions{
"identity": identityActivation{},
"logistic": logisticActivation{},
"tanh": tanhActivation{},
"relu": reluActivation{},
}
SupportedActivations is a map[string]ActivationFunctions for the supported activation functions (identity,logistic,tanh,relu)
var SupportedLoss = map[string]LossFunctions{
"square": squareLoss{},
"log": logLoss{},
"cross-entropy": crossEntropyLoss{},
}
SupportedLoss is the map[string]LossFunctions of available matrix loss function providers
Functions ¶
This section is empty.
Types ¶
type ActivationFunctions ¶
ActivationFunctions WIP
func NewActivation ¶
func NewActivation(name string) ActivationFunctions
NewActivation returns ActivationFunctions (Func and Grad) from its name (identity,logistic,tanh,relu)
type Layer ¶
type Layer struct { Activation string X1, Ytrue, Z, Ypred, NextX1, Ydiff, Hgrad *mat.Dense Theta, Grad, Update *mat.Dense Optimizer Optimizer }
Layer represents a layer in a neural network. It is mainly an Activation and a Theta
type LossFunctions ¶
LossFunctions is the interface for matLoss (matSquareLoss,...)
type MLPClassifier ¶
type MLPClassifier struct{ MLPRegressor }
MLPClassifier ...
Example ¶
ds := datasets.LoadBreastCancer() fmt.Println("Dims", base.MatDimsString(ds.X, ds.Y)) scaler := preprocessing.NewStandardScaler() X0, Y0 := scaler.Fit(ds.X, ds.Y).Transform(ds.X, ds.Y) nSamples, nOutputs := Y0.Dims() pca := preprocessing.NewPCA() X1, Y1 := pca.Fit(X0, Y0).Transform(X0, Y0) thres := .995 ExplainedVarianceRatio := 0. var nComponents int for nComponents = 0; nComponents < len(pca.ExplainedVarianceRatio) && ExplainedVarianceRatio < thres; nComponents++ { ExplainedVarianceRatio += pca.ExplainedVarianceRatio[nComponents] } fmt.Printf("ExplainedVarianceRatio %.3f %.3f\n", ExplainedVarianceRatio, pca.ExplainedVarianceRatio[0:nComponents]) fmt.Printf("%d components explain %.2f%% of variance\n", nComponents, thres*100.) X1 = base.MatDenseSlice(X1, 0, nSamples, 0, nComponents) poly := preprocessing.NewPolynomialFeatures(2) poly.IncludeBias = false X2, Y2 := poly.Fit(X1, Y1).Transform(X1, Y1) // fmt.Println(base.MatStr(base.MatDenseSlice(X2, 0, 2, 0, 5))) // fmt.Println(base.MatStr(base.MatDenseRowSlice(Y2, 0, 2))) m := NewMLPClassifier([]int{}, "relu", "adam", 0.) m.Loss = "cross-entropy" m.Epochs = 300 m.Fit(X2, Y2) Ypred := mat.NewDense(nSamples, nOutputs, nil) m.Predict(X2, Ypred) accuracy := metrics.AccuracyScore(Y2, Ypred, true, nil) fmt.Println("accuracy>0.994 ?", accuracy > 0.994) if accuracy <= 0.994 { fmt.Println("accuracy:", accuracy) }
Output: Dims 569,30 569,1 ExplainedVarianceRatio 0.996 [0.443 0.190 0.094 0.066 0.055 0.040 0.023 0.016 0.014 0.012 0.010 0.009 0.008 0.005 0.003 0.003 0.002 0.002 0.002 0.001] 20 components explain 99.50% of variance accuracy>0.994 ? true
func NewMLPClassifier ¶
func NewMLPClassifier(hiddenLayerSizes []int, activation string, solver string, Alpha float64) *MLPClassifier
NewMLPClassifier returns a *MLPClassifier with defaults. activation is one of logistic,tanh,relu. solver is one of agd,adagrad,rmsprop,adadelta,adam (one of the keys of base.Solvers), defaults to "adam". Alpha is the regularization parameter. lossName is one of square,log,cross-entropy (one of the keys of lm.LossFunctions), defaults to "log"
type MLPRegressor ¶
type MLPRegressor struct {
Shuffle, UseBlas bool
Optimizer base.OptimCreator
Activation string
Solver string
HiddenLayerSizes []int
RandomState *rand.Rand
Layers []*Layer
Alpha, L1Ratio, GradientClipping float64
Epochs, MiniBatchSize int
Loss string
// Loss value after Fit
JFirst, J float64
// contains filtered or unexported fields
}
MLPRegressor is a multilayer perceptron regressor
func NewMLPRegressor ¶
func NewMLPRegressor(hiddenLayerSizes []int, activation string, solver string, Alpha float64) *MLPRegressor
NewMLPRegressor returns a *MLPRegressor with defaults. activation is one of identity,logistic,tanh,relu. solver is one of agd,adagrad,rmsprop,adadelta,adam (one of the keys of base.Solvers), defaults to "adam". Alpha is the regularization parameter. Loss is one of square,log,cross-entropy. defaults: square for identity, log for logistic,tanh,relu
func (*MLPRegressor) Fit ¶
func (regr *MLPRegressor) Fit(X, Y *mat.Dense) base.Transformer
Fit fits an MLPRegressor
func (*MLPRegressor) FitTransform ¶
func (regr *MLPRegressor) FitTransform(X, Y *mat.Dense) (Xout, Yout *mat.Dense)
FitTransform is for Pipeline
func (*MLPRegressor) Predict ¶
func (regr *MLPRegressor) Predict(X, Y *mat.Dense) base.Regressor
Predict return the forward result
func (*MLPRegressor) Score ¶
func (regr *MLPRegressor) Score(X, Y *mat.Dense) float64
Score returns R2Score for square loss, else accuracy. see metrics package for other scores
func (*MLPRegressor) SetOptimizer ¶
func (regr *MLPRegressor) SetOptimizer(creator OptimCreator)
SetOptimizer changes Optimizer
type OptimCreator ¶
type OptimCreator = base.OptimCreator
OptimCreator is an Optimizer creator function