neural

package module
v0.0.0-...-9daa5a6
Published: Apr 5, 2019 License: MIT Imports: 5 Imported by: 0

README

Neural

Neural is an experimental neural network implementation written in Go by a complete newbie to the topic, for self-educational purposes.

The code is currently in need of review from someone much more experienced than I am.

Usage

See here for examples.

Documentation

See here for GoDoc.

Documentation

Overview

Package neural is an experimental neural network implementation written in Go by a complete newbie to the topic, for self-educational purposes.

Constants

This section is empty.

Variables

var ActivationFuncLeakyRelu = ActivationFuncReLuGenerator(0.01)

ActivationFuncLeakyRelu is the leaky rectified linear unit function with its scalar for non-positive values set to 0.01 by default.

var ActivationFuncLinear = ActivationFuncLinearGenerator(1.0, 0.0)

ActivationFuncLinear is the linear function with slope 1 that passes through (0, 0), and its derivative.

var ActivationFuncReLu = ActivationFuncReLuGenerator(0.0)

ActivationFuncReLu is the rectified linear unit function and its derivative.

var ActivationFuncSigmoid = &ActivationFunc{
	f: func(z float64) float64 {
		return 1.0 / (1.0 + math.Exp(-z))
	},
	fprime: func(z float64) float64 {
		s := 1.0 / (1.0 + math.Exp(-z))
		return s * (1.0 - s)
	},
}

ActivationFuncSigmoid is the logistic sigmoid activation function and its derivative.

var ActivationFuncSwish = &ActivationFunc{
	f: func(z float64) float64 {
		return z / (1.0 + math.Exp(-z))
	},
	fprime: func(z float64) float64 {
		s := 1.0 / (1.0 + math.Exp(-z))
		sP := s * (1.0 - s)
		return s + z*sP
	},
}

ActivationFuncSwish is the self-gated activation function and its derivative.

Swish: z * sigmoid(z)

Its derivative, obtained by the product rule (f(z) * g(z))' = f'(z) * g(z) + f(z) * g'(z):

	= z' * sigmoid(z) + z * sigmoid'(z)
	= sigmoid(z) + z * sigmoid(z) * (1 - sigmoid(z))

var ActivationFuncTanH = &ActivationFunc{
	f: func(z float64) float64 {
		return math.Tanh(z)
	},
	fprime: func(z float64) float64 {
		return 1.0 - math.Pow(math.Tanh(z), 2.0)
	},
}

ActivationFuncTanH is the hyperbolic tangent function and its derivative.

var ErrInvalidActivationFunction = errors.New("invalid activation function")

ErrInvalidActivationFunction is returned when an activation function input is invalid.

var ErrInvalidFunction = errors.New("invalid function input for activation function pair")

ErrInvalidFunction is returned when an input function is not valid for creating an activation function.

var ErrInvalidInput = errors.New("invalid input size")

ErrInvalidInput is returned when prediction or training input size does not match neural network input layer size. This error is also returned when setting weights and biases with invalid matrix dimensions.

var ErrInvalidLayerSize = errors.New("invalid layer size")

ErrInvalidLayerSize is returned when a layer size is less than or equal to 0.

var ErrNetworkIsFinalized = errors.New("network is already finalized")

ErrNetworkIsFinalized is returned when attempting to add a hidden layer after the output layer has already been added and the network is finalized.

Functions

This section is empty.

Types

type ActivationFunc

type ActivationFunc struct {
	// contains filtered or unexported fields
}

ActivationFunc is the structure that represents a forward function and its derivative function.

func ActivationFuncLinearGenerator

func ActivationFuncLinearGenerator(a, b float64) *ActivationFunc

ActivationFuncLinearGenerator generates a linear function whose forward function returns the value a*z + b and whose derivative returns a.

func ActivationFuncReLuGenerator

func ActivationFuncReLuGenerator(scalar float64) *ActivationFunc

ActivationFuncReLuGenerator is a ReLu / leaky ReLu / randomized ReLu generator function; the scalar argument is used as the slope for non-positive inputs.
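
A minimal usage sketch for both generators; the slope, intercept, and layer sizes below are illustrative values, not package defaults:

package main

import (
	"fmt"

	"github.com/mraufc/neural"
)

func main() {
	// A leaky-ReLu-style activation with slope 0.2 for non-positive inputs.
	leaky := neural.ActivationFuncReLuGenerator(0.2)

	// A linear activation f(z) = 2*z + 1 with constant derivative 2.
	linear := neural.ActivationFuncLinearGenerator(2.0, 1.0)

	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(3, leaky)
	n.AddOutputLayer(1, linear)
}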

func NewActivationFunc

func NewActivationFunc(f, fprime func(float64) float64) (*ActivationFunc, error)

NewActivationFunc creates a new ActivationFunc structure from a forward function f and its derivative fprime.
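
A minimal sketch of a custom activation built with NewActivationFunc, using softplus as the illustration; softplus is not part of this package, and its derivative is the logistic sigmoid:

package main

import (
	"fmt"
	"math"

	"github.com/mraufc/neural"
)

func main() {
	// Softplus: f(z) = ln(1 + e^z), with derivative f'(z) = 1 / (1 + e^-z).
	softplus, err := neural.NewActivationFunc(
		func(z float64) float64 { return math.Log(1.0 + math.Exp(z)) },
		func(z float64) float64 { return 1.0 / (1.0 + math.Exp(-z)) },
	)
	if err != nil {
		fmt.Println(err)
		return
	}

	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(3, softplus)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)
}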

type Network

type Network struct {
	// contains filtered or unexported fields
}

Network is the basic neural network structure.

func NewNetwork

func NewNetwork(inputLayerSize int) (*Network, error)

NewNetwork initializes a new neural network. The initialized network can not be used for anything meaningful yet. 0 or more hidden layers must be added first and the network must be finalized by adding the output layer.

Example

ExampleNewNetwork is a simple adder network with 1 hidden layer.

package main

import (
	"fmt"

	"github.com/mraufc/neural"
	"gonum.org/v1/gonum/mat"
)

func main() {
	// Simple adder network
	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(2, neural.ActivationFuncReLu)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)

	weights := make([]*mat.Dense, 2)
	weights[0] = mat.NewDense(2, 2, []float64{1, 0, 0, 1})
	weights[1] = mat.NewDense(1, 2, []float64{1, 1})
	err = n.SetWeights(weights)
	if err != nil {
		fmt.Println(err)
		return
	}

	biases := make([]*mat.Dense, 2)
	biases[0] = mat.NewDense(1, 2, []float64{0, 0})
	biases[1] = mat.NewDense(1, 1, []float64{0})
	err = n.SetBiases(biases)
	if err != nil {
		fmt.Println(err)
		return
	}

	data := mat.NewDense(9, 2, []float64{1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9})
	expected := mat.NewDense(9, 1, []float64{2, 4, 6, 8, 10, 12, 14, 16, 18})
	predicted, err := n.Predict(data)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("expected   :", expected)
	fmt.Println("predicted  :", predicted)

}
Output:

expected   : &{{9 1 [2 4 6 8 10 12 14 16 18] 1} 9 1}
predicted  : &{{9 1 [2 4 6 8 10 12 14 16 18] 1} 9 1}

func (*Network) AddHiddenLayer

func (nn *Network) AddHiddenLayer(hiddenLayerSize int, hiddenLayerActivationFunc *ActivationFunc) error

AddHiddenLayer adds a hidden layer with the given size.

func (*Network) AddOutputLayer

func (nn *Network) AddOutputLayer(outputLayerSize int, outputActivationFunc *ActivationFunc) error

AddOutputLayer adds an output layer to the neural network and also finalizes the network structure.

func (*Network) Biases

func (nn *Network) Biases() []*mat.Dense

Biases returns a copy of all biases in the network. Bias dimensions are 1 x current layer length, meaning biases are row vectors.

func (*Network) CheckGradients

func (nn *Network) CheckGradients(data, expected *mat.Dense, epsilonFunc func(value float64) (epsilon float64), validFunc func(numericalGrad, backpGrad, val float64) (valid bool, skip bool)) (bool, error)

CheckGradients is a utility function that compares numerical gradients and backpropagation gradients. This is a computationally heavy operation and should only be used for ensuring that backpropagation in the network is working as expected. Returns an error in case of a dimension mismatch.

This function takes four arguments: the input data, the expected output, a function that generates an epsilon value from the original weight or bias value (epsilonFunc), and a function that reports whether the numerical-to-backpropagated gradient comparison passes, fails, or should be skipped (validFunc).

The numerical gradients are calculated as [cost(w+epsilon) - cost(w-epsilon)] / [2 * epsilon]

Example (ActivationFunc_relu)
package main

import (
	"fmt"
	"math"

	"github.com/mraufc/neural"
	"gonum.org/v1/gonum/mat"
)

func main() {
	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(5, neural.ActivationFuncReLu)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)

	// It is possible to set weights and biases so that this example behaves in a more predictable way.
	// weights := make([]*mat.Dense, 2)
	// weights[0] = mat.NewDense(5, 2, []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1})
	// weights[1] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// err = n.SetWeights(weights)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	// biases := make([]*mat.Dense, 2)
	// biases[0] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// biases[1] = mat.NewDense(1, 1, []float64{0})
	// err = n.SetBiases(biases)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	data := mat.NewDense(9, 2, []float64{1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9})
	expected := mat.NewDense(9, 1, []float64{2, 4, 6, 8, 10, 12, 14, 16, 18})

	// constant epsilon of 1e-5
	epsilon := 1e-5
	epsilonFunc := func(value float64) float64 {
		return epsilon
	}

	validFunc := func(numericalGrad, backpGrad, val float64) (valid bool, skip bool) {
		if val <= epsilon || (numericalGrad == 0.0 && backpGrad == 0.0) {
			skip = true
			return
		}

		relDiff := math.Abs(numericalGrad-backpGrad) / math.Max(math.Abs(numericalGrad), math.Abs(backpGrad))
		absDiff := math.Abs(numericalGrad - backpGrad)
		if relDiff <= 1e-7 || absDiff <= 1e-8 {
			valid = true
		} else {
			fmt.Printf("relative difference %v is not less than or equal to 1e-7 and absolute difference %v is not less than or equal to 1e-8 \n", relDiff, absDiff)
		}
		return
	}

	gradientCheck, err := n.CheckGradients(data, expected, epsilonFunc, validFunc)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("gradient check before training is successful:", gradientCheck)

	// do some training for 500 iterations

	// constant learning rate
	lrFunc := func(currentIteration int) float64 {
		return 1e-4
	}
	// converge when cost is less than 1e-4
	convFunc := func(prevCost, currentCost float64) bool {
		return currentCost < 1e-4
	}
	err = n.Train(data, expected, 500, lrFunc, convFunc)
	if err != nil {
		fmt.Println(err)
		return
	}

	gradientCheck, err = n.CheckGradients(data, expected, epsilonFunc, validFunc)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("gradient check after training is successful:", gradientCheck)

}
Output:

gradient check before training is successful: true
gradient check after training is successful: true
Example (ActivationFunc_tanh)
package main

import (
	"fmt"
	"math"

	"github.com/mraufc/neural"
	"gonum.org/v1/gonum/mat"
)

func main() {
	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(5, neural.ActivationFuncTanH)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)

	// It is possible to set weights and biases so that this example behaves in a more predictable way.
	// weights := make([]*mat.Dense, 2)
	// weights[0] = mat.NewDense(5, 2, []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1})
	// weights[1] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// err = n.SetWeights(weights)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	// biases := make([]*mat.Dense, 2)
	// biases[0] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// biases[1] = mat.NewDense(1, 1, []float64{0})
	// err = n.SetBiases(biases)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	data := mat.NewDense(9, 2, []float64{1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9})
	expected := mat.NewDense(9, 1, []float64{2, 4, 6, 8, 10, 12, 14, 16, 18})

	// constant epsilon of 1e-5
	epsilon := 1e-5
	epsilonFunc := func(value float64) float64 {
		return epsilon
	}

	validFunc := func(numericalGrad, backpGrad, val float64) (valid bool, skip bool) {
		relDiff := math.Abs(numericalGrad-backpGrad) / math.Max(math.Abs(numericalGrad), math.Abs(backpGrad))
		absDiff := math.Abs(numericalGrad - backpGrad)
		if relDiff <= 1e-7 || absDiff <= 1e-8 {
			valid = true
		} else {
			fmt.Printf("relative difference %v is not less than or equal to 1e-7 and absolute difference %v is not less than or equal to 1e-8 \n", relDiff, absDiff)
		}
		return
	}

	gradientCheck, err := n.CheckGradients(data, expected, epsilonFunc, validFunc)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("gradient check before training is successful:", gradientCheck)

	// do some training for 500 iterations

	// constant learning rate
	lrFunc := func(currentIteration int) float64 {
		return 1e-4
	}
	// converge when cost is less than 1e-4
	convFunc := func(prevCost, currentCost float64) bool {
		return currentCost < 1e-4
	}
	err = n.Train(data, expected, 500, lrFunc, convFunc)
	if err != nil {
		fmt.Println(err)
		return
	}

	gradientCheck, err = n.CheckGradients(data, expected, epsilonFunc, validFunc)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("gradient check after training is successful:", gradientCheck)

}
Output:

gradient check before training is successful: true
gradient check after training is successful: true

func (*Network) Cost

func (nn *Network) Cost(data, expected *mat.Dense) (float64, error)

Cost returns the cost, which is calculated as sum((predicted - expected)^2) / numExamples.
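
For concreteness, a hedged, by-hand computation of that formula; the values are arbitrary, and this uses plain gonum rather than the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// sum((predicted - expected)^2) / numExamples for a 3-example, 1-output case.
	predicted := mat.NewDense(3, 1, []float64{1.5, 2.0, 3.5})
	expected := mat.NewDense(3, 1, []float64{1.0, 2.0, 3.0})

	var diff mat.Dense
	diff.Sub(predicted, expected)

	numExamples, _ := predicted.Dims()
	sum := 0.0
	for i := 0; i < numExamples; i++ {
		d := diff.At(i, 0)
		sum += d * d
	}
	fmt.Println(sum / float64(numExamples)) // (0.25 + 0 + 0.25) / 3 ≈ 0.1667
}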

func (*Network) Gradients

func (nn *Network) Gradients(data, expected *mat.Dense) (weightGradients, biasGradients []*mat.Dense, err error)

Gradients returns clones of all weight and bias gradients after completing a backpropagation pass.

func (*Network) NumericalGradients

func (nn *Network) NumericalGradients(data, expected *mat.Dense, epsilonFunc func(value float64) (epsilon float64)) (weightGradients, biasGradients []*mat.Dense, err error)

NumericalGradients calculates and returns the gradients of all weights and biases in the network. This is a computationally heavy operation and should only be used for inspecting the network; in other words, it is only suitable for debugging. The function takes three inputs: the data, the expected output, and a function that generates an epsilon value based on the original weight or bias value. The numerical gradients are calculated as [cost(w+epsilon) - cost(w-epsilon)] / [2 * epsilon]
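
For intuition, a minimal sketch of that central-difference formula applied to a scalar cost; centralDiff is a hypothetical helper for illustration, not part of this package:

package main

import (
	"fmt"
	"math"
)

// centralDiff applies [cost(w+epsilon) - cost(w-epsilon)] / (2 * epsilon).
func centralDiff(cost func(w float64) float64, w, epsilon float64) float64 {
	return (cost(w+epsilon) - cost(w-epsilon)) / (2.0 * epsilon)
}

func main() {
	// For cost(w) = w^2 the true gradient at w = 3 is 6.
	cost := func(w float64) float64 { return w * w }
	grad := centralDiff(cost, 3.0, 1e-5)
	fmt.Println(math.Abs(grad-6.0) < 1e-8) // true
}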

func (*Network) Predict

func (nn *Network) Predict(data *mat.Dense) (*mat.Dense, error)

Predict predicts the outcome for the input dataset.

func (*Network) SetBiases

func (nn *Network) SetBiases(biases []*mat.Dense) error

SetBiases sets all biases in the network. Bias dimensions are 1 x current layer length, meaning biases are row vectors.

func (*Network) SetWeights

func (nn *Network) SetWeights(weights []*mat.Dense) error

SetWeights sets all weights in the network. Weight dimensions are current layer length x previous layer length.
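
A minimal sketch of the expected shapes for a hypothetical 2-3-1 network (input size 2, one hidden layer of size 3, output size 1); the zero-valued matrices only illustrate dimensions:

package main

import (
	"fmt"

	"github.com/mraufc/neural"
	"gonum.org/v1/gonum/mat"
)

func main() {
	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(3, neural.ActivationFuncReLu)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)

	// Weights: current layer length x previous layer length.
	weights := []*mat.Dense{
		mat.NewDense(3, 2, nil), // hidden (3) x input (2)
		mat.NewDense(1, 3, nil), // output (1) x hidden (3)
	}
	if err := n.SetWeights(weights); err != nil {
		fmt.Println(err)
		return
	}

	// Biases: 1 x current layer length, i.e. row vectors.
	biases := []*mat.Dense{
		mat.NewDense(1, 3, nil), // hidden layer biases
		mat.NewDense(1, 1, nil), // output layer bias
	}
	if err := n.SetBiases(biases); err != nil {
		fmt.Println(err)
		return
	}
}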

func (*Network) Train

func (nn *Network) Train(data, expected *mat.Dense, maxIterations int, lrFunc func(currentIteration int) (learningRate float64), convFunc func(prevCost, currentCost float64) (converged bool)) error

Train trains the network to fit the expected data. maxIterations is the iteration count limit; a value of 0 or less means the training will continue until convergence.

Example
package main

import (
	"fmt"

	"github.com/mraufc/neural"
	"gonum.org/v1/gonum/mat"
)

func main() {
	n, err := neural.NewNetwork(2)
	if err != nil {
		fmt.Println(err)
		return
	}
	n.AddHiddenLayer(5, neural.ActivationFuncReLu)
	n.AddOutputLayer(1, neural.ActivationFuncLinear)

	// It is possible to set weights and biases so that this example behaves in a more predictable way.
	// weights := make([]*mat.Dense, 2)
	// weights[0] = mat.NewDense(5, 2, []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1})
	// weights[1] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// err = n.SetWeights(weights)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	// biases := make([]*mat.Dense, 2)
	// biases[0] = mat.NewDense(1, 5, []float64{1, 1, 1, 1, 1})
	// biases[1] = mat.NewDense(1, 1, []float64{0})
	// err = n.SetBiases(biases)
	// if err != nil {
	// 	fmt.Println(err)
	// 	return
	// }

	data := mat.NewDense(9, 2, []float64{1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9})
	expected := mat.NewDense(9, 1, []float64{2, 4, 6, 8, 10, 12, 14, 16, 18})

	cost, err := n.Cost(data, expected)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("cost is less than before training 1e-3:", cost < 1e-3)

	// constant learning rate
	lrFunc := func(currentIteration int) float64 {
		return 1e-4
	}
	// converge when cost is less than 1e-3
	convFunc := func(prevCost, currentCost float64) bool {
		return currentCost < 1e-3
	}
	err = n.Train(data, expected, 0, lrFunc, convFunc)
	if err != nil {
		fmt.Println(err)
		return
	}
	cost, err = n.Cost(data, expected)
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println("cost is less than after training 1e-3:", cost < 1e-3)
}
Output:

cost is less than 1e-3 before training: false
cost is less than 1e-3 after training: true

func (*Network) Weights

func (nn *Network) Weights() []*mat.Dense

Weights returns a copy of all weights in the network. Weight dimensions are current layer length x previous layer length.
