layers

package
v0.0.0-...-32ffa8f Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 5, 2017 License: GPL-3.0 Imports: 10 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func RandomID

func RandomID(n int) string

func SoftMaxLog

func SoftMaxLog(values []float64) []float64

Types

type BaseLayer

type BaseLayer struct {
	// contains filtered or unexported fields
}

func (*BaseLayer) Activate

func (l *BaseLayer) Activate(input *tensor.Tensor) error

func (*BaseLayer) BackPropagate

func (l *BaseLayer) BackPropagate(err *tensor.Tensor) error

func (*BaseLayer) GetDebugInfo

func (l *BaseLayer) GetDebugInfo() []*debug.LayerInfo

func (*BaseLayer) GetInputSize

func (l *BaseLayer) GetInputSize() []int

func (*BaseLayer) GetOutputSize

func (l *BaseLayer) GetOutputSize() []int

func (*BaseLayer) GetParamGradPointers

func (l *BaseLayer) GetParamGradPointers() ([]*float64, []*float64)

func (*BaseLayer) ID

func (l *BaseLayer) ID() string

func (*BaseLayer) Init

func (l *BaseLayer) Init(inputSize, outputSize []int) error

type ConvolutionalLayer

type ConvolutionalLayer struct {
	BaseLayer
	// contains filtered or unexported fields
}

func NewConvolutionalLayer

func NewConvolutionalLayer(inputWidth, inputHeight, inputDepth, nKernels, kernelPadX, kernelPadY, strideX, strideY, padX, padY int) *ConvolutionalLayer

NewConvolutionalLayer creates a new ConvolutionalLayer with the given geometry.

inputWidth: input width inputHeight: input height inputDepth: input depth nKernels: number of kernels kernelPadX: kernel width pad kernelPadY: kernel height pad strideX: stride in x strideY: stride in y padX: padding in x padY: padding in y

Instead of passing the kernel size, you specify the size as a padding of a kernel of one pixel. This is to avoid the possibility of a kernel size that has no center pixel. It's also easier to create a layer that does not change the input size if kernelPad = pad

The filter cannot be bigger than the image so: kernelPadX*2+1 <= inputWidth && kernelPadY*2+1 <= inputHeight The stride must divide the image in equal integer parts: ((inputWidth + padX*2) - (1+kernelPadX*2)) % (strideX+1) == 0 && ((inputHeight + padY*2) - (1+kernelPadY*2)) % (strideY+1) == 0 If you want the output area to be the same as the input: kernelPadX == padX && kernelPadY == padY More padding than spatial extent makes no sense so: padX <= kernelPadX && padY <= kernelPadY

func NewSquareConvolutionalLayer

func NewSquareConvolutionalLayer(inputSize, inputDepth, nKernels, kernelPad, stride, padding int) *ConvolutionalLayer

func (*ConvolutionalLayer) Activate

func (l *ConvolutionalLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

Activate takes an input tensor and computes an output tensor where each value is the sum of the convolutions of the different input depths using different kernels.

func (*ConvolutionalLayer) BackPropagate

func (l *ConvolutionalLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*ConvolutionalLayer) ConvIm2Col

func (l *ConvolutionalLayer) ConvIm2Col(data, ker, out *tensor.Tensor, kernelSizeX, kernelSizeY, padX, padY, strideX, strideY int)

func (*ConvolutionalLayer) CreateSlave

func (l *ConvolutionalLayer) CreateSlave() weight.Layer

CreateSlave creates a slave of the ConvolutionalLayer. See EnslaverLayer in package weight for more information on layer slaves.

type DenseLayer

type DenseLayer struct {
	BaseLayer
}

DenseLayer computes each output using a weighted sum of all inputs plus a bias

func NewDenseLayer

func NewDenseLayer(inputSize, outputSize []int) *DenseLayer

NewDenseLayer creates a new DenseLayer

func (*DenseLayer) Activate

func (l *DenseLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

Activate takes an input tensor and computes an output tensor where each value is the weighted sum of all the input values.

func (*DenseLayer) BackPropagate

func (l *DenseLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*DenseLayer) CreateSlave

func (l *DenseLayer) CreateSlave() weight.Layer

CreateSlave creates a slave of the DenseLayer. See EnslaverLayer in package weight for more information on layer slaves.

func (*DenseLayer) GetNumberOfInputs

func (l *DenseLayer) GetNumberOfInputs() int

func (*DenseLayer) GetNumberOfNeurons

func (l *DenseLayer) GetNumberOfNeurons() int

type FFNet

type FFNet struct {
	// contains filtered or unexported fields
}

FFNet is a generic feedforward network. It can include any number of branches, but they cannot form a loop.

func NewCRPBlock

func NewCRPBlock(inputSize []int, nConv, nKernels int) *FFNet

NewCRPBlock creates a block consisting of convolutional layers, ReLU layers and a pooling layer.

func NewCRPBlocks

func NewCRPBlocks(inputSize []int, nConv, nKernels, nBlocks int) *FFNet

func NewFFNet

func NewFFNet() *FFNet

NewFFNet returns a new FFNet

func NewSequentialNet

func NewSequentialNet(layers ...weight.Layer) (*FFNet, error)

func (*FFNet) Activate

func (n *FFNet) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

Activate takes an input tensor and passes it through all the layers in the network following the node connections.

func (*FFNet) AddLayer

func (n *FFNet) AddLayer(layer weight.Layer, parents ...string) error

func (*FFNet) BackPropagate

func (n *FFNet) BackPropagate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*FFNet) CreateSlave

func (n *FFNet) CreateSlave() weight.Layer

CreateSlave creates a slave of the FFNet. See EnslaverLayer in package weight for more information on layer slaves.

func (*FFNet) End

func (n *FFNet) End() error

End closes the network

func (*FFNet) GetDebugInfo

func (n *FFNet) GetDebugInfo() []*debug.LayerInfo

func (*FFNet) GetInputSize

func (n *FFNet) GetInputSize() []int

func (*FFNet) GetOutputSize

func (n *FFNet) GetOutputSize() []int

func (*FFNet) GetParamGradPointers

func (n *FFNet) GetParamGradPointers() ([]*float64, []*float64)

func (*FFNet) ID

func (n *FFNet) ID() string

type FFNode

type FFNode struct {
	// contains filtered or unexported fields
}

FFNode is a node to be used with FFNet

func (*FFNode) Activate

func (n *FFNode) Activate()

Activate waits for the parent nodes to send their outputs, computes the sum of them and passes it to the underlying layer's Activate, then sends the result to all children.

func (*FFNode) BackPropagate

func (n *FFNode) BackPropagate()

BackPropagate waits for the child nodes to send their propagated errors, computes the sum of them and passes it to the underlying layer's BackPropagate, then propagates the result to all parents.

func (*FFNode) ID

func (n *FFNode) ID() string

type LeakyReLULayer

type LeakyReLULayer struct {
	BaseLayer
}

func NewLeakyReLULayer

func NewLeakyReLULayer(size ...int) *LeakyReLULayer

func (*LeakyReLULayer) Activate

func (l *LeakyReLULayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*LeakyReLULayer) BackPropagate

func (l *LeakyReLULayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*LeakyReLULayer) CreateSlave

func (l *LeakyReLULayer) CreateSlave() weight.Layer

type PoolLayer

type PoolLayer struct {
	BaseLayer
	// contains filtered or unexported fields
}

func NewPoolLayer

func NewPoolLayer(inputSize, kernelSize []int) *PoolLayer

func (*PoolLayer) Activate

func (l *PoolLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*PoolLayer) BackPropagate

func (l *PoolLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*PoolLayer) CreateSlave

func (l *PoolLayer) CreateSlave() weight.Layer

func (*PoolLayer) GetParamGradPointers

func (l *PoolLayer) GetParamGradPointers() ([]*float64, []*float64)

type ReLULayer

type ReLULayer struct {
	BaseLayer
}

func NewReLULayer

func NewReLULayer(size ...int) *ReLULayer

func (*ReLULayer) Activate

func (l *ReLULayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*ReLULayer) BackPropagate

func (l *ReLULayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*ReLULayer) CreateSlave

func (l *ReLULayer) CreateSlave() weight.Layer

type ReshaperLayer

type ReshaperLayer struct {
	BaseLayer
}

func NewReshaperLayer

func NewReshaperLayer(inputSize []int, outputSize []int) *ReshaperLayer

func (*ReshaperLayer) Activate

func (l *ReshaperLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*ReshaperLayer) BackPropagate

func (l *ReshaperLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*ReshaperLayer) CreateSlave

func (l *ReshaperLayer) CreateSlave() weight.Layer

type SigmoidLayer

type SigmoidLayer struct {
	BaseLayer
}

func NewSigmoidLayer

func NewSigmoidLayer(size ...int) *SigmoidLayer

func (*SigmoidLayer) Activate

func (l *SigmoidLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*SigmoidLayer) BackPropagate

func (l *SigmoidLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*SigmoidLayer) CreateSlave

func (l *SigmoidLayer) CreateSlave() weight.Layer

type SoftmaxLayer

type SoftmaxLayer struct {
	BaseLayer
}

func NewSoftmaxLayer

func NewSoftmaxLayer(size ...int) *SoftmaxLayer

func (*SoftmaxLayer) Activate

func (l *SoftmaxLayer) Activate(input *tensor.Tensor) (*tensor.Tensor, error)

func (*SoftmaxLayer) BackPropagate

func (l *SoftmaxLayer) BackPropagate(err *tensor.Tensor) (*tensor.Tensor, error)

func (*SoftmaxLayer) CreateSlave

func (l *SoftmaxLayer) CreateSlave() weight.Layer

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL