layer

package
Version: v0.0.0-...-f83957e Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jan 30, 2021 License: Apache-2.0 Imports: 5 Imported by: 5

Documentation

Overview

Package layer provides the layers for sequential models.

Index

Constants

This section is empty.

Variables

View Source
var LeakyReLU = &LeakyReLUActivation{0.01}

LeakyReLU is the default leaky relu activation.

View Source
var Linear = &LinearActivation{}

Linear activation.

View Source
var ReLU = &ReLUActivation{}

ReLU activation.

View Source
var Sigmoid = &SigmoidActivation{}

Sigmoid activation function.

View Source
var Softmax = &SoftmaxActivation{}

Softmax is the default softmax activation.

View Source
var Tanh = &TanhActivation{}

Tanh activation.

Functions

func AsBatch

func AsBatch() func(Layer)

AsBatch informs the layer compilation that it is a batch.

func AsType

func AsType(dtype t.Dtype) func(Layer)

AsType sets the datatype for the layer.

func WithLayerOpts

func WithLayerOpts(opts ...CompileOpt) func(*Chain)

WithLayerOpts adds the given layer opts to all layers.

func WithSharedChainLearnables

func WithSharedChainLearnables(shared *Chain) func(*Chain)

WithSharedChainLearnables shares the learnables from another chain.

func WithSharedLearnables

func WithSharedLearnables(shared Layer) func(Layer)

WithSharedLearnables shares the learnables from another layer.

Types

type ActivationFn

type ActivationFn interface {
	// Fwd is a forward pass through x.
	Fwd(x *g.Node) (*g.Node, error)

	// Clone the activation.
	Clone() ActivationFn
}

ActivationFn is an activation function.

type Chain

type Chain struct {
	// Layers are the layers to chain together.
	Layers []Config
	// contains filtered or unexported fields
}

Chain of layers.

func NewChain

func NewChain(layers ...Config) *Chain

NewChain returns a new chain of layers.

func (*Chain) Add

func (c *Chain) Add(l ...Config)

Add to the chain.

func (*Chain) Clone

func (c *Chain) Clone() *Chain

Clone the chain without any nodes.

func (*Chain) Compile

func (c *Chain) Compile(graph *g.ExprGraph, opts ...ChainOpt)

Compile the chain of layers into the model.

func (*Chain) Fwd

func (c *Chain) Fwd(x *g.Node) (prediction *g.Node, err error)

Fwd is a forward pass through all layers of the chain.

func (*Chain) Learnables

func (c *Chain) Learnables() g.Nodes

Learnables are all of the learnable parameters in the chain.

type ChainOpt

type ChainOpt func(*Chain)

ChainOpt is a chain option.

type CompileOpt

type CompileOpt func(Layer)

CompileOpt is a layer compile option.

type Config

type Config interface {
	// Compile the layer.
	Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

	// ApplyDefaults to the config.
	ApplyDefaults() Config

	// Validate the config.
	Validate() error

	// Clone the layer config.
	Clone() Config
}

Config is the config for a layer.

type Conv2D

type Conv2D struct {
	// Input channels.
	// required
	Input int

	// Output channels.
	// required
	Output int

	// Height of the filter.
	// required
	Height int

	// Width of the filter.
	// required
	Width int

	// Name of the layer.
	Name string

	// Activation function for the layer.
	// Defaults to ReLU
	Activation ActivationFn

	// Pad
	// Defaults to (1, 1)
	Pad []int

	// Stride
	// Defaults to (1, 1)
	Stride []int

	// Dilation
	// Defaults to (1, 1)
	Dilation []int

	// Init function for the weights.
	// Defaults to GlorotN(1)
	Init g.InitWFn
}

Conv2D is a 2D convolution.

func (Conv2D) ApplyDefaults

func (c Conv2D) ApplyDefaults() Config

ApplyDefaults to the config.

func (Conv2D) Clone

func (c Conv2D) Clone() Config

Clone the config.

func (Conv2D) Compile

func (c Conv2D) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the config into a layer.

func (Conv2D) Validate

func (c Conv2D) Validate() error

Validate the config.

type Dropout

type Dropout struct {
	// Probability of dropping out.
	// Defaults to 0.6
	Probability float64
}

Dropout implements layer dropout.

func (Dropout) ApplyDefaults

func (d Dropout) ApplyDefaults() Config

ApplyDefaults applies defaults to the layers.

func (Dropout) Clone

func (d Dropout) Clone() Config

Clone the config.

func (Dropout) Compile

func (d Dropout) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the config as a layer.

func (Dropout) Validate

func (d Dropout) Validate() error

Validate the config.

type FC

type FC struct {
	// Input is the number of units in input.
	// required
	Input int

	// Output is the number of units in the output.
	// required
	Output int

	// Name of the layer.
	Name string

	// Activation is the activation function.
	// Defaults to ReLU
	Activation ActivationFn

	// Init is the init function.
	// Defaults to GlorotN(1)
	Init g.InitWFn

	// NoBias indicates to not use a bias with the layer
	// Defaults to true.
	NoBias bool

	// BiasInit is the init function for the bias.
	// Defaults to GlorotN(1)
	BiasInit g.InitWFn
}

FC is a fully connected layer of neurons.

func (FC) ApplyDefaults

func (f FC) ApplyDefaults() Config

ApplyDefaults to the config.

func (FC) Clone

func (f FC) Clone() Config

Clone the config.

func (FC) Compile

func (f FC) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the layer into the graph.

func (FC) Validate

func (f FC) Validate() error

Validate the config.

type Flatten

type Flatten struct{}

Flatten reshapes the incoming tensor to be flat, preserving the batch.

func (Flatten) ApplyDefaults

func (f Flatten) ApplyDefaults() Config

ApplyDefaults to the flatten layer.

func (Flatten) Clone

func (f Flatten) Clone() Config

Clone the config.

func (Flatten) Compile

func (f Flatten) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the layer.

func (Flatten) Validate

func (f Flatten) Validate() error

Validate the config.

type Layer

type Layer interface {
	// Fwd is a forward pass through the layer.
	Fwd(x *g.Node) (*g.Node, error)

	// Learnables returns all learnable nodes within this layer.
	Learnables() g.Nodes

	// Clone the layer.
	Clone() Layer

	// Graph returns the graph for this layer.
	Graph() *g.ExprGraph
}

Layer in a network.

type LeakyReLUActivation

type LeakyReLUActivation struct {
	// contains filtered or unexported fields
}

LeakyReLUActivation is a leaky relu activation layer.

func NewLeakyReLU

func NewLeakyReLU(alpha float64) *LeakyReLUActivation

NewLeakyReLU returns a new leaky relu activation layer.

func (*LeakyReLUActivation) Clone

func (r *LeakyReLUActivation) Clone() ActivationFn

Clone the activation.

func (*LeakyReLUActivation) Compile

func (r *LeakyReLUActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*LeakyReLUActivation) Fwd

func (r *LeakyReLUActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*LeakyReLUActivation) Learnables

func (r *LeakyReLUActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

type LinearActivation

type LinearActivation struct{}

LinearActivation is a linear (identity) activation layer.

func NewLinear

func NewLinear() *LinearActivation

NewLinear returns a new linear activation layer.

func (*LinearActivation) Clone

func (l *LinearActivation) Clone() ActivationFn

Clone the activation.

func (*LinearActivation) Compile

func (l *LinearActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*LinearActivation) Fwd

func (l *LinearActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*LinearActivation) Learnables

func (l *LinearActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

type MaxPooling2D

type MaxPooling2D struct {
	// Shape of the kernel.
	// Defaults to (2, 2)
	Kernel t.Shape

	// Pad
	// Defaults to (0, 0)
	Pad []int

	// Stride
	// Defaults to (2, 2)
	Stride []int

	// Name
	Name string
}

MaxPooling2D implements the max pooling 2d function.

func (MaxPooling2D) ApplyDefaults

func (m MaxPooling2D) ApplyDefaults() Config

ApplyDefaults applies defaults to the layers.

func (MaxPooling2D) Clone

func (m MaxPooling2D) Clone() Config

Clone the config.

func (MaxPooling2D) Compile

func (m MaxPooling2D) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the config as a layer.

func (MaxPooling2D) Validate

func (m MaxPooling2D) Validate() error

Validate the config.

type ReLUActivation

type ReLUActivation struct{}

ReLUActivation is a relu activation layer.

func NewReLU

func NewReLU() *ReLUActivation

NewReLU returns a new relu activation layer.

func (*ReLUActivation) Clone

func (r *ReLUActivation) Clone() ActivationFn

Clone the activation.

func (*ReLUActivation) Compile

func (r *ReLUActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*ReLUActivation) Fwd

func (r *ReLUActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*ReLUActivation) Learnables

func (r *ReLUActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

type Reshape

type Reshape struct {
	// To shape
	// required
	To t.Shape
}

Reshape the incoming tensor.

func (Reshape) ApplyDefaults

func (r Reshape) ApplyDefaults() Config

ApplyDefaults to the reshape layer.

func (Reshape) Clone

func (r Reshape) Clone() Config

Clone the config.

func (Reshape) Compile

func (r Reshape) Compile(graph *g.ExprGraph, opts ...CompileOpt) Layer

Compile the layer.

func (Reshape) Validate

func (r Reshape) Validate() error

Validate the config.

type SigmoidActivation

type SigmoidActivation struct{}

SigmoidActivation is a sigmoid activation layer.

func NewSigmoid

func NewSigmoid() *SigmoidActivation

NewSigmoid returns a new sigmoid activation layer.

func (*SigmoidActivation) Clone

func (s *SigmoidActivation) Clone() ActivationFn

Clone the activation.

func (*SigmoidActivation) Compile

func (s *SigmoidActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*SigmoidActivation) Fwd

func (s *SigmoidActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*SigmoidActivation) Learnables

func (s *SigmoidActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

type SoftmaxActivation

type SoftmaxActivation struct {
	// contains filtered or unexported fields
}

SoftmaxActivation is a softmax activation layer.

func NewSoftmax

func NewSoftmax(axis ...int) *SoftmaxActivation

NewSoftmax returns a new softmax activation layer.

func (*SoftmaxActivation) Clone

func (s *SoftmaxActivation) Clone() ActivationFn

Clone the activation.

func (*SoftmaxActivation) Compile

func (s *SoftmaxActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*SoftmaxActivation) Fwd

func (s *SoftmaxActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*SoftmaxActivation) Learnables

func (s *SoftmaxActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

type TanhActivation

type TanhActivation struct{}

TanhActivation is a tanh activation layer.

func NewTanh

func NewTanh() *TanhActivation

NewTanh returns a new tanh activation layer.

func (*TanhActivation) Clone

func (t *TanhActivation) Clone() ActivationFn

Clone the activation.

func (*TanhActivation) Compile

func (t *TanhActivation) Compile(x *g.Node, opts ...CompileOpt)

Compile the layer.

func (*TanhActivation) Fwd

func (t *TanhActivation) Fwd(x *g.Node) (*g.Node, error)

Fwd is a forward pass through the layer.

func (*TanhActivation) Learnables

func (t *TanhActivation) Learnables() (n g.Nodes)

Learnables returns all learnable nodes within this layer.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL