Package tokenizer

v0.0.0-...-81ee819 (latest pseudo-version)
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jun 1, 2018 License: MIT Imports: 1 Imported by: 1

Documentation

Index

Constants

This section is empty.

Variables

View Source
var (
	EmptyLine = Line{}
)

Functions

This section is empty.

Types

type Container

type Container struct {
	// contains filtered or unexported fields
}

func NewContainer

func NewContainer(first Tokenizer) Container

func NewParentheses

func NewParentheses(middle Tokenizer) Container

func WrapParenthesesLines

func WrapParenthesesLines(lines ...Line) Container

func (Container) Append

func (c Container) Append(tokens ...token.Token) Tokenizer

func (Container) FirstLine

func (c Container) FirstLine() (Line, Tokenizer)

func (Container) LastLine

func (c Container) LastLine() (Tokenizer, Line)

func (Container) Middle

func (c Container) Middle() Tokenizer

func (Container) Prepend

func (c Container) Prepend(tokens ...token.Token) Tokenizer

func (Container) SetLast

func (c Container) SetLast(line Tokenizer) Container

func (Container) SetMiddle

func (c Container) SetMiddle(middle Tokenizer) Container

func (Container) Tokenize

func (c Container) Tokenize(depth int) token.Tokens

type Containers

type Containers []Container

func (Containers) Append

func (cs Containers) Append(tokens ...token.Token) Tokenizer

func (Containers) FirstLine

func (cs Containers) FirstLine() (Line, Tokenizer)

func (Containers) LastLine

func (cs Containers) LastLine() (Tokenizer, Line)

func (Containers) Prepend

func (cs Containers) Prepend(tokens ...token.Token) Tokenizer

func (Containers) Tokenize

func (cs Containers) Tokenize(depth int) token.Tokens

type Line

type Line struct {
	// contains filtered or unexported fields
}

func NewLine

func NewLine(tokens ...token.Token) Line

func ParamsToLine

func ParamsToLine(values ...interface{}) (Line, []interface{})

func (Line) A

func (l Line) A(tokens ...token.Token) Line

func (Line) Append

func (l Line) Append(tokens ...token.Token) Tokenizer

func (Line) FirstLine

func (l Line) FirstLine() (Line, Tokenizer)

func (Line) Join

func (l Line) Join(lines ...Line) Line

func (Line) LastLine

func (l Line) LastLine() (Tokenizer, Line)

func (Line) P

func (l Line) P(tokens ...token.Token) Line

func (Line) Prepend

func (l Line) Prepend(tokens ...token.Token) Tokenizer

func (Line) Tokenize

func (l Line) Tokenize(depth int) token.Tokens

type Lines

type Lines []Line

func ConcatLines

func ConcatLines(lines1, lines2 Lines, seps ...Line) Lines

func NewLines

func NewLines(lines ...Line) Lines

func (Lines) Append

func (b Lines) Append(tokens ...token.Token) Tokenizer

func (Lines) AppendLines

func (b Lines) AppendLines(lines ...Line) Lines

func (Lines) FirstLine

func (b Lines) FirstLine() (Line, Tokenizer)

func (Lines) LastLine

func (b Lines) LastLine() (Tokenizer, Line)

func (Lines) Prefix

func (b Lines) Prefix(tokens ...token.Token) Lines

func (Lines) Prepend

func (b Lines) Prepend(tokens ...token.Token) Tokenizer

func (Lines) Tokenize

func (b Lines) Tokenize(depth int) token.Tokens

type Tokenizer

type Tokenizer interface {
	Tokenize(int) token.Tokens
	Prepend(tokens ...token.Token) Tokenizer
	Append(tokens ...token.Token) Tokenizer
	FirstLine() (Line, Tokenizer)
	LastLine() (Tokenizer, Line)
}

type Tokenizers

type Tokenizers []Tokenizer

func ConcatTokenizers

func ConcatTokenizers(t1, t2 Tokenizer, sep Line) Tokenizers

func NewTokenizers

func NewTokenizers(tokenizers ...Tokenizer) Tokenizers

func (Tokenizers) Append

func (ts Tokenizers) Append(tokens ...token.Token) Tokenizer

func (Tokenizers) FirstLine

func (ts Tokenizers) FirstLine() (Line, Tokenizer)

func (Tokenizers) Join

func (ts Tokenizers) Join(sep Line) Tokenizer

func (Tokenizers) LastLine

func (ts Tokenizers) LastLine() (Tokenizer, Line)

func (Tokenizers) Prefix

func (ts Tokenizers) Prefix(tokens ...token.Token) Tokenizers

func (Tokenizers) Prepend

func (ts Tokenizers) Prepend(tokens ...token.Token) Tokenizer

func (Tokenizers) Tokenize

func (ts Tokenizers) Tokenize(depth int) token.Tokens

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL