lexing

package
v0.1.1 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Oct 2, 2020 License: Apache-2.0 Imports: 7 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

func FindRawTokenType

func FindRawTokenType(value, operators, separators string) text.TokenType

func JoinMultiCharSymbols

func JoinMultiCharSymbols(l *list.List, multiChar string) *list.List

JoinMultiCharSymbols joins symbols that are more than one character long, such as && or >=. The maximum symbol length is 2. <symbol padded> = <symbol><space to the next even position>. multiChar format: "<symbol padded><symbol padded>...". Example: "+ - ++ -- & |"

func JoinQuote

func JoinQuote(l *list.List) *list.List

JoinQuote joins quote characters to form a quoted string.

func RegroupSymbols

func RegroupSymbols(l *list.List, symbols string) []string

RegroupSymbols regroups symbols that were split by NextLine(). symbols format: "<symbol><space>...". Example: "+ - ++ -- & |"

func SplitKeepSeparators

func SplitKeepSeparators(s, sep string) *list.List

SplitKeepSeparators splits a string on single-character separators, like StringTokenizer in Java, and returns a list containing both the tokens and the delimiters.

Types

type SourceReader

type SourceReader struct {
	// contains filtered or unexported fields
}

SourceReader reads the source file/string and splits each line into raw tokens.

func NewSourceReader

func NewSourceReader(reader io.Reader, separators, sourceName string) *SourceReader

NewSourceReader creates a SourceReader from a reader and a separators string.

func NewSourceReaderFromFile

func NewSourceReaderFromFile(filePath, separators string) *SourceReader

NewSourceReaderFromFile creates a SourceReader from a file path and a separators string.

func (*SourceReader) FilterLine

func (source *SourceReader) FilterLine(line *text.SourceLine, options TokenMatchingOptions) []text.Token

FilterLine removes single-line comments, filters out whitespace, and returns a slice of tokens.

func (*SourceReader) HasNext

func (source *SourceReader) HasNext() bool

HasNext returns true if the end of file is reached.

func (*SourceReader) LineNumber

func (source *SourceReader) LineNumber() uint

LineNumber returns the current line number.

func (*SourceReader) NextLine

func (source *SourceReader) NextLine() *list.List

NextLine reads the next line, splits it into raw tokens, and returns them as a list.

func (*SourceReader) NormalizeLine

func (source *SourceReader) NormalizeLine(line *list.List, lineNumber uint, symbols string) *text.SourceLine

NormalizeLine processes the raw tokens in a line, regrouping quoted strings and multi-character operators.

func (*SourceReader) SourceName

func (source *SourceReader) SourceName() string

SourceName returns the source name.

type TokenMatchingOptions

type TokenMatchingOptions struct {
	// contains filtered or unexported fields
}

TokenMatchingOptions contains the definitions of the delimiters and operators used by the lexer.

func NewMatchingOptions

func NewMatchingOptions(operators, separators, multiCharSymbols, lineCommentChar string) TokenMatchingOptions

func (TokenMatchingOptions) GetAllSeparators

func (opt TokenMatchingOptions) GetAllSeparators() string

func (TokenMatchingOptions) GetMultiCharSymbols

func (opt TokenMatchingOptions) GetMultiCharSymbols() string

func (TokenMatchingOptions) GetOperators

func (opt TokenMatchingOptions) GetOperators() string

func (TokenMatchingOptions) LineToTokens

func (opt TokenMatchingOptions) LineToTokens(l *text.SourceLine, lineNumber uint, fileName string) []text.Token

LineToTokens converts the line to a slice of tokens, skipping whitespace and comments. TODO: investigate why fileName was used.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL