lexer

package
v0.0.0-...-b7f917f Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 30, 2017 License: BSD-3-Clause Imports: 11 Imported by: 0

Documentation

Index

Constants

View Source
const (
	InitialStackCapacity = 10
	InitialTokenCapacity = 100
)
View Source
const (
	InvalidToken = -1
)
View Source
const (
	StackEmpty = -1
)

Variables

This section is empty.

Functions

func CommonTokenize

func CommonTokenize(lexer antlr.Lexer, tm TypeMap) (golightan.Tokens, error)

CommonTokenize tokenizes input using the given antlr lexer and type map, returning the resulting golightan.Tokens.

Types

type AllTokenStream

type AllTokenStream struct {
	*antlr.CommonTokenStream
	// contains filtered or unexported fields
}

AllTokenStream is an implementation of TokenStream that loads tokens from a TokenSource on-demand and places the tokens in a buffer to provide access to any previous token by index. This token stream fetches tokens from all channels.

func NewAllTokenStream

func NewAllTokenStream(lexer antlr.Lexer) *AllTokenStream

func (*AllTokenStream) Consume

func (c *AllTokenStream) Consume()

func (*AllTokenStream) Fill

func (c *AllTokenStream) Fill()

Fill gets all tokens from the lexer until EOF.

func (*AllTokenStream) Get

func (c *AllTokenStream) Get(index int) antlr.Token

func (*AllTokenStream) GetAllText

func (c *AllTokenStream) GetAllText() string

func (*AllTokenStream) GetAllTokens

func (c *AllTokenStream) GetAllTokens() []antlr.Token

func (*AllTokenStream) GetSourceName

func (c *AllTokenStream) GetSourceName() string

func (*AllTokenStream) GetTextFromInterval

func (c *AllTokenStream) GetTextFromInterval(interval *antlr.Interval) string

func (*AllTokenStream) GetTextFromRuleContext

func (c *AllTokenStream) GetTextFromRuleContext(interval antlr.RuleContext) string

func (*AllTokenStream) GetTextFromTokens

func (c *AllTokenStream) GetTextFromTokens(start, end antlr.Token) string

func (*AllTokenStream) GetTokenSource

func (c *AllTokenStream) GetTokenSource() antlr.TokenSource

func (*AllTokenStream) Index

func (c *AllTokenStream) Index() int

func (*AllTokenStream) LA

func (c *AllTokenStream) LA(i int) int

func (*AllTokenStream) LB

func (c *AllTokenStream) LB(k int) antlr.Token

func (*AllTokenStream) LT

func (c *AllTokenStream) LT(k int) antlr.Token

func (*AllTokenStream) Mark

func (c *AllTokenStream) Mark() int

func (*AllTokenStream) NextTokenOnChannel

func (c *AllTokenStream) NextTokenOnChannel(i, channel int) int

NextTokenOnChannel returns the index of the next token on channel given a starting index. Returns i if tokens[i] is on channel. Returns -1 if there are no tokens on channel between i and EOF.

func (*AllTokenStream) Release

func (c *AllTokenStream) Release(marker int)

func (*AllTokenStream) Seek

func (c *AllTokenStream) Seek(index int)

func (*AllTokenStream) SetTokenSource

func (c *AllTokenStream) SetTokenSource(tokenSource antlr.TokenSource)

SetTokenSource resets the token stream by setting its token source.

func (*AllTokenStream) Size

func (c *AllTokenStream) Size() int

func (*AllTokenStream) Sync

func (c *AllTokenStream) Sync(i int) bool

Sync makes sure index i in tokens has a token. It returns true if a token is located at index i, and false otherwise.

type AvailableLexer

type AvailableLexer struct {
	Targets       []string
	Lexer         string
	Description   string
	Exts          []string
	FactoryMethod func() Lexer
}

func AvailableLexers

func AvailableLexers() []AvailableLexer

type CLexer

type CLexer struct {
	// contains filtered or unexported fields
}

func (CLexer) Tokenize

func (l CLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type CommonParseTreeListener

type CommonParseTreeListener struct {
	// contains filtered or unexported fields
}

func NewCommonParseTreeListener

func NewCommonParseTreeListener(tm TokenMap) *CommonParseTreeListener

func (*CommonParseTreeListener) EnterEveryRule

func (b *CommonParseTreeListener) EnterEveryRule(ctx antlr.ParserRuleContext)

func (*CommonParseTreeListener) ExitEveryRule

func (b *CommonParseTreeListener) ExitEveryRule(ctx antlr.ParserRuleContext)

func (*CommonParseTreeListener) GetTokens

func (b *CommonParseTreeListener) GetTokens() golightan.Tokens

func (*CommonParseTreeListener) SetDebug

func (b *CommonParseTreeListener) SetDebug(lexer antlr.Lexer, parser antlr.Parser)

SetDebug enables debug printing, which shows the rule stack and symbol.

func (*CommonParseTreeListener) Token

func (*CommonParseTreeListener) VisitErrorNode

func (b *CommonParseTreeListener) VisitErrorNode(node antlr.ErrorNode)

func (*CommonParseTreeListener) VisitTerminal

func (b *CommonParseTreeListener) VisitTerminal(node antlr.TerminalNode)

type CommonParseTreeVisitor

type CommonParseTreeVisitor struct {
	// contains filtered or unexported fields
}

func NewCommonParseTreeVisitor

func NewCommonParseTreeVisitor(tm TokenMap) *CommonParseTreeVisitor

func (*CommonParseTreeVisitor) GetTokens

func (b *CommonParseTreeVisitor) GetTokens() golightan.Tokens

func (*CommonParseTreeVisitor) SetDebug

func (b *CommonParseTreeVisitor) SetDebug(lexer antlr.Lexer)

SetDebug enables debug printing, which shows the rule stack and symbol.

func (*CommonParseTreeVisitor) Token

func (b *CommonParseTreeVisitor) Token(node antlr.TerminalNode)

func (*CommonParseTreeVisitor) Visit

func (b *CommonParseTreeVisitor) Visit(tree antlr.ParseTree) interface{}

func (*CommonParseTreeVisitor) VisitChildren

func (b *CommonParseTreeVisitor) VisitChildren(node antlr.RuleNode) interface{}

func (*CommonParseTreeVisitor) VisitErrorNode

func (b *CommonParseTreeVisitor) VisitErrorNode(node antlr.ErrorNode) interface{}

func (*CommonParseTreeVisitor) VisitTerminal

func (b *CommonParseTreeVisitor) VisitTerminal(node antlr.TerminalNode) interface{}

type GolangLexer

type GolangLexer struct {
	// contains filtered or unexported fields
}

func (GolangLexer) Tokenize

func (l GolangLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type GraphQLLexer

type GraphQLLexer struct {
	// contains filtered or unexported fields
}

func (GraphQLLexer) Tokenize

func (l GraphQLLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type JSONLexer

type JSONLexer struct {
	// contains filtered or unexported fields
}

func (JSONLexer) Tokenize

func (l JSONLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type Lexer

type Lexer interface {
	Tokenize(input antlr.CharStream) (golightan.Tokens, error)
}

func Factory

func Factory(target string) (Lexer, error)

func NewCLexer

func NewCLexer() Lexer

func NewGolangLexer

func NewGolangLexer() Lexer

func NewGraphQLLexer

func NewGraphQLLexer() Lexer

func NewJSONLexer

func NewJSONLexer() Lexer

func NewPython3Lexer

func NewPython3Lexer() Lexer

func NewSQLiteLexer

func NewSQLiteLexer() Lexer

func NewXMLLexer

func NewXMLLexer() Lexer

type Python3Lexer

type Python3Lexer struct {
	// contains filtered or unexported fields
}

func (Python3Lexer) Tokenize

func (l Python3Lexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type Rule

type Rule [2]int // 0: rule, 1: antlr node token type

type RuleMap

type RuleMap map[Rule]golightan.TokenType

func (RuleMap) Get

func (t RuleMap) Get(rule, tokenType int) golightan.TokenType

type SQLiteLexer

type SQLiteLexer struct {
	// contains filtered or unexported fields
}

func (SQLiteLexer) Tokenize

func (l SQLiteLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

type Stack

type Stack struct {
	// contains filtered or unexported fields
}

func NewStack

func NewStack(capacity int) *Stack

func (*Stack) Last

func (s *Stack) Last() int

func (*Stack) Len

func (s *Stack) Len() int

func (*Stack) Pop

func (s *Stack) Pop() int

func (*Stack) Push

func (s *Stack) Push(n int)

func (*Stack) String

func (s *Stack) String() string

type TokenMap

type TokenMap struct {
	// contains filtered or unexported fields
}

func NewCTokenMap

func NewCTokenMap() TokenMap

func NewGolangTokenMap

func NewGolangTokenMap() TokenMap

func NewGraphQLTokenMap

func NewGraphQLTokenMap() TokenMap

func NewJSONTokenMap

func NewJSONTokenMap() TokenMap

func NewPython3TokenMap

func NewPython3TokenMap() TokenMap

func NewSQLiteTokenMap

func NewSQLiteTokenMap() TokenMap

func NewXMLTokenMap

func NewXMLTokenMap() TokenMap

func (TokenMap) Convert

func (t TokenMap) Convert(rule, tokenType int, text string) golightan.TokenType

Convert converts from a rule and an antlr token type to a golightan.TokenType. It searches in order: 1. the keyword map; 2. the RuleMap, using the rule and token type; 3. if not in the RuleMap, the symbolicMap, using the token type; 4. if still not found, it returns TokenTypeText as normal text.

type TypeMap

type TypeMap map[int]golightan.TokenType

func (TypeMap) Get

func (tm TypeMap) Get(tokenType int) golightan.TokenType

type XMLLexer

type XMLLexer struct {
	// contains filtered or unexported fields
}

func (XMLLexer) Tokenize

func (l XMLLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL