Documentation
¶
Index ¶
- Constants
- func CommonTokenize(lexer antlr.Lexer, tm TypeMap) (golightan.Tokens, error)
- type AllTokenStream
- func (c *AllTokenStream) Consume()
- func (c *AllTokenStream) Fill()
- func (c *AllTokenStream) Get(index int) antlr.Token
- func (c *AllTokenStream) GetAllText() string
- func (c *AllTokenStream) GetAllTokens() []antlr.Token
- func (c *AllTokenStream) GetSourceName() string
- func (c *AllTokenStream) GetTextFromInterval(interval *antlr.Interval) string
- func (c *AllTokenStream) GetTextFromRuleContext(interval antlr.RuleContext) string
- func (c *AllTokenStream) GetTextFromTokens(start, end antlr.Token) string
- func (c *AllTokenStream) GetTokenSource() antlr.TokenSource
- func (c *AllTokenStream) Index() int
- func (c *AllTokenStream) LA(i int) int
- func (c *AllTokenStream) LB(k int) antlr.Token
- func (c *AllTokenStream) LT(k int) antlr.Token
- func (c *AllTokenStream) Mark() int
- func (c *AllTokenStream) NextTokenOnChannel(i, channel int) int
- func (c *AllTokenStream) Release(marker int)
- func (c *AllTokenStream) Seek(index int)
- func (c *AllTokenStream) SetTokenSource(tokenSource antlr.TokenSource)
- func (c *AllTokenStream) Size() int
- func (c *AllTokenStream) Sync(i int) bool
- type AvailableLexer
- type CLexer
- type CommonParseTreeListener
- func (b *CommonParseTreeListener) EnterEveryRule(ctx antlr.ParserRuleContext)
- func (b *CommonParseTreeListener) ExitEveryRule(ctx antlr.ParserRuleContext)
- func (b *CommonParseTreeListener) GetTokens() golightan.Tokens
- func (b *CommonParseTreeListener) SetDebug(lexer antlr.Lexer, parser antlr.Parser)
- func (b *CommonParseTreeListener) Token(node antlr.TerminalNode)
- func (b *CommonParseTreeListener) VisitErrorNode(node antlr.ErrorNode)
- func (b *CommonParseTreeListener) VisitTerminal(node antlr.TerminalNode)
- type CommonParseTreeVisitor
- func (b *CommonParseTreeVisitor) GetTokens() golightan.Tokens
- func (b *CommonParseTreeVisitor) SetDebug(lexer antlr.Lexer)
- func (b *CommonParseTreeVisitor) Token(node antlr.TerminalNode)
- func (b *CommonParseTreeVisitor) Visit(tree antlr.ParseTree) interface{}
- func (b *CommonParseTreeVisitor) VisitChildren(node antlr.RuleNode) interface{}
- func (b *CommonParseTreeVisitor) VisitErrorNode(node antlr.ErrorNode) interface{}
- func (b *CommonParseTreeVisitor) VisitTerminal(node antlr.TerminalNode) interface{}
- type GolangLexer
- type GraphQLLexer
- type JSONLexer
- type Lexer
- type Python3Lexer
- type Rule
- type RuleMap
- type SQLiteLexer
- type Stack
- type TokenMap
- type TypeMap
- type XMLLexer
Constants ¶
const ( InitialStackCapacity = 10 InitialTokenCapacity = 100 )
const (
InvalidToken = -1
)
const (
StackEmpty = -1
)
Variables ¶
This section is empty.
Functions ¶
Types ¶
type AllTokenStream ¶
type AllTokenStream struct {
*antlr.CommonTokenStream
// contains filtered or unexported fields
}
AllTokenStream is an implementation of TokenStream that loads tokens from a TokenSource on-demand and places the tokens in a buffer to provide access to any previous token by index. This token stream fetches tokens from all channels.
func NewAllTokenStream ¶
func NewAllTokenStream(lexer antlr.Lexer) *AllTokenStream
func (*AllTokenStream) Consume ¶
func (c *AllTokenStream) Consume()
func (*AllTokenStream) Fill ¶
func (c *AllTokenStream) Fill()
Fill gets all tokens from the lexer until EOF.
func (*AllTokenStream) GetAllText ¶
func (c *AllTokenStream) GetAllText() string
func (*AllTokenStream) GetAllTokens ¶
func (c *AllTokenStream) GetAllTokens() []antlr.Token
func (*AllTokenStream) GetSourceName ¶
func (c *AllTokenStream) GetSourceName() string
func (*AllTokenStream) GetTextFromInterval ¶
func (c *AllTokenStream) GetTextFromInterval(interval *antlr.Interval) string
func (*AllTokenStream) GetTextFromRuleContext ¶
func (c *AllTokenStream) GetTextFromRuleContext(interval antlr.RuleContext) string
func (*AllTokenStream) GetTextFromTokens ¶
func (c *AllTokenStream) GetTextFromTokens(start, end antlr.Token) string
func (*AllTokenStream) GetTokenSource ¶
func (c *AllTokenStream) GetTokenSource() antlr.TokenSource
func (*AllTokenStream) Index ¶
func (c *AllTokenStream) Index() int
func (*AllTokenStream) LA ¶
func (c *AllTokenStream) LA(i int) int
func (*AllTokenStream) Mark ¶
func (c *AllTokenStream) Mark() int
func (*AllTokenStream) NextTokenOnChannel ¶
func (c *AllTokenStream) NextTokenOnChannel(i, channel int) int
NextTokenOnChannel returns the index of the next token on channel given a starting index. Returns i if tokens[i] is on channel. Returns -1 if there are no tokens on channel between i and EOF.
func (*AllTokenStream) Release ¶
func (c *AllTokenStream) Release(marker int)
func (*AllTokenStream) Seek ¶
func (c *AllTokenStream) Seek(index int)
func (*AllTokenStream) SetTokenSource ¶
func (c *AllTokenStream) SetTokenSource(tokenSource antlr.TokenSource)
SetTokenSource resets the token stream by setting its token source.
func (*AllTokenStream) Size ¶
func (c *AllTokenStream) Size() int
func (*AllTokenStream) Sync ¶
func (c *AllTokenStream) Sync(i int) bool
Sync makes sure index i in tokens has a token and returns true if a token is located at index i, and false otherwise.
type AvailableLexer ¶
type AvailableLexer struct {
Targets []string
Lexer string
Description string
Exts []string
FactoryMethod func() Lexer
}
func AvailableLexers ¶
func AvailableLexers() []AvailableLexer
type CommonParseTreeListener ¶
type CommonParseTreeListener struct {
// contains filtered or unexported fields
}
func NewCommonParseTreeListener ¶
func NewCommonParseTreeListener(tm TokenMap) *CommonParseTreeListener
func (*CommonParseTreeListener) EnterEveryRule ¶
func (b *CommonParseTreeListener) EnterEveryRule(ctx antlr.ParserRuleContext)
func (*CommonParseTreeListener) ExitEveryRule ¶
func (b *CommonParseTreeListener) ExitEveryRule(ctx antlr.ParserRuleContext)
func (*CommonParseTreeListener) GetTokens ¶
func (b *CommonParseTreeListener) GetTokens() golightan.Tokens
func (*CommonParseTreeListener) SetDebug ¶
func (b *CommonParseTreeListener) SetDebug(lexer antlr.Lexer, parser antlr.Parser)
SetDebug enables debug printing, which shows the rule stack and symbol.
func (*CommonParseTreeListener) Token ¶
func (b *CommonParseTreeListener) Token(node antlr.TerminalNode)
func (*CommonParseTreeListener) VisitErrorNode ¶
func (b *CommonParseTreeListener) VisitErrorNode(node antlr.ErrorNode)
func (*CommonParseTreeListener) VisitTerminal ¶
func (b *CommonParseTreeListener) VisitTerminal(node antlr.TerminalNode)
type CommonParseTreeVisitor ¶
type CommonParseTreeVisitor struct {
// contains filtered or unexported fields
}
func NewCommonParseTreeVisitor ¶
func NewCommonParseTreeVisitor(tm TokenMap) *CommonParseTreeVisitor
func (*CommonParseTreeVisitor) GetTokens ¶
func (b *CommonParseTreeVisitor) GetTokens() golightan.Tokens
func (*CommonParseTreeVisitor) SetDebug ¶
func (b *CommonParseTreeVisitor) SetDebug(lexer antlr.Lexer)
SetDebug enables debug printing, which shows the rule stack and symbol.
func (*CommonParseTreeVisitor) Token ¶
func (b *CommonParseTreeVisitor) Token(node antlr.TerminalNode)
func (*CommonParseTreeVisitor) Visit ¶
func (b *CommonParseTreeVisitor) Visit(tree antlr.ParseTree) interface{}
func (*CommonParseTreeVisitor) VisitChildren ¶
func (b *CommonParseTreeVisitor) VisitChildren(node antlr.RuleNode) interface{}
func (*CommonParseTreeVisitor) VisitErrorNode ¶
func (b *CommonParseTreeVisitor) VisitErrorNode(node antlr.ErrorNode) interface{}
func (*CommonParseTreeVisitor) VisitTerminal ¶
func (b *CommonParseTreeVisitor) VisitTerminal(node antlr.TerminalNode) interface{}
type GolangLexer ¶
type GolangLexer struct {
// contains filtered or unexported fields
}
func (GolangLexer) Tokenize ¶
func (l GolangLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)
type GraphQLLexer ¶
type GraphQLLexer struct {
// contains filtered or unexported fields
}
func (GraphQLLexer) Tokenize ¶
func (l GraphQLLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)
type Lexer ¶
type Lexer interface {
Tokenize(input antlr.CharStream) (golightan.Tokens, error)
}
func NewGolangLexer ¶
func NewGolangLexer() Lexer
func NewGraphQLLexer ¶
func NewGraphQLLexer() Lexer
func NewJSONLexer ¶
func NewJSONLexer() Lexer
func NewPython3Lexer ¶
func NewPython3Lexer() Lexer
func NewSQLiteLexer ¶
func NewSQLiteLexer() Lexer
func NewXMLLexer ¶
func NewXMLLexer() Lexer
type Python3Lexer ¶
type Python3Lexer struct {
// contains filtered or unexported fields
}
func (Python3Lexer) Tokenize ¶
func (l Python3Lexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)
type SQLiteLexer ¶
type SQLiteLexer struct {
// contains filtered or unexported fields
}
func (SQLiteLexer) Tokenize ¶
func (l SQLiteLexer) Tokenize(input antlr.CharStream) (golightan.Tokens, error)
type TokenMap ¶
type TokenMap struct {
// contains filtered or unexported fields
}
func NewCTokenMap ¶
func NewCTokenMap() TokenMap
func NewGolangTokenMap ¶
func NewGolangTokenMap() TokenMap
func NewGraphQLTokenMap ¶
func NewGraphQLTokenMap() TokenMap
func NewJSONTokenMap ¶
func NewJSONTokenMap() TokenMap
func NewPython3TokenMap ¶
func NewPython3TokenMap() TokenMap
func NewSQLiteTokenMap ¶
func NewSQLiteTokenMap() TokenMap
func NewXMLTokenMap ¶
func NewXMLTokenMap() TokenMap