package parsing

v0.1.1
Published: Oct 2, 2020 License: Apache-2.0 Imports: 6 Imported by: 0

Documentation

Constants

const (
	DefaultIgnore = 0
	DefaultAppend = 1
)

Defaults for tags

const (
	ErrorUnresolvedSymbol = 0 + iota
	ErrorDuplicatedLabel
	ErrorExpectedToken
	InvalidNumberFormat
	InvalidOpcodeArgument
	InvalidOpcodeFormat
	ErrorCyclicInclusion
	ErrorExpectedEOL
	ErrorIncompleteExpression
	ErrorUserDefined
	ErrorInternal
	NoError
)

ParsingErrorType values

Variables

This section is empty.

Functions

func FindTokenType

func FindTokenType(t text.Token) text.TokenType

FindTokenType returns the text.TokenType of a text.Token

Types

type IncompleteError

type IncompleteError struct {
	// contains filtered or unexported fields
}

IncompleteError holds the type and message of a parsing error that has not yet been bound to a token

func NewIncompleteError

func NewIncompleteError(errType ParsingErrorType, errMsg string) IncompleteError

NewIncompleteError returns a new IncompleteError with the given error type and message

type MatchRule

type MatchRule func(tokens []text.Token, pState ParserState) ([]text.Token, bool)

MatchRule matches a rule against the token stream and pushes the matched tokens to the parser state. If not enough tokens are provided, the state's Pull function is called to fetch more. It returns the tokens not yet parsed and whether parsing should continue
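
For illustration, a hand-written MatchRule could look like the sketch below. It only uses the ParserState methods documented further down; the tag value 0 and the exact Pull/Push interplay are assumptions, not part of the documented contract.

// Hypothetical sketch of a MatchRule that accepts any single token.
// The import paths of the parsing and text packages depend on the module and are omitted.
var matchAnyOne parsing.MatchRule = func(tokens []text.Token, pState parsing.ParserState) ([]text.Token, bool) {
	if len(tokens) == 0 {
		if pState.Pull(&tokens) == 0 { // 0 means EOF was reached
			return tokens, false
		}
	}
	pState.Push(tokens[:1], 0) // push the matched token with a placeholder tag
	return tokens[1:], true    // return the unparsed tokens, keep parsing
}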

func MatchAll

func MatchAll(tag int, matchList ...MatchRule) MatchRule

MatchAll combines multiple MatchRules and returns a MatchRule that matches all of them in sequence
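
A minimal sketch of composing rules, assuming placeholder token types (tokenIdentifier, tokenNumber) and an already constructed ParserState named state; only the exported identifiers of this package are real:

// Hypothetical sketch: match "<identifier> = <number>" as a single rule.
errExpected := parsing.NewIncompleteError(parsing.ErrorExpectedToken, "expected token")

assignment := parsing.MatchAll(parsing.DefaultAppend,
	parsing.MatchToken(errExpected, tokenIdentifier), // placeholder text.TokenType
	parsing.MatchSkipString(errExpected, "="),        // '=' is consumed but marked as ignorable
	parsing.MatchToken(errExpected, tokenNumber),     // placeholder text.TokenType
)

rest, ok := assignment(tokens, state) // tokens []text.Token, state parsing.ParserState
_, _ = rest, ok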

func MatchAnyString

func MatchAnyString(err IncompleteError, sList ...string) MatchRule

MatchAnyString returns a MatchRule that matches any string in the given list. The returned rule fails under the usual MatchRule conditions or when an error is found

func MatchAnyToken

func MatchAnyToken(err IncompleteError, tList ...text.TokenType) MatchRule

MatchAnyToken returns a MatchRule that matches any token type in the given list. The returned rule fails under the usual MatchRule conditions or when an error is found

func MatchNotAnyToken

func MatchNotAnyToken(err IncompleteError, tList ...text.TokenType) MatchRule

MatchNotAnyToken returns a MatchRule that matches every token except the specified TokenTypes. The rule returns false when the token matches one of them, true otherwise

func MatchSkipString

func MatchSkipString(err IncompleteError, s string) MatchRule

MatchSkipString returns a MatchRule that matches the specified string and pushes the token with the 'IgnoreTokens' flag. The rule fails under the usual MatchRule conditions or when an error is found

func MatchSkipToken

func MatchSkipToken(err IncompleteError, tokenType text.TokenType) MatchRule

MatchSkipToken returns a MatchRule that matches the specified TokenType and pushes the token with the 'IgnoreTokens' flag. The rule fails under the usual MatchRule conditions or when an error is found

func MatchToken

func MatchToken(err IncompleteError, tokenType text.TokenType) MatchRule

MatchToken returns a MatchRule that matches the specified TokenType. The rule fails under the usual MatchRule conditions or when an error is found

func TryMatch

func TryMatch(tag int, test MatchRule, match MatchRule) MatchRule

TryMatch combines a test and a match and returns a MatchRule that parses the token stream only if the test matches

func TryMatchAnyString

func TryMatchAnyString(sList ...string) MatchRule

TryMatchAnyString returns a MatchRule that tests the first token in the slice against the given strings. If a match is found the MatchRule returns true, false otherwise

func TryMatchRepeat

func TryMatchRepeat(tag int, test MatchRule, match MatchRule) MatchRule

TryMatchRepeat combines a test and a match and returns a MatchRule that parses the token stream for as long as the test matches
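
As a sketch of repetition (tokenComma and tokenNumber are placeholder text.TokenType values; whether a comma should be skipped this way is an assumption about the grammar, not about this package):

// Hypothetical sketch: match a ", <number>" tail zero or more times.
errNumber := parsing.NewIncompleteError(parsing.ErrorExpectedToken, "expected a number")

listTail := parsing.TryMatchRepeat(parsing.DefaultAppend,
	parsing.TryMatchToken(tokenComma, false), // test only: is the next token a comma?
	parsing.MatchAll(parsing.DefaultAppend,
		parsing.MatchSkipToken(errNumber, tokenComma), // consume the comma, marked as ignorable
		parsing.MatchToken(errNumber, tokenNumber),    // then require a number
	),
)
_ = listTail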

func TryMatchToken

func TryMatchToken(tokenType text.TokenType, skip bool) MatchRule

TryMatchToken returns a MatchRule that matches the specified TokenType. The skip argument tells the test to skip the tested token. The rule returns false if the test fails, true otherwise

func TryNotMatchToken

func TryNotMatchToken(tokenType text.TokenType, skip bool) MatchRule

TryNotMatchToken returns a MatchRule that tests the specified TokenType and inverts the result: it returns true if the test fails, false otherwise

type MultiMatch

type MultiMatch struct {
	// contains filtered or unexported fields
}

MultiMatch is a collection of MatchRules keyed by their initial token

func NewMultiMatch

func NewMultiMatch() MultiMatch

NewMultiMatch returns a new MultiMatch

func (*MultiMatch) AddMatch

func (mMatch *MultiMatch) AddMatch(tType text.TokenType, match MatchRule)

AddMatch adds a match to the match map

func (*MultiMatch) MatchWithMap

func (mMatch *MultiMatch) MatchWithMap(err IncompleteError) MatchRule

MatchWithMap returns a MatchRule that selects a match based on the first token found. The returned MatchRule returns false if EOF is reached or if the token is not known
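
A sketch of dispatching on the first token; tokenLabel, tokenOpcode, labelRule and opcodeRule are placeholders defined elsewhere:

// Hypothetical sketch: choose a rule based on the first token of a statement.
errUnknown := parsing.NewIncompleteError(parsing.ErrorExpectedToken, "unknown statement")

mMatch := parsing.NewMultiMatch()
mMatch.AddMatch(tokenLabel, labelRule)   // placeholder token type and rule
mMatch.AddMatch(tokenOpcode, opcodeRule) // placeholder token type and rule

statement := mMatch.MatchWithMap(errUnknown)
_ = statement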

type ParserState

type ParserState interface {
	ReportError(wrong text.Token, err ParsingErrorType, msg string) //ReportError reports an error on a token
	Pull(tokens *[]text.Token) int                                  //Pull puts new tokens on the parser stream; see the type documentation
	Push(tokens []text.Token, tag int)                              //Push pushes parsed tokens onto the internal collection
	Mark() (int, int)                                               //Mark returns a key needed to assemble the CST
	ToCST(markT int, markL, tag int)                                //ToCST builds a CST from the tokens parsed since the mark and tags it with tag
}

ParserState supplies functions to interact with the user, to pull tokens from the stream, and to push tokens to the parsed stream. Pull puts new tokens on the parser stream and returns the number of tokens read; if EOF is reached, a TokenEOF must be added to the slice and the result must be 0

type ParsingError

type ParsingError struct {
	// contains filtered or unexported fields
}

ParsingError represents any type of error that can occur during parsing

func NewParsingError

func NewParsingError(t text.Token, eType ParsingErrorType, err string) ParsingError

NewParsingError creates a new ParsingError

func (ParsingError) Error

func (err ParsingError) Error() string

Error returns the error message

func (ParsingError) GetLine

func (err ParsingError) GetLine() uint

GetLine returns the line of the error
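
For instance, a minimal sketch of building and printing a ParsingError (badToken stands for a token obtained from a TokenSource):

// Hypothetical sketch: wrap an offending token in a ParsingError.
perr := parsing.NewParsingError(badToken, parsing.ErrorDuplicatedLabel, "label already defined")
fmt.Println(perr.Error(), "on line", perr.GetLine())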

func (ParsingError) Report

func (err ParsingError) Report(ui ui.UI, line ui.UIPrintable)

Report reports the error on the given UI together with the offending line

type ParsingErrorType

type ParsingErrorType int

ParsingErrorType is an enum representing types of errors

func (ParsingErrorType) ReportToken

func (eType ParsingErrorType) ReportToken(ui ui.UI, token text.Token, b TokenSource, arg string)

ReportToken reports the error on a token

func (ParsingErrorType) String

func (eType ParsingErrorType) String() string

String implements the Stringer interface for ParsingErrorType

type SourceLibrary

type SourceLibrary struct {
	// contains filtered or unexported fields
}

SourceLibrary lists all parsed sources

func NewSourceLibrary

func NewSourceLibrary() *SourceLibrary

NewSourceLibrary returns a new SourceLibrary

func (*SourceLibrary) AddSource

func (p *SourceLibrary) AddSource(sourceName string, isFile bool, source []*text.SourceLine) (string, error)

AddSource adds the given source lines under the specified source name. It returns the inserted name and an error if one occurred

func (*SourceLibrary) GetSource

func (p *SourceLibrary) GetSource(fullName string) []*text.SourceLine

GetSource returns the full source identified by fullName

func (*SourceLibrary) IndexOf

func (p *SourceLibrary) IndexOf(sourceName string) int

IndexOf returns the index of the source, or -1 if it is not found
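
Putting the SourceLibrary calls together in a minimal sketch (the name "macros.asm" and the lines slice are made up for illustration):

// Hypothetical sketch: register an in-memory source and look it up again.
lib := parsing.NewSourceLibrary()

var lines []*text.SourceLine // obtained elsewhere, e.g. from TokenBuffer.Lines()

name, err := lib.AddSource("macros.asm", false, lines) // false: not a file on disk
if err != nil {
	// handle the error
}

if lib.IndexOf(name) >= 0 {
	saved := lib.GetSource(name)
	_ = saved
}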

type TokenBuffer

type TokenBuffer struct {
	// contains filtered or unexported fields
}

TokenBuffer keeps a one-line buffer of the tokens

func NewTokenBuffer

func NewTokenBuffer(name string, text io.Reader, options lexing.TokenMatchingOptions) *TokenBuffer

NewTokenBuffer returns a new TokenBuffer that reads from any io.Reader

func NewTokenBufferFromFile

func NewTokenBufferFromFile(file string, options lexing.TokenMatchingOptions) *TokenBuffer

NewTokenBufferFromFile returns a new TokenBuffer that reads from a file

func (*TokenBuffer) Lines

func (buffer *TokenBuffer) Lines() []*text.SourceLine

Lines returns all saved lines

func (*TokenBuffer) NextToken

func (buffer *TokenBuffer) NextToken(ui ui.UI) text.Token

NextToken returns the next token in the source file; empty lines are skipped
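
A minimal sketch of reading tokens from an in-memory source; opts (a lexing.TokenMatchingOptions) and console (a ui.UI implementation) are placeholders that must come from the module's lexing and ui packages:

// Hypothetical sketch: lex a small in-memory program token by token.
src := strings.NewReader("start: ADD r1, r2\n") // made-up assembly line
buffer := parsing.NewTokenBuffer("example", src, opts)

first := buffer.NextToken(console)  // first token of the line
second := buffer.NextToken(console) // next token; empty lines would be skipped
_, _ = first, second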

func (*TokenBuffer) SkipEndOfLine

func (buffer *TokenBuffer) SkipEndOfLine(ui ui.UI) text.Token

SkipEndOfLine returns the first token that is not an EOL

func (*TokenBuffer) SourceName

func (buffer *TokenBuffer) SourceName() string

SourceName returns the source name of the tokens

func (*TokenBuffer) SyncLines

func (buffer *TokenBuffer) SyncLines(syncLines func([]*text.SourceLine))

SyncLines passes the saved source lines to the given syncLines callback

type TokenSource

type TokenSource interface {
	SourceName() string                //SourceName returns the name of the source (may be a location)
	Lines() []*text.SourceLine         //Lines returns all saved lines
	NextToken(ui ui.UI) text.Token     //NextToken returns the next token in the source
	SkipEndOfLine(ui ui.UI) text.Token //SkipEndOfLine skips the next empty lines
}

TokenSource is the input of the parser
