syntax


Documentation

Index

Constants

This section is empty.

Variables

var (
	ErrUnexpectedToken = UnexpectedTokenError{}
)

Functions

This section is empty.

Types

type Cursor

type Cursor struct {
	Line, Column int
}

Cursor represents a cursor position within a document, i.e. a line and a column number, both starting at 1.

type Error

type Error struct {
	// The file path in which the error occurred (optional)
	Filename string

	// The cursor position of the error
	Cursor

	// The concrete error
	Err error
}

Error represents a syntax error at a specific cursor position.

func (*Error) Error

func (e *Error) Error() string

func (*Error) Unwrap

func (e *Error) Unwrap() error
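
Because Error and Unwrap are defined on *Error, callers can recover the filename and cursor position from a wrapped error with errors.As. The following is a minimal sketch; the import path is a placeholder, since the module path is elided above.

package main

import (
	"errors"
	"fmt"

	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

func report(err error) {
	var serr *syntax.Error
	if errors.As(err, &serr) {
		// Cursor is embedded, so Line and Column are promoted fields.
		fmt.Printf("%s:%d:%d: %v\n", serr.Filename, serr.Line, serr.Column, serr.Err)
		return
	}
	fmt.Println(err)
}

func main() {
	err := fmt.Errorf("parse config: %w", &syntax.Error{
		Filename: "config.txt",
		Cursor:   syntax.Cursor{Line: 3, Column: 7},
		Err:      errors.New("unexpected token"),
	})
	report(err)
}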

type Lexer

type Lexer struct {
	// The input of this lexer. Typically a bufio.Reader.
	Input io.RuneReader

	// The cursor position marking the start of the current token.
	TokenPosition Cursor

	// The cursor position at which the lexer will be reading next.
	NextPosition Cursor

	// The cursor position of the current rune.
	Position Cursor
	// contains filtered or unexported fields
}

func NewLexer

func NewLexer(input io.Reader, init StateFunc) *Lexer

func (*Lexer) AcceptFunc

func (l *Lexer) AcceptFunc(fn func(rune) bool) (rune, error)

func (*Lexer) AcceptRune

func (l *Lexer) AcceptRune(exp rune) (rune, error)

func (*Lexer) AcceptString

func (l *Lexer) AcceptString(exp string) (string, error)

func (*Lexer) AcceptUntil

func (l *Lexer) AcceptUntil(fn func(rune) bool) (string, error)

func (*Lexer) Discard

func (l *Lexer) Discard()

func (*Lexer) Emit

func (l *Lexer) Emit(typ TokenType, val interface{})

func (*Lexer) Error

func (l *Lexer) Error(err error) StateFunc

func (*Lexer) Errorf

func (l *Lexer) Errorf(format string, args ...interface{}) StateFunc

func (*Lexer) Next

func (l *Lexer) Next() Token

func (*Lexer) PeekRune

func (l *Lexer) PeekRune() (rune, int, error)

func (*Lexer) ReadRune

func (l *Lexer) ReadRune() (r rune, w int, err error)

func (*Lexer) Reset

func (l *Lexer) Reset()

func (*Lexer) Token

func (l *Lexer) Token() string

func (*Lexer) UnreadRune

func (l *Lexer) UnreadRune() error
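
The Lexer is driven by state functions (see StateFunc below), in the style of a state-machine lexer. The exact semantics are not documented in this index, so the sketch below rests on assumptions: that Next runs state functions until a token is emitted, that returning nil from a state function ends the machine, that Discard drops the pending token text, and that a TokenEOF token is returned once input is exhausted. The import path is a placeholder.

package main

import (
	"fmt"
	"strings"
	"unicode"

	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// lexWord accepts a run of letters, emits it as an identifier token, and
// hands off to lexSpace. Returning nil is assumed to end the state machine.
func lexWord(l *syntax.Lexer) syntax.StateFunc {
	word, err := l.AcceptUntil(func(r rune) bool { return !unicode.IsLetter(r) })
	if word != "" {
		l.Emit(syntax.TokenIdentifier, word)
	}
	if err != nil {
		return nil // assumed: a read error (e.g. io.EOF) ends the state machine
	}
	return lexSpace
}

// lexSpace accepts a whitespace rune and discards it rather than emitting it.
func lexSpace(l *syntax.Lexer) syntax.StateFunc {
	if _, err := l.AcceptFunc(unicode.IsSpace); err != nil {
		return nil
	}
	l.Discard() // assumed: drops the accepted text without emitting a token
	return lexWord
}

func main() {
	l := syntax.NewLexer(strings.NewReader("hello world"), lexWord)
	for tok := l.Next(); tok.Type != syntax.TokenEOF; tok = l.Next() {
		fmt.Printf("%s %q\n", tok.Type, tok.Raw)
	}
}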

type Marshaler

type Marshaler interface {
	MarshalNode(*Node) error
	MarshalNodePost(*Node) error
}

type Node

type Node struct {
	// The type of node
	Type NodeType

	// The interpreted value of the node
	Value interface{}

	// The starting position of the node in the file (ignoring comments and whitespace)
	Position Cursor

	// The immediate sibling of the node (may be nil if no sibling)
	Sibling *Node

	// The first child of the node (may be nil if no children)
	Child *Node

	// Tokens contains the tokens that are part of this node element
	Tokens []Token

	// Suffix contains the tokens that are part of this node,
	// but appear after its children
	Suffix []Token
}

Node represents a node in a parse tree.

func (*Node) Marshal

func (node *Node) Marshal(marshaler Marshaler) error
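
The Marshaler interface pairs a pre-visit and a post-visit hook, which suggests that Marshal calls MarshalNode before a node's children and MarshalNodePost after them; that ordering is an assumption here. A minimal, library-style sketch that prints a parse tree with indentation (placeholder import path):

package treedump

import (
	"fmt"

	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// printer implements syntax.Marshaler, assuming MarshalNode runs before a
// node's children and MarshalNodePost runs after them.
type printer struct {
	depth int
}

func (p *printer) MarshalNode(n *syntax.Node) error {
	fmt.Printf("%*s%s %v\n", p.depth*2, "", n.Type, n.Value)
	p.depth++
	return nil
}

func (p *printer) MarshalNodePost(n *syntax.Node) error {
	p.depth--
	return nil
}

// Dump prints the parse tree produced by any syntax.Parser.
func Dump(p syntax.Parser) error {
	root, err := p.Parse()
	if err != nil {
		return err
	}
	return root.Marshal(&printer{})
}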

func (*Node) String

func (node *Node) String() string

func (*Node) Trim

func (node *Node) Trim(discard ...TokenType) Node
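
Trim returns a Node value rather than a pointer, so it can be used to obtain a cleaned-up copy. A minimal sketch, assuming (based on the signature alone) that Trim removes tokens of the given types from the node's Tokens and Suffix; the import path is a placeholder:

package nodeutil

import (
	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// stripTrivia returns a copy of n with whitespace, newline, and comment
// tokens discarded. The exact behavior of Trim is assumed, not documented here.
func stripTrivia(n *syntax.Node) syntax.Node {
	return n.Trim(syntax.TokenWhitespace, syntax.TokenNewline, syntax.TokenComment)
}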

type NodeType

type NodeType string
const (
	NodeDocument NodeType = "document"
	NodeMap      NodeType = "map"
	NodeList     NodeType = "list"
	NodeKeyPath  NodeType = "keypath"
	NodeString   NodeType = "string"
	NodeNumber   NodeType = "number"
	NodeBool     NodeType = "bool"
	NodeNil      NodeType = "nil"
	NodeDateTime NodeType = "datetime"
)

func (NodeType) String

func (typ NodeType) String() string

type Parser

type Parser interface {
	Parse() (*Node, error)
}

type Regexp

type Regexp struct {
	// contains filtered or unexported fields
}

Regexp is a more consistently-behaving regexp matcher. It uses regexp/syntax under the covers and guarantees that no runes are read unnecessarily. It is not particularly fast and does not optimize the regexp bytecode at all, but it is correct and conservative in its rune reading.

func CompileRegexp

func CompileRegexp(name, s string) (*Regexp, error)

func MustCompileRegexp

func MustCompileRegexp(name, s string) *Regexp

func (*Regexp) Accept

func (re *Regexp) Accept(l *Lexer) ([]string, error)

Accept consumes and returns the longest run of characters from the lexer stream that matches the regular expression, or returns an error if no match is found.
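
Since Accept takes a *Lexer, it is presumably meant to be called from inside a state function. The sketch below assumes that the returned []string holds the full match at index 0 (in the spirit of regexp.FindStringSubmatch), that the name argument of MustCompileRegexp is only a label for error messages, and that Next drives the state functions and returns the emitted token. The import path is a placeholder.

package main

import (
	"fmt"
	"strconv"
	"strings"

	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// numberRE matches a run of ASCII digits.
var numberRE = syntax.MustCompileRegexp("number", `[0-9]+`)

// lexNumber consumes a number from the lexer stream and emits it as a token.
// The assumption here is that matches[0] is the full matched text.
func lexNumber(l *syntax.Lexer) syntax.StateFunc {
	matches, err := numberRE.Accept(l)
	if err != nil {
		return l.Error(err)
	}
	n, err := strconv.Atoi(matches[0])
	if err != nil {
		return l.Error(err)
	}
	l.Emit(syntax.TokenNumber, n)
	return nil
}

func main() {
	l := syntax.NewLexer(strings.NewReader("42"), lexNumber)
	tok := l.Next()
	fmt.Printf("%s %q %v\n", tok.Type, tok.Raw, tok.Value)
}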

func (*Regexp) GoString

func (re *Regexp) GoString() string

func (*Regexp) String

func (re *Regexp) String() string

type StateFunc

type StateFunc func(*Lexer) StateFunc

type Token

type Token struct {
	// The type of this token.
	Type TokenType

	// The original string representation of this token.
	Raw string

	// The value interpreted from Raw (may be nil).
	Value interface{}

	// The starting position of this token.
	Start Cursor

	// The end position of this token.
	End Cursor
}

func (*Token) IsAny

func (tok *Token) IsAny(types ...TokenType) bool

IsAny reports whether the token's type is any of the specified token types.
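
A typical use is skipping trivia tokens in a hand-written parser. A small helper sketch (placeholder import path):

package parserutil

import (
	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// nextSignificant reads tokens from the lexer until it finds one that is not
// whitespace, a newline, or a comment.
func nextSignificant(l *syntax.Lexer) syntax.Token {
	for {
		tok := l.Next()
		if !tok.IsAny(syntax.TokenWhitespace, syntax.TokenNewline,
			syntax.TokenComment, syntax.TokenInlineComment) {
			return tok
		}
	}
}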

type TokenType

type TokenType string
const (
	TokenEOF           TokenType = ""
	TokenError         TokenType = "<error>"
	TokenComment       TokenType = "<comment>"
	TokenInlineComment TokenType = "<inline-comment>"
	TokenString        TokenType = "<string>"
	TokenNumber        TokenType = "<number>"
	TokenBool          TokenType = "<bool>"
	TokenIdentifier    TokenType = "<identifier>"
	TokenNil           TokenType = "<nil>"
	TokenNewline       TokenType = "<newline>"
	TokenWhitespace    TokenType = "<whitespace>"
)

func (TokenType) String

func (typ TokenType) String() string

type TokenTypeError

type TokenTypeError struct {
	Token Token
	Err   error
}

func (TokenTypeError) Error

func (e TokenTypeError) Error() string

func (TokenTypeError) Unwrap

func (e TokenTypeError) Unwrap() error

type UnexpectedTokenError

type UnexpectedTokenError []TokenType

func (UnexpectedTokenError) Error

func (e UnexpectedTokenError) Error() string
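
The sentinel ErrUnexpectedToken in the Variables section suggests errors.Is-style matching, but whether UnexpectedTokenError implements an Is method is not shown here, so the sketch below uses errors.As instead. That the slice carries the token types that were expected is an assumption based on the type's name; the import path is a placeholder.

package main

import (
	"errors"
	"fmt"

	// Placeholder import path; substitute the real module path of this package.
	"example.com/placeholder/syntax"
)

// describe reports the token types carried by an UnexpectedTokenError, if err
// wraps one. Whether those are the expected types is an assumption based on
// the type's name.
func describe(err error) {
	var ute syntax.UnexpectedTokenError
	if errors.As(err, &ute) {
		fmt.Printf("unexpected token; wanted one of %v\n", []syntax.TokenType(ute))
		return
	}
	fmt.Println(err)
}

func main() {
	describe(syntax.UnexpectedTokenError{syntax.TokenString, syntax.TokenNumber})
}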
