Documentation ¶
Overview ¶
Package syntax provides functions for building up an abstract syntax tree from a FISHI markdown file. It is the interface between the generated ictiobus compiler frontend for FISHI and the rest of the fishi package.
Index ¶
- Variables
- type AST
- type ActionsBlock
- type ActionsContent
- type AttrRef
- type Block
- type BlockType
- type ErrorBlock
- type GrammarBlock
- type GrammarContent
- type GrammarRule
- type ProductionAction
- type SemanticAction
- type SymbolActions
- type TokenEntry
- type TokenOption
- type TokenOptionType
- type TokensBlock
- type TokensContent
Constants ¶
This section is empty.
Variables ¶
var ( // HooksTable contains all bindings of STDS hook names to their // implementation functions. It is passed to the compiler frontend // automatically on creation and is used for translating parse trees // returned by the FISHI parser into an [AST]. HooksTable = trans.HookMap{ "make_fishispec": sdtsFnMakeFishispec, "block_list_append": sdtsFnBlockListAppend, "block_list_start": sdtsFnBlockListStart, "make_gblock": sdtsFnMakeGrammarBlock, "make_tblock": sdtsFnMakeTokensBlock, "make_ablock": sdtsFnMakeActionsBlock, "grammar_content_blocks_start_rule_list": sdtsFnGrammarContentBlocksStartRuleList, "tokens_content_blocks_start_entry_list": sdtsFnTokensContentBlocksStartEntryList, "actions_content_blocks_start_sym_actions": sdtsFnActionsContentBlocksStartSymbolActionsList, "actions_content_blocks_prepend": sdtsFnActionsContentBlocksPrepend, "tokens_content_blocks_prepend": sdtsFnTokensContentBlocksPrepend, "grammar_content_blocks_prepend": sdtsFnGrammarContentBlocksPrepend, "make_prod_action": sdtsFnMakeProdAction, "make_symbol_actions": sdtsFnMakeSymbolActions, "make_state_ins": sdtsFnMakeStateIns, "make_grammar_content_node": sdtsFnMakeGrammarContentNode, "make_actions_content_node": sdtsFnMakeActionsContentNode, "make_tokens_content_node": sdtsFnMakeTokensContentNode, "trim_string": sdtsFnTrimString, "make_discard_option": sdtsFnMakeDiscardOption, "make_stateshift_option": sdtsFnMakeStateshiftOption, "make_human_option": sdtsFnMakeHumanOption, "make_token_option": sdtsFnMakeTokenOption, "make_priority_option": sdtsFnMakePriorityOption, "ident": sdtsFnIdentity, "interpret_escape": sdtsFnInterpretEscape, "append_strings": sdtsFnAppendStrings, "append_strings_trimmed": sdtsFnAppendStringsTrimmed, "get_nonterminal": sdtsFnGetNonterminal, "get_int": sdtsFnGetInt, "get_terminal": sdtsFnGetTerminal, "rule_list_append": sdtsFnRuleListAppend, "entry_list_append": sdtsFnEntryListAppend, "actions_state_block_list_append": sdtsFnActionsStateBlockListAppend, 
"tokens_state_block_list_append": sdtsFnTokensStateBlockListAppend, "grammar_state_block_list_append": sdtsFnGrammarStateBlockListAppend, "symbol_actions_list_append": sdtsFnSymbolActionsListAppend, "prod_action_list_append": sdtsFnProdActionListAppend, "semantic_action_list_append": sdtsFnSemanticActionListAppend, "attr_ref_list_append": sdtsFnAttrRefListAppend, "attr_ref_list_start": sdtsFnAttrRefListStart, "get_attr_ref": sdtsFnGetAttrRef, "make_semantic_action": sdtsFnMakeSemanticAction, "make_prod_specifier_next": sdtsFnMakeProdSpecifierNext, "make_prod_specifier_index": sdtsFnMakeProdSpecifierIndex, "make_prod_specifier_literal": sdtsFnMakeProdSpecifierLiteral, "prod_action_list_start": sdtsFnProdActionListStart, "semantic_action_list_start": sdtsFnSemanticActionListStart, "rule_list_start": sdtsFnRuleListStart, "grammar_state_block_list_start": sdtsFnGrammarStateBlockListStart, "tokens_state_block_list_start": sdtsFnTokensStateBlockListStart, "actions_state_block_list_start": sdtsFnActionsStateBlockListStart, "symbol_actions_list_start": sdtsFnSymbolActionsListStart, "entry_list_start": sdtsFnEntryListStart, "string_list_append": sdtsFnStringListAppend, "token_opt_list_start": sdtsFnTokenOptListStart, "token_opt_list_append": sdtsFnTokenOptListAppend, "string_list_start": sdtsFnStringListStart, "string_list_list_start": sdtsFnStringListListStart, "string_list_list_append": sdtsFnStringListListAppend, "epsilon_string_list": sdtsFnEpsilonStringList, "make_rule": sdtsFnMakeRule, "make_token_entry": sdtsFnMakeTokenEntry, } )
Functions ¶
This section is empty.
Types ¶
type AST ¶ added in v0.7.0
type AST struct { // Nodes is the nodes that make up the AST. There will be one per top-level // FISHI section (%%grammar, %%tokens, %%actions) encountered in the // specification the AST represents. Nodes []Block }
AST is the abstract syntax tree of a FISHI spec.
type ActionsBlock ¶
type ActionsBlock struct { // Content is the content blocks that make up this section. There will be // one per state declared in the actions section this ActionsBlock was // created from. Content []ActionsContent }
ActionsBlock contains the contents of a single block of SDTS definition rules from a FISHI spec. It is represented in FISHI as an %%actions section.
func (ActionsBlock) Actions ¶
func (aab ActionsBlock) Actions() ActionsBlock
Actions returns this ActionsBlock. It is included to implement Block.
func (ActionsBlock) Grammar ¶
func (aab ActionsBlock) Grammar() GrammarBlock
Grammar panics immediately. It is included to implement Block.
func (ActionsBlock) String ¶
func (aab ActionsBlock) String() string
String returns a string representation of the ActionsBlock.
func (ActionsBlock) Tokens ¶
func (aab ActionsBlock) Tokens() TokensBlock
Tokens panics immediately. It is included to implement Block.
type ActionsContent ¶
type ActionsContent struct { // Actions is a series of SDTS actions that each apply to a given head // symbol of a grammar rule. Actions []SymbolActions // State is the state that the actions apply to. It will always be the empty // string. State string // Src is the first token that represents a part of this ActionsContent as // lexed from a FISHI spec. Src lex.Token // SrcState is the first token that represents a part of the %state // directive that defines the state that this ActionsContent is for. As // states for actions sections other than the default are not supported, // this will always be nil. SrcState lex.Token }
ActionsContent is a series of syntax-directed translation actions grouped with the state they are used in from an %%actions section of a FISHI spec. Note that multiple states for a syntax-directed translation scheme are not supported, so State will always be the empty string.
func (ActionsContent) String ¶
func (content ActionsContent) String() string
String returns a string representation of the ActionsContent.
type AttrRef ¶
type AttrRef struct { // Symbol is the symbol name included in the AttrRef in a FISHI spec. This // will only be set if the AttrRef refers to a particular symbol by name; // otherwise, Symbol will be set to the empty string. Symbol string // Terminal is whether Symbol refers to a terminal symbol. Terminal bool // Head is whether the AttrRef refers to the Head symbol. Head bool // TermInProd is whether the AttrRef refers to the nth terminal symbol in // the production. If true, Occurrence is n. TermInProd bool // NontermInProd is whether the AttrRef refers to the nth non-terminal symbol // in the production. If true, Occurrence is n. NontermInProd bool // SymInProd is whether the AttrRef refers to the nth symbol in the // production. If true, Occurrence is n. SymInProd bool // Occurrence is the index of the reference, and represents n when the // AttrRef refers to the nth occurrence of some criteria. It is not valid if // Head is true. Occurrence int // Attribute is the name of the attribute being referred to. Attribute string // Src is the first token that represents a part of this AttrRef as lexed // from a FISHI spec. Src lex.Token }
AttrRef is a reference to an attribute of a particular symbol in a grammar rule production. It consists of two parts; the symbol it refers to, and the name of the attribute. An AttrRef has five different ways it may refer to a symbol: The head symbol, the nth symbol in the production, the nth non-terminal symbol in the production, the nth terminal symbol in the production, or the nth instance of a symbol with a particular name in the production (with whether or not the symbol name refers to a terminal explicitly denoted).
func ParseAttrRef ¶
ParseAttrRef does a simple parse on an attribute reference from a string that makes it up. Does not set tok; caller must do so if needed.
type Block ¶
type Block interface { // Type returns the type of the Block. Type() BlockType // Grammar converts the Block into a GrammarBlock. Panics if the Block's // type is not BlockTypeGrammar. Grammar() GrammarBlock // Tokens converts the Block into a TokensBlock. Panics if the Block's type // is not BlockTypeTokens. Tokens() TokensBlock // Actions converts the Block into an ActionsBlock. Panics if the Block's // type is not BlockTypeActions. Actions() ActionsBlock }
Block is a main dividing section of a FISHI spec. It contains either grammar rules, token definitions, or syntax-directed translation rules for the language described by the spec it is associated with.
type BlockType ¶
type BlockType int
BlockType is the type of a FISHI Block.
const ( // BlockTypeError is an unrecognized type of FISHI block. BlockTypeError BlockType = iota // BlockTypeGrammar denotes a %%grammar section from a spec written in // FISHI. BlockTypeGrammar // BlockTypeTokens denotes a %%tokens section from a spec written in FISHI. BlockTypeTokens // BlockTypeActions denotes an %%actions section from a spec written in // FISHI. BlockTypeActions )
type ErrorBlock ¶
type ErrorBlock struct{}
ErrorBlock is a Block representing an unrecognized kind of FISHI section.
func (ErrorBlock) Actions ¶
func (errBlock ErrorBlock) Actions() ActionsBlock
Actions panics immediately. It is included to implement Block.
func (ErrorBlock) Grammar ¶
func (errBlock ErrorBlock) Grammar() GrammarBlock
Grammar panics immediately. It is included to implement Block.
func (ErrorBlock) String ¶
func (errBlock ErrorBlock) String() string
String returns a string representation of the ErrorBlock.
func (ErrorBlock) Tokens ¶
func (errBlock ErrorBlock) Tokens() TokensBlock
Tokens panics immediately. It is included to implement Block.
type GrammarBlock ¶
type GrammarBlock struct { // Content is the content blocks that make up this section. There will be // one per state declared in the grammar section this GrammarBlock was // created from. Content []GrammarContent }
GrammarBlock contains the contents of a single block of grammar instructions from a FISHI spec. It is represented in FISHI as a %%grammar section.
func (GrammarBlock) Actions ¶
func (agb GrammarBlock) Actions() ActionsBlock
Actions panics immediately. It is included to implement Block.
func (GrammarBlock) Grammar ¶
func (agb GrammarBlock) Grammar() GrammarBlock
Grammar returns this GrammarBlock. It is included to implement Block.
func (GrammarBlock) String ¶
func (agb GrammarBlock) String() string
String returns a string representation of the GrammarBlock.
func (GrammarBlock) Tokens ¶
func (agb GrammarBlock) Tokens() TokensBlock
Tokens panics immediately. It is included to implement Block.
type GrammarContent ¶
type GrammarContent struct { // Rules is the rules in the GrammarContent. Rules []GrammarRule // State is the state that the rules apply to. It will always be the empty // string. State string // Src is the first token that represents a part of this GrammarContent as // lexed from a FISHI spec. Src lex.Token // SrcState is the first token that represents a part of the %state // directive that defines the state that this GrammarContent is for. As // states for grammar sections other than the default are not supported, // this will always be nil. SrcState lex.Token }
GrammarContent is a series of grammar rules grouped with the state they are used in from a %%grammar section of a FISHI spec. Note that multiple states for a grammar are not supported, so State will always be the empty string.
func (GrammarContent) String ¶
func (content GrammarContent) String() string
String returns a string representation of the GrammarContent.
type GrammarRule ¶
type GrammarRule struct { // Rule holds the non-terminal and all productions parsed for this // GrammarRule. Rule grammar.Rule // Src is the first token that represents a part of this GrammarRule as // lexed from a FISHI spec. Src lex.Token }
GrammarRule is a single complete grammar rule from a %%grammar block of a FISHI spec. It includes the non-terminal symbol at the head of the rule, and one or more productions that can be derived from that non-terminal.
func (GrammarRule) String ¶
func (agr GrammarRule) String() string
String returns a string representation of the GrammarRule.
type ProductionAction ¶
type ProductionAction struct { // ProdNext is whether the production referred to is left unspecified, ergo // is the 'next' production after the last one (or the first production, if // this is the first ProductionAction for the symbol). ProdNext bool // ProdIndex is the index of the production within all productions of the // symbol that this action is for. ProdIndex int // ProdLiteral is the literal symbols in the production of the symbol that // this action is for. ProdLiteral []string // Actions is the actions to perform when the production specified by this // ProductionAction is encountered during syntax-directed translation. Actions []SemanticAction // Src is the first token that represents a part of this ProductionAction as // lexed from a FISHI spec. Src lex.Token // SrcVal is where the production action "value" is set; that is, the index // or production. It will be nil if it is simply a prodNext. SrcVal lex.Token }
ProductionAction is a series of syntax-directed definitions defined for a production of a non-terminal symbol.
func (ProductionAction) String ¶
func (pa ProductionAction) String() string
String returns a string representation of the ProductionAction.
type SemanticAction ¶
type SemanticAction struct { // LHS is the left-hand side of the action. It is a reference to the // attribute and symbol node it should assign the result of the action to. LHS AttrRef // Hook is the name of the hook function to call. Hook string // With is references to the attributes whose values should be used as // arguments to the hook function. With []AttrRef // SrcHook is the first token that represents the name of the hook as // lexed from a FISHI spec. SrcHook lex.Token // Src is the first token that represents a part of this SemanticAction as // lexed from a FISHI spec. Src lex.Token }
SemanticAction is a single syntax-directed action to perform. It takes some arguments from symbols in the grammar rule it is defined on, passes those to a hook function, and assigns the result to the attribute of another symbol in the node in the parse tree it is called on.
func (SemanticAction) String ¶
func (sa SemanticAction) String() string
String returns a string representation of the SemanticAction.
type SymbolActions ¶
type SymbolActions struct { // Symbol is the non-terminal that the Actions are defined for. Symbol string // Actions is the actions for the productions of Symbol. Actions []ProductionAction // Src is the first token that represents a part of this SymbolActions as // lexed from a FISHI spec. Src lex.Token // SrcSym is the first token that represents a part of the symbol as lexed // from a FISHI spec. SrcSym lex.Token }
SymbolActions is a series of SDTS actions defined for productions of a non-terminal symbol.
func (SymbolActions) String ¶
func (sa SymbolActions) String() string
String returns a string representation of the SymbolActions.
type TokenEntry ¶
type TokenEntry struct { // Pattern is the pattern that the lexer must recognize before performing // the actions indicated by the options associated with that pattern. Pattern string // Discard is true if the entry contains a %discard directive. Discard bool // Shift is set to the value of the %stateshift directive in the entry. If // the entry does not contain one, Shift will be an empty string. Shift string // Token is set to the value of the %token directive in the entry. If the // entry does not contain one, Token will be an empty string. Token string // Human is set to the value of the %human directive in the entry. If the // entry does not contain one, Human will be an empty string. Human string // Priority is set to the value of the %priority directive in the entry. If // the entry does not contain one, Priority will be 0, although note that // this cannot be distinguished from a %priority directive set to 0 without // also consulting SrcPriority. Priority int // Src is the first token that represents a part of this TokenEntry as lexed // from a FISHI spec. Src lex.Token // SrcDiscard is all first tokens of any %discard directives that are a part // of this TokenEntry as lexed from a FISHI spec. SrcDiscard []lex.Token // SrcShift is all first tokens of any %stateshift directives that are a // part of this TokenEntry as lexed from a FISHI spec. SrcShift []lex.Token // SrcToken is all first tokens of any %token directives that are a part of // this TokenEntry as lexed from a FISHI spec. SrcToken []lex.Token // SrcHuman is all first tokens of any %human directives that are a part of // this TokenEntry as lexed from a FISHI spec. SrcHuman []lex.Token // SrcPriority is all first tokens of any %priority directives that are a // part of this TokenEntry as lexed from a FISHI spec. SrcPriority []lex.Token }
TokenEntry is a single full entry from a %%tokens block of a FISHI spec. It includes the pattern for the lexer to recognize as well as options indicating what the lexer should do once that pattern is matched.
func (TokenEntry) String ¶
func (entry TokenEntry) String() string
String returns a string representation of the TokenEntry.
type TokenOption ¶
type TokenOption struct { // Type is the type of the TokenOption. Type TokenOptionType // Value is the string value of the option as lexed from a FISHI spec. Only // certain types of TokenOptions will have a value; for types that do not // accept a value, Value will be the empty string. Value string // Src is the token that represents this TokenOption as lexed from a FISHI // spec. Src lex.Token }
TokenOption is a directive associated with a pattern in a %%tokens block of a FISHI spec.
type TokenOptionType ¶
type TokenOptionType int
TokenOptionType is the type of option that a TokenOption represents.
const ( // TokenOptDiscard is a token option type indicating that a pattern found by // the lexer should be discarded. It is represented by the %discard // directive in FISHI source code. TokenOptDiscard TokenOptionType = iota // TokenOptStateshift is a token option type indicating that a pattern found // by the lexer should make it change to a new state. It is represented by // the %stateshift directive in FISHI source code. TokenOptStateshift // TokenOptToken is a token option type indicating that a pattern found by // the lexer should be lexed as a new token and passed to the parser. It is // represented by the %token directive in FISHI source code. TokenOptToken // TokenOptHuman is a token option type that gives the human readable name // for a lexed token. It is represented by the %human directive in FISHI // source code. TokenOptHuman // TokenOptPriority is a token option type indicating that a pattern should // be treated as a certain priority by the lexer. It is represented by the // %priority directive in FISHI source code. TokenOptPriority )
type TokensBlock ¶
type TokensBlock struct { // Content is the content blocks that make up this section. There will be // one per state declared in the tokens section this TokensBlock was // created from. Content []TokensContent }
TokensBlock contains the contents of a single block of token declarations from a FISHI spec. It is represented in FISHI as a %%tokens section.
func (TokensBlock) Actions ¶
func (atb TokensBlock) Actions() ActionsBlock
Actions panics immediately. It is included to implement Block.
func (TokensBlock) Grammar ¶
func (atb TokensBlock) Grammar() GrammarBlock
Grammar panics immediately. It is included to implement Block.
func (TokensBlock) String ¶
func (atb TokensBlock) String() string
String returns a string representation of the TokensBlock.
func (TokensBlock) Tokens ¶
func (atb TokensBlock) Tokens() TokensBlock
Tokens returns this TokensBlock. It is included to implement Block.
type TokensContent ¶
type TokensContent struct { // Entries is the token entries for the lexer state. Entries []TokenEntry // State is the lexer state that the Entries are defined for. State string // Src is the first token that represents a part of this TokensContent as // lexed from a FISHI spec. Src lex.Token // SrcState is the first token that represents a part of the %state // directive that defines the state that this TokensContent is for. If it is // for the default state, this will be nil. SrcState lex.Token }
TokensContent is a series of token entries grouped with the lexer state they are used in from a %%tokens section of a FISHI spec.
func (TokensContent) String ¶
func (content TokensContent) String() string
String returns a string representation of the TokensContent.