package analysis

v0.0.0-...-78453da
Published: Mar 20, 2015 License: Apache-2.0 Imports: 8 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var ErrInvalidDateTime = fmt.Errorf("unable to parse datetime with any of the layouts")

Functions

func BuildTermFromRunes

func BuildTermFromRunes(runes []rune) []byte
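
The helpers below operate on runes and terms. A usage sketch, written as a godoc-style example and assuming this package is imported as analysis (with fmt); BuildTermFromRunes appears to re-encode the runes as a UTF-8 term:

func ExampleBuildTermFromRunes() {
	runes := []rune("café")
	term := analysis.BuildTermFromRunes(runes)
	fmt.Printf("%s (%d bytes)\n", term, len(term))
	// expected: the UTF-8 encoding of the runes, "café" (5 bytes)
}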

func DeleteRune

func DeleteRune(in []rune, pos int) []rune
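
A sketch under the same assumptions; DeleteRune removes the rune at the given index:

func ExampleDeleteRune() {
	out := analysis.DeleteRune([]rune("héllo"), 1)
	fmt.Println(string(out))
	// expected: "hllo", the rune at index 1 is gone
}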

func InsertRune

func InsertRune(in []rune, pos int, r rune) []rune
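
The counterpart to DeleteRune; a sketch under the same assumptions:

func ExampleInsertRune() {
	out := analysis.InsertRune([]rune("hllo"), 1, 'e')
	fmt.Println(string(out))
	// expected: "hello", with 'e' inserted at index 1
}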

func RunesEndsWith

func RunesEndsWith(input []rune, suffix string) bool
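
A sketch under the same assumptions; RunesEndsWith reports whether the rune slice ends with the given suffix:

func ExampleRunesEndsWith() {
	fmt.Println(analysis.RunesEndsWith([]rune("walking"), "ing"))
	fmt.Println(analysis.RunesEndsWith([]rune("walk"), "ing"))
	// expected: true, then false
}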

func TruncateRunes

func TruncateRunes(input []byte, num int) []byte
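
A sketch under the same assumptions. The semantics here are inferred from the name and signature: num is taken to be the number of trailing runes to drop from the UTF-8 input:

func ExampleTruncateRunes() {
	out := analysis.TruncateRunes([]byte("walking"), 3)
	fmt.Printf("%s\n", out)
	// assumption: drops the 3 trailing runes, leaving "walk"
}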

Types

type Analyzer

type Analyzer struct {
	CharFilters  []CharFilter
	Tokenizer    Tokenizer
	TokenFilters []TokenFilter
}

func (*Analyzer) Analyze

func (a *Analyzer) Analyze(input []byte) TokenStream
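
Analyze presumably runs the input through the CharFilters, then the Tokenizer, then the TokenFilters, in that order. A minimal sketch with toy components; the import path, the pipeline order, the 1-based Position, and leaving CharFilters nil are all assumptions:

package main

import (
	"bytes"
	"fmt"

	"github.com/blevesearch/bleve/analysis" // import path assumed
)

// singleTokenizer is a toy Tokenizer that emits the whole input as one token.
type singleTokenizer struct{}

func (singleTokenizer) Tokenize(input []byte) analysis.TokenStream {
	return analysis.TokenStream{&analysis.Token{
		Term:     input,
		Start:    0,
		End:      len(input),
		Position: 1, // assumption: positions are 1-based
		Type:     analysis.AlphaNumeric,
	}}
}

// lowerCaseFilter is a toy TokenFilter that lowercases every term in place.
type lowerCaseFilter struct{}

func (lowerCaseFilter) Filter(ts analysis.TokenStream) analysis.TokenStream {
	for _, t := range ts {
		t.Term = bytes.ToLower(t.Term)
	}
	return ts
}

func main() {
	a := &analysis.Analyzer{
		Tokenizer:    singleTokenizer{},
		TokenFilters: []analysis.TokenFilter{lowerCaseFilter{}},
	}
	for _, tok := range a.Analyze([]byte("Hello Bleve")) {
		fmt.Printf("%s [%d:%d]\n", tok.Term, tok.Start, tok.End)
	}
}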

type ByteArrayConverter

type ByteArrayConverter interface {
	Convert([]byte) (interface{}, error)
}
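
An implementation sketch (analysis qualifier assumed as above): a converter that treats the stored bytes as a plain UTF-8 string. What the concrete converters in this module actually return is not documented here, so this is only illustrative:

// stringConverter interprets a byte slice as a UTF-8 string.
type stringConverter struct{}

func (stringConverter) Convert(in []byte) (interface{}, error) {
	return string(in), nil
}

// compile-time check that the interface is satisfied
var _ analysis.ByteArrayConverter = stringConverter{}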

type CharFilter

type CharFilter interface {
	Filter([]byte) []byte
}
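
CharFilters presumably rewrite the raw input bytes before tokenization. A toy sketch that normalizes Windows line endings (bytes import assumed):

// crlfCharFilter replaces "\r\n" with "\n" in the input.
type crlfCharFilter struct{}

func (crlfCharFilter) Filter(in []byte) []byte {
	return bytes.Replace(in, []byte("\r\n"), []byte("\n"), -1)
}

var _ analysis.CharFilter = crlfCharFilter{}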

type DateTimeParser

type DateTimeParser interface {
	ParseDateTime(string) (time.Time, error)
}
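
An implementation sketch that tries a list of layouts and falls back to the package's ErrInvalidDateTime; whether the concrete parsers in this module behave exactly like this is an assumption (time import assumed):

// layoutsDateTimeParser tries each layout in order and reports
// ErrInvalidDateTime when none of them match.
type layoutsDateTimeParser struct {
	layouts []string
}

func (p layoutsDateTimeParser) ParseDateTime(input string) (time.Time, error) {
	for _, layout := range p.layouts {
		if t, err := time.Parse(layout, input); err == nil {
			return t, nil
		}
	}
	return time.Time{}, analysis.ErrInvalidDateTime
}

var _ analysis.DateTimeParser = layoutsDateTimeParser{}

A parser built as layoutsDateTimeParser{layouts: []string{time.RFC3339, "2006-01-02"}} would accept either of those forms.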

type Token

type Token struct {
	Start    int       `json:"start"`
	End      int       `json:"end"`
	Term     []byte    `json:"term"`
	Position int       `json:"position"`
	Type     TokenType `json:"type"`
	KeyWord  bool      `json:"keyword"`
}

func (*Token) String

func (t *Token) String() string
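
A small sketch constructing a Token by hand; the exact String output format is defined by the package and not shown here:

func ExampleToken_String() {
	t := &analysis.Token{
		Term:     []byte("bleve"),
		Start:    0,
		End:      5,
		Position: 1,
		Type:     analysis.AlphaNumeric,
	}
	fmt.Println(t.String())
}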

type TokenFilter

type TokenFilter interface {
	Filter(TokenStream) TokenStream
}
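
TokenFilters presumably run over the stream produced by the Tokenizer. A toy sketch that drops tokens shorter than a minimum number of runes (unicode/utf8 import assumed):

// minLengthFilter removes tokens whose terms are shorter than min runes.
type minLengthFilter struct {
	min int
}

func (f minLengthFilter) Filter(in analysis.TokenStream) analysis.TokenStream {
	out := in[:0]
	for _, t := range in {
		if utf8.RuneCount(t.Term) >= f.min {
			out = append(out, t)
		}
	}
	return out
}

var _ analysis.TokenFilter = minLengthFilter{}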

type TokenFreq

type TokenFreq struct {
	Term      []byte
	Locations []*TokenLocation
}

type TokenFrequencies

type TokenFrequencies []*TokenFreq

func TokenFrequency

func TokenFrequency(tokens TokenStream) TokenFrequencies

func (TokenFrequencies) MergeAll

func (tfs TokenFrequencies) MergeAll(remoteField string, other TokenFrequencies) TokenFrequencies
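
A usage sketch: TokenFrequency presumably groups tokens by term into TokenFreq entries carrying their locations, and MergeAll presumably folds in frequencies computed for another field, tagging those locations with remoteField. The field name "other_field" is purely illustrative:

func ExampleTokenFrequency() {
	tokens := analysis.TokenStream{
		&analysis.Token{Term: []byte("dog"), Start: 0, End: 3, Position: 1},
		&analysis.Token{Term: []byte("dog"), Start: 8, End: 11, Position: 3},
	}
	freqs := analysis.TokenFrequency(tokens)
	for _, tf := range freqs {
		fmt.Printf("%s appears in %d location(s)\n", tf.Term, len(tf.Locations))
	}
	// expected: a single entry for "dog" with two locations

	other := analysis.TokenFrequency(analysis.TokenStream{
		&analysis.Token{Term: []byte("cat"), Start: 0, End: 3, Position: 1},
	})
	merged := freqs.MergeAll("other_field", other)
	fmt.Println(len(merged))
}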

type TokenLocation

type TokenLocation struct {
	Field    string
	Start    int
	End      int
	Position int
}

type TokenMap

type TokenMap map[string]bool

func NewTokenMap

func NewTokenMap() TokenMap

func (TokenMap) AddToken

func (t TokenMap) AddToken(token string)

func (TokenMap) LoadBytes

func (t TokenMap) LoadBytes(data []byte) error

func (TokenMap) LoadFile

func (t TokenMap) LoadFile(filename string) error

func (TokenMap) LoadLine

func (t TokenMap) LoadLine(line string)
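
TokenMap is a set of terms, typically used for word lists such as stop words. A usage sketch; the one-token-per-line format accepted by LoadBytes is an assumption based on common stop-word files:

func ExampleNewTokenMap() {
	tm := analysis.NewTokenMap()
	tm.AddToken("a")
	if err := tm.LoadBytes([]byte("and\nor\nthe\n")); err != nil {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println(tm["the"], tm["dog"])
	// expected: true false
}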

type TokenStream

type TokenStream []*Token

type TokenType

type TokenType int

const (
	AlphaNumeric TokenType = iota
	Ideographic
	Numeric
	DateTime
	Shingle
	Single
	Double
)
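
Code that treats different kinds of tokens differently can switch on Type; a tiny sketch:

func ExampleTokenType() {
	tok := &analysis.Token{Term: []byte("42"), Type: analysis.Numeric}
	switch tok.Type {
	case analysis.Numeric, analysis.DateTime:
		fmt.Println("non-text token")
	default:
		fmt.Println("text token")
	}
	// expected: non-text token
}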

type Tokenizer

type Tokenizer interface {
	Tokenize([]byte) TokenStream
}
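
An implementation sketch: a whitespace Tokenizer that records byte offsets and 1-based positions (the exact meaning of Start, End, and Position is an assumption; unicode and unicode/utf8 imports assumed):

// spaceTokenizer is a toy Tokenizer that splits the input on Unicode
// whitespace.
type spaceTokenizer struct{}

func (spaceTokenizer) Tokenize(input []byte) analysis.TokenStream {
	var rv analysis.TokenStream
	start, pos := -1, 0
	for i := 0; i < len(input); {
		r, size := utf8.DecodeRune(input[i:])
		if unicode.IsSpace(r) {
			if start >= 0 {
				pos++
				rv = append(rv, &analysis.Token{
					Term:     input[start:i],
					Start:    start,
					End:      i,
					Position: pos,
					Type:     analysis.AlphaNumeric,
				})
				start = -1
			}
		} else if start < 0 {
			start = i
		}
		i += size
	}
	if start >= 0 {
		pos++
		rv = append(rv, &analysis.Token{
			Term:     input[start:],
			Start:    start,
			End:      len(input),
			Position: pos,
			Type:     analysis.AlphaNumeric,
		})
	}
	return rv
}

var _ analysis.Tokenizer = spaceTokenizer{}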
