llm

package v2.3.1
Published: Apr 26, 2024 License: Apache-2.0 Imports: 12 Imported by: 0

Documentation

Constants

const (
	DefaultOpenAIModel           = openai.GPT3Dot5Turbo
	DefaultOpenAIModelTokenLimit = "4096"
)
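Note that DefaultOpenAIModelTokenLimit is declared as a string, not an int. A minimal sketch of turning it into a number for comparison against token counts (strconv, log, and fmt are standard library; the surrounding usage is illustrative):

limit, err := strconv.Atoi(llm.DefaultOpenAIModelTokenLimit)
if err != nil {
	// the constant is "4096", so a parse failure would indicate a programming error
	log.Fatal(err)
}
fmt.Printf("default token limit: %d tokens\n", limit)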

Variables

This section is empty.

Functions

func GetCacheKey

func GetCacheKey(provider string, sEnc string) string
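A minimal usage sketch, assuming sEnc is an already-encoded form of the prompt (the encoding the package expects is not documented here; the values below are illustrative):

encodedPrompt := "..." // however the caller encodes the prompt; not specified by this package
key := llm.GetCacheKey("openai", encodedPrompt)
fmt.Println("cache key:", key)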

func NumTokensFromMessages

func NumTokensFromMessages(messages []openai.ChatCompletionMessage, model string) (num_tokens int, err error)

func NumTokensFromPrompt

func NumTokensFromPrompt(prompt string, model string) (num_tokens int, err error)
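A hedged sketch of counting tokens before issuing a request, assuming the openai package referenced above is github.com/sashabaranov/go-openai (the message content is illustrative):

messages := []openai.ChatCompletionMessage{
	{Role: openai.ChatMessageRoleUser, Content: "Summarize the build log in two sentences."},
}
numTokens, err := llm.NumTokensFromMessages(messages, llm.DefaultOpenAIModel)
if err != nil {
	log.Fatal(err)
}
fmt.Printf("prompt uses %d tokens\n", numTokens)

The same pre-flight check works for a plain string via NumTokensFromPrompt(prompt, llm.DefaultOpenAIModel).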

Types

type ILLM

type ILLM interface {
	Configure(config LLMConfig) error
	GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)
	Parse(ctx context.Context, prompt string, cache cache.ICache, options ...ParamOption) (string, error)
	GetName() string
}

func NewClient

func NewClient(provider string) (ILLM, error)
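A minimal end-to-end sketch. The import path, the provider string "openai", and the environment variable name are assumptions; the LLMConfig field values (type defined below) are illustrative:

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	llm "example.com/yourmodule/pkg/llm" // placeholder; use this module's actual import path
)

func main() {
	client, err := llm.NewClient("openai")
	if err != nil {
		log.Fatal(err)
	}

	cfg := llm.LLMConfig{
		Name:  "openai",
		Model: llm.DefaultOpenAIModel,
		Token: os.Getenv("OPENAI_API_KEY"),
	}
	if err := client.Configure(cfg); err != nil {
		log.Fatal(err)
	}

	out, err := client.GetCompletion(context.Background(), "Explain what a goroutine is in one sentence.")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}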

type LLMConfig

type LLMConfig struct {
	Name    string
	Model   string
	Token   string
	BaseURL string
	Proxy   string
	APIType string
}

func (*LLMConfig) GetAPIType

func (p *LLMConfig) GetAPIType() string

func (*LLMConfig) GetBaseURL

func (p *LLMConfig) GetBaseURL() string

func (*LLMConfig) GetModel

func (p *LLMConfig) GetModel() string

func (*LLMConfig) GetName

func (p *LLMConfig) GetName() string

func (*LLMConfig) GetProxy

func (p *LLMConfig) GetProxy() string

func (*LLMConfig) GetToken

func (p *LLMConfig) GetToken() string

type OpenAIClient

type OpenAIClient struct {
	// contains filtered or unexported fields
}

func (*OpenAIClient) Configure

func (c *OpenAIClient) Configure(config LLMConfig) error

func (*OpenAIClient) GetCompletion

func (c *OpenAIClient) GetCompletion(ctx context.Context, prompt string, options ...ParamOption) (string, error)

TODO: add the ability to supply multiple messages.

func (*OpenAIClient) GetName

func (a *OpenAIClient) GetName() string

func (*OpenAIClient) Parse

func (a *OpenAIClient) Parse(ctx context.Context, prompt string, cache cache.ICache, options ...ParamOption) (string, error)
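A hedged sketch of Parse, assuming client was obtained and configured as in the NewClient example and that a concrete implementation of cache.ICache is available from the module's cache package (its construction lives outside this package and is not shown):

var store cache.ICache // obtain a concrete cache from the module's cache package
answer, err := client.Parse(context.Background(), "Explain this error message.", store)
if err != nil {
	log.Fatal(err)
}
fmt.Println(answer)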

type ParamOption

type ParamOption func(*ParamOptions)

ParamOption is a function that configures a ParamOptions.

func WithLogitBias

func WithLogitBias(logitBias map[string]int) ParamOption

func WithMaxTokens

func WithMaxTokens(maxTokens int) ParamOption

func WithModel

func WithModel(model string) ParamOption

func WithOptions

func WithOptions(options ParamOptions) ParamOption

func WithStopWords

func WithStopWords(stopWords []string) ParamOption

func WithTemperature

func WithTemperature(temperature float32) ParamOption
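A sketch of passing options on a single call, assuming the client from the NewClient example; the option values are illustrative:

out, err := client.GetCompletion(context.Background(), "List three uses of channels in Go.",
	llm.WithModel(llm.DefaultOpenAIModel),
	llm.WithMaxTokens(256),
	llm.WithTemperature(0.2),
	llm.WithStopWords([]string{"\n\n"}),
)
if err != nil {
	log.Fatal(err)
}
fmt.Println(out)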

type ParamOptions

type ParamOptions struct {
	// Model is the model to use.
	Model string `json:"model"`
	// MaxTokens is the maximum number of tokens to generate.
	MaxTokens int `json:"max_tokens"`
	// Temperature is the temperature for sampling, between 0 and 1.
	Temperature float32 `json:"temperature"`
	// StopWords is a list of words to stop on.
	StopWords []string       `json:"stop_words"`
	LogitBias map[string]int `json:"logit_bias"`
}

ParamOptions is the set of request parameters configured by ParamOption functions.

func ValidOptions

func ValidOptions(options ParamOptions) ParamOptions
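ValidOptions presumably fills in or normalizes missing fields (its exact rules are not documented here). A sketch of building a reusable ParamOptions value and applying it in one go with WithOptions, again assuming the client from the NewClient example:

opts := llm.ValidOptions(llm.ParamOptions{
	Model:       llm.DefaultOpenAIModel,
	MaxTokens:   512,
	Temperature: 0.7,
	StopWords:   []string{"###"},
})
out, err := client.GetCompletion(context.Background(), "Write a haiku about garbage collection.", llm.WithOptions(opts))
if err != nil {
	log.Fatal(err)
}
fmt.Println(out)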
