client

package v0.0.4
Published: Mar 22, 2025 · License: MIT · Imports: 8 · Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

var ChatProfiles []*Ref
var Profiles = make(map[string]*Ref)

Functions

func Init

func Init() error
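
A minimal start-up sketch, assuming Init loads the configured profiles and populates Profiles, ChatProfiles, and Default; the import path below is a placeholder for wherever this module actually lives:

package main

import (
	"fmt"
	"log"

	"example.com/yourapp/client" // hypothetical import path for this package
)

func main() {
	// Assumption: Init reads the application's configuration and fills in
	// Profiles, ChatProfiles, and Default.
	if err := client.Init(); err != nil {
		log.Fatalf("client init: %v", err)
	}

	for name := range client.Profiles {
		fmt.Println("profile:", name)
	}
	if client.Default != nil {
		fmt.Println("default tier-1 model:", client.Default.Model(client.Tier1))
	}
}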

Types

type ModelTier

type ModelTier int
const (
	Tier1 ModelTier = iota + 1
	Tier2
)

type Ref

type Ref struct {
	Config  *config.Profile
	Client  *openai.Client
	S       *semaphore.Weighted
	Samples *Samples
}
var Default *Ref

func NewRef added in v0.0.4

func NewRef(p *config.Profile) *Ref
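
A sketch of constructing a standalone Ref, assuming the *config.Profile comes from the application's own configuration loading (its fields are not documented in this index):

// refFor wraps client.NewRef. The profile p is assumed to be obtained from
// the application's configuration.
func refFor(p *config.Profile) *client.Ref {
	ref := client.NewRef(p)
	// The returned Ref bundles the profile's config, an openai.Client, a
	// weighted semaphore (presumably for limiting concurrent requests),
	// and a Samples bytes-per-token estimator.
	return ref
}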

func (*Ref) Completion added in v0.0.4

func (ref *Ref) Completion(ctx context.Context, params openai.ChatCompletionNewParams) (
	*openai.ChatCompletion, error,
)
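
A hedged sketch of a chat call through a Ref. The request construction assumes the newer github.com/openai/openai-go parameter style (plain struct fields and the UserMessage helper); older SDK versions wrap fields with openai.F(...), so adjust to the version this module pins:

func ask(ctx context.Context, ref *client.Ref, prompt string) (string, error) {
	params := openai.ChatCompletionNewParams{
		// Assumption: the model name comes from the profile via Ref.Model.
		Model: openai.ChatModel(ref.Model(client.Tier1)),
		Messages: []openai.ChatCompletionMessageParamUnion{
			openai.UserMessage(prompt),
		},
	}
	resp, err := ref.Completion(ctx, params)
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("no choices returned")
	}
	return resp.Choices[0].Message.Content, nil
}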

func (*Ref) Embeddings added in v0.0.4

func (ref *Ref) Embeddings(ctx context.Context, params openai.EmbeddingNewParams) (
	*openai.CreateEmbeddingResponse, error,
)
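
Similarly hedged, a sketch of requesting embeddings; the model choice and the Input union construction follow the newer openai-go API and are assumptions, not something this index documents:

func embed(ctx context.Context, ref *client.Ref, text string) ([]float64, error) {
	params := openai.EmbeddingNewParams{
		Model: openai.EmbeddingModelTextEmbedding3Small, // assumed model choice
		Input: openai.EmbeddingNewParamsInputUnion{
			OfString: openai.String(text), // assumed union construction
		},
	}
	resp, err := ref.Embeddings(ctx, params)
	if err != nil {
		return nil, err
	}
	if len(resp.Data) == 0 {
		return nil, fmt.Errorf("no embeddings returned")
	}
	return resp.Data[0].Embedding, nil
}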

func (*Ref) Model added in v0.0.4

func (ref *Ref) Model(tier ModelTier) string
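
Model presumably maps a tier to the model name configured for the profile; a small sketch of picking a tier by task difficulty (the stronger-versus-cheaper split is an assumption):

// pickModel chooses a configured model name by tier. Assumption: Tier1 is
// the profile's primary/stronger model and Tier2 a cheaper secondary one.
func pickModel(ref *client.Ref, hard bool) string {
	if hard {
		return ref.Model(client.Tier1)
	}
	return ref.Model(client.Tier2)
}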

type Samples

type Samples struct {
	// contains filtered or unexported fields
}

Samples implements a cheesy (rough) way to estimate the number of bytes per token, based on samples registered across several calls to the LLM service.

func (*Samples) BytesPerTok

func (s *Samples) BytesPerTok() float32
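
A sketch of using the estimate to turn a token budget into an approximate byte budget, assuming BytesPerTok reports the running average observed so far:

// maxBytesForTokens converts a token budget into an approximate byte budget
// using the profile's running bytes-per-token estimate.
func maxBytesForTokens(ref *client.Ref, tokenBudget int) int {
	return int(float32(tokenBudget) * ref.Samples.BytesPerTok())
}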
