ai

package
v0.0.0-...-1dcda14 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 3, 2025 License: Unlicense Imports: 19 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type AI

type AI interface {
	// Embed generates vector embeddings from the input text provided in the request.
	Embed(ctx context.Context, request EmbedRequest) (response EmbedResponse, err error)

	// Generate creates new content based on the prompt in a single response.
	Generate(ctx context.Context, request GenerateRequest) (response GenerateResponse, err error)

	// GenerateStream creates new content based on the prompt as a byte stream.
	GenerateStream(ctx context.Context, request GenerateRequest) (stream io.Reader)

	// Chat facilitates a conversation between the AI and a user with documentation as context in a single response.
	Chat(ctx context.Context, request ChatRequest) (response ChatResponse, err error)

	// ChatStream facilitates a conversation between the AI and a user with documentation as context as a byte stream.
	// NOTE(review): unlike GenerateStream (io.Reader), this returns io.ReadCloser — callers are
	// presumably responsible for closing the stream; confirm against the implementation.
	ChatStream(ctx context.Context, request ChatRequest) (stream io.ReadCloser)
}

AI represents an interface for interacting with various AI services.

func NewOllama

func NewOllama(cfg config.Ollama) (ai AI, err error)

type ChatMessage

// ChatMessage is a single message in a chat conversation.
type ChatMessage struct {
	// Role identifies the author of the message (typically "system", "user",
	// or "assistant" in Ollama-style APIs — confirm against the backend).
	Role string `json:"role"`
	// Content is the text body of the message.
	Content string `json:"content"`
	// Images optionally attaches images to the message; omitted from the
	// JSON payload when empty. Presumably base64-encoded — verify with the API.
	Images []string `json:"images,omitempty"`
	// ToolCalls carries tool-call payloads verbatim; json.RawMessage defers
	// decoding so the exact wire shape is preserved.
	ToolCalls []json.RawMessage `json:"tool_calls,omitempty"`
}

type ChatRequest

// ChatRequest is the request payload for Chat and ChatStream.
type ChatRequest struct {
	// Model names the model to run the conversation against.
	Model string `json:"model"`
	// Messages is the conversation history, oldest first.
	Messages []ChatMessage `json:"messages"`
	// Tools lists tool definitions passed through verbatim to the backend.
	Tools []json.RawMessage `json:"tools,omitempty"`
	// Format optionally constrains the response format (e.g. "json" — confirm
	// supported values against the backend API).
	Format string `json:"format,omitempty"`
	// Options carries backend-specific generation options verbatim.
	Options json.RawMessage `json:"options,omitempty"`
	// Stream selects streaming delivery; always serialized (no omitempty), so
	// false is sent explicitly.
	Stream bool `json:"stream"`
	// KeepAlive controls how long the model stays loaded after the request;
	// nil omits the field so the backend default applies.
	KeepAlive *time.Duration `json:"keep_alive,omitempty"`
}

type ChatResponse

// ChatResponse is the response payload returned by Chat.
type ChatResponse struct {
	// Model echoes the model that produced the response.
	Model string `json:"model"`
	// CreatedAt is the server-side creation timestamp.
	CreatedAt time.Time `json:"created_at"`
	// Message is the assistant's reply.
	Message ChatMessage `json:"message"`
	// Done reports whether generation has finished.
	Done bool `json:"done"`
	// Context is an opaque token context for continuing the conversation.
	Context []int `json:"context"`
	// Timing/accounting fields below; durations are raw int64 values
	// (presumably nanoseconds, per Ollama's API — confirm before converting).
	TotalDuration      int64 `json:"total_duration"`
	LoadDuration       int64 `json:"load_duration"`
	PromptEvalCount    int   `json:"prompt_eval_count"`
	PromptEvalDuration int64 `json:"prompt_eval_duration"`
	EvalCount          int   `json:"eval_count"`
	EvalDuration       int64 `json:"eval_duration"`
}

type EmbedRequest

// EmbedRequest is the request payload for Embed.
type EmbedRequest struct {
	// Standard params
	// Model names the embedding model to use.
	Model string `json:"model"`
	// Input is the text to embed; accepts either a single string or a slice
	// (see config.SingleOrSlice).
	Input config.SingleOrSlice[string] `json:"input"`
	// Advanced params
	// Truncate, when non-nil, controls whether over-length input is truncated;
	// nil omits the field so the backend default applies.
	Truncate *bool `json:"truncate,omitempty"`
	// Options carries backend-specific model options.
	Options map[string]any `json:"options,omitempty"`
	// KeepAlive controls how long the model stays loaded after the request.
	KeepAlive *time.Duration `json:"keep_alive,omitempty"`
}

type EmbedResponse

// EmbedResponse is the response payload returned by Embed.
type EmbedResponse struct {
	// Model echoes the model that produced the embeddings.
	Model string `json:"model"`
	// Embeddings holds one vector per input string.
	Embeddings Embeddings `json:"embeddings"`
	// Done reports whether the request has completed.
	Done bool `json:"done"`
	// Durations are raw int64 values (presumably nanoseconds — confirm
	// against the backend API before converting).
	TotalDuration   int64 `json:"total_duration"`
	LoadDuration    int64 `json:"load_duration"`
	PromptEvalCount int   `json:"prompt_eval_count"`
}

type EmbeddingValue

type EmbeddingValue uint8

func (*EmbeddingValue) UnmarshalJSON

func (e *EmbeddingValue) UnmarshalJSON(data []byte) error

type Embeddings

type Embeddings [][]EmbeddingValue

func (Embeddings) Underlying

func (e Embeddings) Underlying() [][]uint8

type GenerateRequest

// GenerateRequest is the request payload for Generate and GenerateStream.
type GenerateRequest struct {
	// Standard params
	// Model names the model to generate with.
	Model string `json:"model"`
	// Prompt is the text prompt to complete.
	Prompt string `json:"prompt,omitempty"`
	// Suffix is text appended after the generated output (fill-in-the-middle).
	Suffix string `json:"suffix,omitempty"`
	// Images attaches image data to the prompt.
	// NOTE(review): Ollama's generate endpoint documents "images" as an array
	// of base64 strings; a single string here looks suspicious — verify the
	// wire format against the backend.
	Images string `json:"images,omitempty"`
	// Advanced params
	// Format optionally constrains the response format (e.g. "json").
	Format string `json:"format,omitempty"`
	// Options carries backend-specific model options.
	Options map[string]any `json:"options,omitempty"`
	// System overrides the model's system prompt for this request.
	System string `json:"system,omitempty"`
	// Template overrides the prompt template for this request.
	Template string `json:"template,omitempty"`
	// Stream selects streaming delivery; always serialized, so false is sent
	// explicitly.
	Stream bool `json:"stream"`
	// Raw, when true, bypasses prompt templating — the prompt is sent as-is.
	Raw bool `json:"raw"`
	// KeepAlive controls how long the model stays loaded after the request.
	KeepAlive *time.Duration `json:"keep_alive,omitempty"`
}

type GenerateResponse

// GenerateResponse is the full (non-streaming) response returned by Generate.
// It embeds GenerateStream for the common fields (model, timestamp, text, done)
// and adds the final accounting fields that only appear once generation ends.
type GenerateResponse struct {
	GenerateStream
	// Context is an opaque token context for continuing generation.
	Context []int `json:"context"`
	// Durations are raw int64 values (presumably nanoseconds — confirm
	// against the backend API before converting).
	TotalDuration      int64 `json:"total_duration"`
	LoadDuration       int64 `json:"load_duration"`
	PromptEvalCount    int   `json:"prompt_eval_count"`
	PromptEvalDuration int64 `json:"prompt_eval_duration"`
	EvalCount          int   `json:"eval_count"`
	EvalDuration       int64 `json:"eval_duration"`
}

type GenerateStream

// GenerateStream is a single chunk of a streaming generate response; it is
// also embedded in GenerateResponse as the common field set.
type GenerateStream struct {
	// Model echoes the model that produced this chunk.
	Model string `json:"model"`
	// CreatedAt is the server-side creation timestamp of the chunk.
	CreatedAt time.Time `json:"created_at"`
	// Response is the generated text in this chunk.
	Response string `json:"response"`
	// Done is true on the final chunk of the stream.
	Done bool `json:"done"`
}

type Ollama

// Ollama is an AI implementation backed by an Ollama server; construct it with
// NewOllama. Internal state is unexported.
type Ollama struct {
	// contains filtered or unexported fields
}

func (*Ollama) Chat

func (ai *Ollama) Chat(ctx context.Context, request ChatRequest) (response ChatResponse, err error)

func (*Ollama) ChatStream

func (ai *Ollama) ChatStream(ctx context.Context, request ChatRequest) (stream io.ReadCloser)

func (*Ollama) Embed

func (ai *Ollama) Embed(ctx context.Context, request EmbedRequest) (response EmbedResponse, err error)

func (*Ollama) Generate

func (ai *Ollama) Generate(ctx context.Context, request GenerateRequest) (response GenerateResponse, err error)

func (*Ollama) GenerateStream

func (ai *Ollama) GenerateStream(ctx context.Context, request GenerateRequest) (stream io.Reader)

func (*Ollama) Url

func (o *Ollama) Url() (uri url.URL, done func())

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL