Documentation ¶
Index ¶
- type AI
- type ChatMessage
- type ChatRequest
- type ChatResponse
- type EmbedRequest
- type EmbedResponse
- type EmbeddingValue
- type Embeddings
- type GenerateRequest
- type GenerateResponse
- type GenerateStream
- type Ollama
- func (ai *Ollama) Chat(ctx context.Context, request ChatRequest) (response ChatResponse, err error)
- func (ai *Ollama) ChatStream(ctx context.Context, request ChatRequest) (stream io.ReadCloser)
- func (ai *Ollama) Embed(ctx context.Context, request EmbedRequest) (response EmbedResponse, err error)
- func (ai *Ollama) Generate(ctx context.Context, request GenerateRequest) (response GenerateResponse, err error)
- func (ai *Ollama) GenerateStream(ctx context.Context, request GenerateRequest) (stream io.Reader)
- func (o *Ollama) Url() (uri url.URL, done func())
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AI ¶
type AI interface {
	// Embed generates vector embeddings from the input text provided in the request.
	Embed(ctx context.Context, request EmbedRequest) (response EmbedResponse, err error)

	// Generate creates new content based on the prompt in a single response.
	Generate(ctx context.Context, request GenerateRequest) (response GenerateResponse, err error)

	// GenerateStream creates new content based on the prompt as a byte stream.
	GenerateStream(ctx context.Context, request GenerateRequest) (stream io.Reader)

	// Chat facilitates a conversation between the AI and a user with documentation as context in a single response.
	Chat(ctx context.Context, request ChatRequest) (response ChatResponse, err error)

	// ChatStream facilitates a conversation between the AI and a user with documentation as context as a byte stream.
	ChatStream(ctx context.Context, request ChatRequest) (stream io.ReadCloser)
}
AI represents an interface for interacting with various AI services.
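A minimal sketch of depending on the AI interface rather than the concrete *Ollama client; the helper name and the model name "llama3" are illustrative placeholders, not part of this package.

func ask(ctx context.Context, ai AI, question string) (string, error) {
	// Any AI implementation, such as *Ollama, satisfies this call.
	response, err := ai.Chat(ctx, ChatRequest{
		Model: "llama3", // illustrative placeholder
		Messages: []ChatMessage{
			{Role: "user", Content: question},
		},
	})
	if err != nil {
		return "", err
	}
	return response.Message.Content, nil
}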
type ChatMessage ¶
type ChatMessage struct {
	Role      string            `json:"role"`
	Content   string            `json:"content"`
	Images    []string          `json:"images,omitempty"`
	ToolCalls []json.RawMessage `json:"tool_calls,omitempty"`
}
type ChatRequest ¶
type ChatRequest struct {
	Model     string            `json:"model"`
	Messages  []ChatMessage     `json:"messages"`
	Tools     []json.RawMessage `json:"tools,omitempty"`
	Format    string            `json:"format,omitempty"`
	Options   json.RawMessage   `json:"options,omitempty"`
	Stream    bool              `json:"stream"`
	KeepAlive *time.Duration    `json:"keep_alive,omitempty"`
}
type ChatResponse ¶
type ChatResponse struct {
	Model              string      `json:"model"`
	CreatedAt          time.Time   `json:"created_at"`
	Message            ChatMessage `json:"message"`
	Done               bool        `json:"done"`
	Context            []int       `json:"context"`
	TotalDuration      int64       `json:"total_duration"`
	LoadDuration       int64       `json:"load_duration"`
	PromptEvalCount    int         `json:"prompt_eval_count"`
	PromptEvalDuration int64       `json:"prompt_eval_duration"`
	EvalCount          int         `json:"eval_count"`
	EvalDuration       int64       `json:"eval_duration"`
}
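A sketch of building a ChatRequest with a system message and a keep-alive, then reading documented fields from the ChatResponse. The model name is an illustrative placeholder, and treating the duration fields as nanosecond counts is an assumption based on their int64 type.

func exampleChat(ctx context.Context, ai AI) error {
	keepAlive := 5 * time.Minute
	response, err := ai.Chat(ctx, ChatRequest{
		Model: "llama3", // illustrative placeholder
		Messages: []ChatMessage{
			{Role: "system", Content: "Answer using the provided documentation only."},
			{Role: "user", Content: "How do I configure the client?"},
		},
		KeepAlive: &keepAlive,
	})
	if err != nil {
		return err
	}
	// Assumption: TotalDuration is a nanosecond count, so it converts
	// directly to time.Duration.
	fmt.Printf("answered by %s in %s\n", response.Model, time.Duration(response.TotalDuration))
	fmt.Println(response.Message.Content)
	return nil
}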
type EmbedRequest ¶
type EmbedResponse ¶
type EmbeddingValue ¶
type EmbeddingValue uint8
func (*EmbeddingValue) UnmarshalJSON ¶
func (e *EmbeddingValue) UnmarshalJSON(data []byte) error
type Embeddings ¶
type Embeddings [][]EmbeddingValue
func (Embeddings) Underlying ¶
func (e Embeddings) Underlying() [][]uint8
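A sketch of decoding a JSON array of vectors into Embeddings and recovering plain [][]uint8 via Underlying. The exact wire format accepted by EmbeddingValue.UnmarshalJSON is not documented on this page, so the payload below is a placeholder.

func exampleEmbeddings() error {
	payload := []byte(`[[1, 2, 3], [4, 5, 6]]`) // placeholder payload

	var embeddings Embeddings
	if err := json.Unmarshal(payload, &embeddings); err != nil {
		return err
	}

	// Underlying exposes the raw uint8 values behind each EmbeddingValue.
	raw := embeddings.Underlying()
	fmt.Println(len(raw), "vectors of length", len(raw[0]))
	return nil
}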
type GenerateRequest ¶
type GenerateRequest struct {
	// Standard params
	Model  string `json:"model"`
	Prompt string `json:"prompt,omitempty"`
	Suffix string `json:"suffix,omitempty"`
	Images string `json:"images,omitempty"`

	// Advanced params
	Format    string         `json:"format,omitempty"`
	Options   map[string]any `json:"options,omitempty"`
	System    string         `json:"system,omitempty"`
	Template  string         `json:"template,omitempty"`
	Stream    bool           `json:"stream"`
	Raw       bool           `json:"raw"`
	KeepAlive *time.Duration `json:"keep_alive,omitempty"`
}
type GenerateResponse ¶
type GenerateResponse struct {
	GenerateStream

	Context            []int `json:"context"`
	TotalDuration      int64 `json:"total_duration"`
	LoadDuration       int64 `json:"load_duration"`
	PromptEvalCount    int   `json:"prompt_eval_count"`
	PromptEvalDuration int64 `json:"prompt_eval_duration"`
	EvalCount          int   `json:"eval_count"`
	EvalDuration       int64 `json:"eval_duration"`
}
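A sketch of a one-shot generation with custom sampling options. The model name and the option keys ("temperature", "num_predict") are illustrative and depend on the backing Ollama server, not on this package.

func exampleGenerate(ctx context.Context, ai AI) error {
	response, err := ai.Generate(ctx, GenerateRequest{
		Model:  "llama3", // illustrative placeholder
		Prompt: "Write a haiku about documentation.",
		Options: map[string]any{
			"temperature": 0.2,
			"num_predict": 64,
		},
	})
	if err != nil {
		return err
	}
	// The token counters are documented on GenerateResponse; the generated
	// text itself lives on the embedded GenerateStream (fields not shown here).
	fmt.Printf("evaluated %d prompt tokens, produced %d tokens\n",
		response.PromptEvalCount, response.EvalCount)
	return nil
}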
type GenerateStream ¶
type Ollama ¶
type Ollama struct {
// contains filtered or unexported fields
}
func (*Ollama) Chat ¶
func (ai *Ollama) Chat(ctx context.Context, request ChatRequest) (response ChatResponse, err error)
func (*Ollama) ChatStream ¶
func (ai *Ollama) ChatStream(ctx context.Context, request ChatRequest) (stream io.ReadCloser)
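A sketch of consuming the byte stream returned by ChatStream. Because the return type is io.ReadCloser, the caller is assumed to be responsible for closing it.

func exampleChatStream(ctx context.Context, ai *Ollama, request ChatRequest) error {
	stream := ai.ChatStream(ctx, request)
	defer stream.Close()

	// Copy chunks to stdout as they arrive.
	_, err := io.Copy(os.Stdout, stream)
	return err
}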
func (*Ollama) Embed ¶
func (ai *Ollama) Embed(ctx context.Context, request EmbedRequest) (response EmbedResponse, err error)
func (*Ollama) Generate ¶
func (ai *Ollama) Generate(ctx context.Context, request GenerateRequest) (response GenerateResponse, err error)
func (*Ollama) GenerateStream ¶
func (ai *Ollama) GenerateStream(ctx context.Context, request GenerateRequest) (stream io.Reader)
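A sketch of draining the stream returned by GenerateStream. It is read line by line on the assumption that the underlying Ollama API emits newline-delimited chunks; this page does not document the framing.

func exampleGenerateStream(ctx context.Context, ai *Ollama, request GenerateRequest) error {
	stream := ai.GenerateStream(ctx, request)

	scanner := bufio.NewScanner(stream)
	for scanner.Scan() {
		fmt.Println(scanner.Text()) // one raw chunk per line
	}
	return scanner.Err()
}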