Documentation
¶
Index ¶
- type AzureOpenAIProvider
- func (ap *AzureOpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
- func (ap *AzureOpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
- func (ap *AzureOpenAIProvider) GetModel() *ModelInfo
- func (ap *AzureOpenAIProvider) GetProvider() ProviderType
- func (ap *AzureOpenAIProvider) IsHealthy(ctx context.Context) (bool, error)
- type CompletionOptions
- type Config
- type Message
- type ModelInfo
- type OllamaProvider
- func (op *OllamaProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
- func (op *OllamaProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
- func (op *OllamaProvider) GetAvailableModels(ctx context.Context) ([]string, error)
- func (op *OllamaProvider) GetModel() *ModelInfo
- func (op *OllamaProvider) GetProvider() ProviderType
- func (op *OllamaProvider) IsHealthy(ctx context.Context) (bool, error)
- func (op *OllamaProvider) PullModel(ctx context.Context, modelName string) error
- type OpenAIProvider
- func (op *OpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
- func (op *OpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
- func (op *OpenAIProvider) GetModel() *ModelInfo
- func (op *OpenAIProvider) GetProvider() ProviderType
- func (op *OpenAIProvider) IsHealthy(ctx context.Context) (bool, error)
- type Provider
- type ProviderChain
- func (pc *ProviderChain) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
- func (pc *ProviderChain) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
- func (pc *ProviderChain) GetModel() *ModelInfo
- func (pc *ProviderChain) GetProvider() ProviderType
- func (pc *ProviderChain) IsHealthy(ctx context.Context) (bool, error)
- type ProviderType
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AzureOpenAIProvider ¶
type AzureOpenAIProvider struct {
// contains filtered or unexported fields
}
AzureOpenAIProvider implements the Provider interface for Azure OpenAI
func (*AzureOpenAIProvider) Chat ¶
func (ap *AzureOpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
Chat performs a chat completion using Azure OpenAI
func (*AzureOpenAIProvider) Complete ¶
func (ap *AzureOpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
Complete generates a text completion using Azure OpenAI
func (*AzureOpenAIProvider) GetModel ¶
func (ap *AzureOpenAIProvider) GetModel() *ModelInfo
GetModel returns the model information
func (*AzureOpenAIProvider) GetProvider ¶
func (ap *AzureOpenAIProvider) GetProvider() ProviderType
GetProvider returns the provider type
func (*AzureOpenAIProvider) IsHealthy ¶
func (ap *AzureOpenAIProvider) IsHealthy(ctx context.Context) (bool, error)
IsHealthy checks if the Azure OpenAI service is available by sending a minimal chat completion request. The legacy /completions endpoint is not supported on modern Azure GPT-3.5/4/4o deployments, so we always use /chat/completions here.
type CompletionOptions ¶
type CompletionOptions struct {
Temperature *float32
MaxTokens *int64
TopP *float32
StopSequences []string
}
CompletionOptions contains options for LLM completions
type Config ¶
type Config struct {
Provider ProviderType
Model string
Temperature float32
MaxTokens int
Timeout time.Duration
// Ollama specific
OllamaURL string
// Azure specific
AzureOpenAIEndpoint string
AzureOpenAIKey string
AzureOpenAIVersion string
// OpenAI specific
OpenAIAPIKey string
}
Config holds configuration for LLM providers
func LoadConfig ¶
func LoadConfig() *Config
LoadConfig loads configuration from environment and .env files
type ModelInfo ¶
type ModelInfo struct {
Name string
Provider ProviderType
ContextSize int
MaxTokens int
CostPer1kTokens float64 // in USD
Capabilities []string
}
ModelInfo contains information about an LLM model
type OllamaProvider ¶
type OllamaProvider struct {
// contains filtered or unexported fields
}
OllamaProvider implements the Provider interface for Ollama
func (*OllamaProvider) Chat ¶
func (op *OllamaProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
Chat performs a chat completion using Ollama
func (*OllamaProvider) Complete ¶
func (op *OllamaProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
Complete generates a text completion using Ollama
func (*OllamaProvider) GetAvailableModels ¶
func (op *OllamaProvider) GetAvailableModels(ctx context.Context) ([]string, error)
GetAvailableModels lists available models in Ollama
func (*OllamaProvider) GetModel ¶
func (op *OllamaProvider) GetModel() *ModelInfo
GetModel returns the model information
func (*OllamaProvider) GetProvider ¶
func (op *OllamaProvider) GetProvider() ProviderType
GetProvider returns the provider type
func (*OllamaProvider) IsHealthy ¶
func (op *OllamaProvider) IsHealthy(ctx context.Context) (bool, error)
IsHealthy checks if the Ollama service is available
func (*OllamaProvider) PullModel ¶
func (op *OllamaProvider) PullModel(ctx context.Context, modelName string) error
PullModel pulls a model into Ollama
type OpenAIProvider ¶
type OpenAIProvider struct {
// contains filtered or unexported fields
}
OpenAIProvider implements the Provider interface for OpenAI
func (*OpenAIProvider) Chat ¶
func (op *OpenAIProvider) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
Chat performs a chat completion using OpenAI
func (*OpenAIProvider) Complete ¶
func (op *OpenAIProvider) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
Complete generates a text completion using OpenAI
func (*OpenAIProvider) GetModel ¶
func (op *OpenAIProvider) GetModel() *ModelInfo
GetModel returns the model information
func (*OpenAIProvider) GetProvider ¶
func (op *OpenAIProvider) GetProvider() ProviderType
GetProvider returns the provider type
func (*OpenAIProvider) IsHealthy ¶
func (op *OpenAIProvider) IsHealthy(ctx context.Context) (bool, error)
IsHealthy checks if the OpenAI service is available
type Provider ¶
type Provider interface {
// Complete generates text completion
Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
// Chat performs a chat completion with message history
Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
// GetModel returns information about the configured model
GetModel() *ModelInfo
// IsHealthy checks if the provider is available and healthy
IsHealthy(ctx context.Context) (bool, error)
// GetProvider returns the provider type
GetProvider() ProviderType
}
Provider is the interface for LLM providers
func NewAzureOpenAIProvider ¶
NewAzureOpenAIProvider creates a new Azure OpenAI provider
func NewOllamaProvider ¶
NewOllamaProvider creates a new Ollama provider
func NewOpenAIProvider ¶
NewOpenAIProvider creates a new OpenAI provider
func NewProvider ¶
NewProvider creates a new LLM provider based on config
type ProviderChain ¶
type ProviderChain struct {
// contains filtered or unexported fields
}
ProviderChain implements fallback chain for providers
func NewProviderChain ¶
func NewProviderChain(providers ...Provider) *ProviderChain
NewProviderChain creates a new provider chain with fallbacks
func (*ProviderChain) Chat ¶
func (pc *ProviderChain) Chat(ctx context.Context, messages []*Message, options *CompletionOptions) (string, error)
Chat tries each provider in sequence until one succeeds
func (*ProviderChain) Complete ¶
func (pc *ProviderChain) Complete(ctx context.Context, prompt string, options *CompletionOptions) (string, error)
Complete tries each provider in sequence until one succeeds
func (*ProviderChain) GetModel ¶
func (pc *ProviderChain) GetModel() *ModelInfo
GetModel returns the first available provider's model
func (*ProviderChain) GetProvider ¶
func (pc *ProviderChain) GetProvider() ProviderType
GetProvider returns the first available provider's type
func (*ProviderChain) IsHealthy ¶
func (pc *ProviderChain) IsHealthy(ctx context.Context) (bool, error)
IsHealthy reports whether at least one provider in the chain is healthy
type ProviderType ¶
type ProviderType string
ProviderType represents the LLM provider
const (
	ProviderOllama ProviderType = "ollama"
	ProviderAzure  ProviderType = "azure"
	ProviderOpenAI ProviderType = "openai"
)