Documentation
¶
Overview ¶
Package chatdelta provides a unified interface for interacting with multiple AI APIs including OpenAI, Anthropic Claude, and Google Gemini.
The package supports both synchronous and streaming responses, conversation handling, parallel execution across multiple providers, comprehensive error handling, and configurable retry logic with exponential backoff.
Basic usage:
client, err := chatdelta.CreateClient("openai", "your-api-key", "gpt-3.5-turbo", nil)
if err != nil {
	log.Fatal(err)
}
response, err := client.SendPrompt(context.Background(), "Hello, how are you?")
if err != nil {
	log.Fatal(err)
}
fmt.Println(response)
Advanced usage with configuration:
config := chatdelta.NewClientConfig().
	SetTimeout(60 * time.Second).
	SetTemperature(0.7).
	SetMaxTokens(2048).
	SetSystemMessage("You are a helpful assistant.")
client, err := chatdelta.CreateClient("claude", "your-api-key", "claude-3-haiku-20240307", config)
if err != nil {
	log.Fatal(err)
}
Conversation handling:
conversation := chatdelta.NewConversation()
conversation.AddSystemMessage("You are a helpful math tutor.")
conversation.AddUserMessage("What is 2 + 2?")
conversation.AddAssistantMessage("2 + 2 equals 4.")
conversation.AddUserMessage("What about 3 + 3?")
response, err := client.SendConversation(context.Background(), conversation)
Streaming responses:
if client.SupportsStreaming() {
	chunks, err := client.StreamPrompt(context.Background(), "Write a poem")
	if err != nil {
		log.Fatal(err)
	}
	for chunk := range chunks {
		fmt.Print(chunk.Content)
		if chunk.Finished {
			break
		}
	}
}
Parallel execution:
clients := []chatdelta.AIClient{client1, client2, client3}
results := chatdelta.ExecuteParallel(context.Background(), clients, "What is the meaning of life?")
for _, result := range results {
	fmt.Printf("%s: %s\n", result.ClientName, result.Result)
}
Environment variables: The library automatically detects API keys from environment variables:
- OpenAI: OPENAI_API_KEY or CHATGPT_API_KEY
- Anthropic: ANTHROPIC_API_KEY or CLAUDE_API_KEY
- Google: GOOGLE_API_KEY or GEMINI_API_KEY
Error handling: The library provides comprehensive error handling with specific error types and helper functions for error classification:
_, err := client.SendPrompt(ctx, "Hello")
if err != nil {
	if chatdelta.IsAuthenticationError(err) {
		// Handle authentication error
	} else if chatdelta.IsNetworkError(err) {
		// Handle network error
	} else if chatdelta.IsRetryableError(err) {
		// Library will automatically retry retryable errors
	}
}
Example (ErrorHandling) ¶
Example_errorHandling demonstrates error handling
// Try to create a client with invalid API key
client, err := chatdelta.CreateClient("openai", "invalid-key", "", nil)
if err != nil {
	fmt.Printf("Error creating client: %v\n", err)
	return
}
ctx := context.Background()
_, err = client.SendPrompt(ctx, "Hello")
if err != nil {
	// Check error type
	if chatdelta.IsAuthenticationError(err) {
		fmt.Println("Authentication error - check your API key")
	} else if chatdelta.IsNetworkError(err) {
		fmt.Println("Network error - check your connection")
	} else if chatdelta.IsRetryableError(err) {
		fmt.Println("Retryable error - the library will automatically retry")
	} else {
		fmt.Printf("Other error: %v\n", err)
	}
}
Index ¶
- Constants
- Variables
- func ExecuteWithExponentialBackoff(ctx context.Context, retries int, baseDelay time.Duration, ...) error
- func ExecuteWithRetry(ctx context.Context, retries int, operation func() error) error
- func GetAvailableProviders() []string
- func IsAuthenticationError(err error) bool
- func IsNetworkError(err error) bool
- func IsRetryableError(err error) bool
- func MergeStreamChunks(chunks <-chan StreamChunk) (string, error)
- func QuickPrompt(provider, prompt string) (string, error)
- func StreamConversationToString(ctx context.Context, client AIClient, conversation *Conversation) (string, error)
- func StreamToString(ctx context.Context, client AIClient, prompt string) (string, error)
- func ValidateConfig(config *ClientConfig) error
- type AIClient
- type AiResponse
- type ChatSession
- func (s *ChatSession) AddMessage(message Message)
- func (s *ChatSession) Clear()
- func (s *ChatSession) History() *Conversation
- func (s *ChatSession) IsEmpty() bool
- func (s *ChatSession) Len() int
- func (s *ChatSession) ResetWithSystem(message string)
- func (s *ChatSession) Send(ctx context.Context, message string) (string, error)
- func (s *ChatSession) SendWithMetadata(ctx context.Context, message string) (*AiResponse, error)
- func (s *ChatSession) Stream(ctx context.Context, message string) (<-chan StreamChunk, error)
- type ClaudeClient
- func (c *ClaudeClient) Model() string
- func (c *ClaudeClient) Name() string
- func (c *ClaudeClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
- func (c *ClaudeClient) SendPrompt(ctx context.Context, prompt string) (string, error)
- func (c *ClaudeClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
- func (c *ClaudeClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
- func (c *ClaudeClient) SupportsConversations() bool
- func (c *ClaudeClient) SupportsStreaming() bool
- type ClientConfig
- func (c *ClientConfig) SetBaseURL(url string) *ClientConfig
- func (c *ClientConfig) SetFrequencyPenalty(penalty float64) *ClientConfig
- func (c *ClientConfig) SetMaxTokens(maxTokens int) *ClientConfig
- func (c *ClientConfig) SetPresencePenalty(penalty float64) *ClientConfig
- func (c *ClientConfig) SetRetries(retries int) *ClientConfig
- func (c *ClientConfig) SetRetryStrategy(strategy RetryStrategy) *ClientConfig
- func (c *ClientConfig) SetSystemMessage(message string) *ClientConfig
- func (c *ClientConfig) SetTemperature(temperature float64) *ClientConfig
- func (c *ClientConfig) SetTimeout(timeout time.Duration) *ClientConfig
- func (c *ClientConfig) SetTopP(topP float64) *ClientConfig
- type ClientError
- func NewBadRequestError(message string) *ClientError
- func NewConfigError(message string) *ClientError
- func NewConnectionError(err error) *ClientError
- func NewDNSError(hostname string, err error) *ClientError
- func NewExpiredTokenError() *ClientError
- func NewInvalidAPIKeyError() *ClientError
- func NewInvalidModelError(model string) *ClientError
- func NewInvalidParameterError(parameter, value string) *ClientError
- func NewJSONParseError(err error) *ClientError
- func NewMissingConfigError(config string) *ClientError
- func NewMissingFieldError(field string) *ClientError
- func NewPermissionDeniedError(resource string) *ClientError
- func NewQuotaExceededError() *ClientError
- func NewRateLimitError(retryAfter *time.Duration) *ClientError
- func NewServerError(statusCode int, message string) *ClientError
- func NewStreamClosedError() *ClientError
- func NewStreamReadError(err error) *ClientError
- func NewTimeoutError(timeout time.Duration) *ClientError
- type ClientInfo
- type Conversation
- type ErrorType
- type GeminiClient
- func (c *GeminiClient) Model() string
- func (c *GeminiClient) Name() string
- func (c *GeminiClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
- func (c *GeminiClient) SendPrompt(ctx context.Context, prompt string) (string, error)
- func (c *GeminiClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
- func (c *GeminiClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
- func (c *GeminiClient) SupportsConversations() bool
- func (c *GeminiClient) SupportsStreaming() bool
- type Message
- type OpenAIClient
- func (c *OpenAIClient) Model() string
- func (c *OpenAIClient) Name() string
- func (c *OpenAIClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
- func (c *OpenAIClient) SendPrompt(ctx context.Context, prompt string) (string, error)
- func (c *OpenAIClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
- func (c *OpenAIClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
- func (c *OpenAIClient) SupportsConversations() bool
- func (c *OpenAIClient) SupportsStreaming() bool
- type ParallelResult
- type ResponseMetadata
- type RetryStrategy
- type StreamChunk
Examples ¶
Constants ¶
const (
	// Version of the chatdelta-go library
	Version = "1.1.0"
	// DefaultTimeout is the default timeout for HTTP requests
	DefaultTimeout = 30
	// DefaultRetries is the default number of retry attempts
	DefaultRetries = 3
)
Variables ¶
var SupportedProviders = []string{"openai", "anthropic", "claude", "google", "gemini"}
SupportedProviders lists all supported AI providers
Functions ¶
func ExecuteWithExponentialBackoff ¶
func ExecuteWithExponentialBackoff(ctx context.Context, retries int, baseDelay time.Duration, operation func() error) error
ExecuteWithExponentialBackoff executes a function with exponential backoff
func ExecuteWithRetry ¶
ExecuteWithRetry executes a function with retry logic and exponential backoff
func GetAvailableProviders ¶
func GetAvailableProviders() []string
GetAvailableProviders returns a list of providers with available API keys
Example ¶
ExampleGetAvailableProviders demonstrates checking for available API keys
available := chatdelta.GetAvailableProviders() if len(available) == 0 { fmt.Println("No AI providers available (no API keys found in environment)") fmt.Println("Set one of these environment variables:") fmt.Println(" OPENAI_API_KEY or CHATGPT_API_KEY") fmt.Println(" ANTHROPIC_API_KEY or CLAUDE_API_KEY") fmt.Println(" GOOGLE_API_KEY or GEMINI_API_KEY") return } fmt.Printf("Available providers: %v\n", available) // Create clients for all available providers for _, provider := range available { client, err := chatdelta.CreateClient(provider, "", "", nil) if err != nil { fmt.Printf("Error creating %s client: %v\n", provider, err) continue } info := chatdelta.GetClientInfo(client) fmt.Printf(" %s: model=%s, streaming=%t, conversations=%t\n", info.Name, info.Model, info.SupportsStreaming, info.SupportsConversations) }
func IsAuthenticationError ¶
IsAuthenticationError checks if the error is authentication-related
func IsNetworkError ¶
IsNetworkError checks if the error is a network-related error
func IsRetryableError ¶
IsRetryableError checks if the error is retryable
func MergeStreamChunks ¶
func MergeStreamChunks(chunks <-chan StreamChunk) (string, error)
MergeStreamChunks combines multiple stream chunks into a single string
func QuickPrompt ¶
QuickPrompt is a convenience function for sending a quick prompt to a provider without needing to manage client instances. It uses environment variables for API keys and default configurations.
func StreamConversationToString ¶
func StreamConversationToString(ctx context.Context, client AIClient, conversation *Conversation) (string, error)
StreamConversationToString converts a streaming conversation response to a string
func StreamToString ¶
StreamToString converts a streaming response to a string
func ValidateConfig ¶
func ValidateConfig(config *ClientConfig) error
ValidateConfig validates a ClientConfig
Types ¶
type AIClient ¶
type AIClient interface {
	// SendPrompt sends a single prompt and returns the response
	SendPrompt(ctx context.Context, prompt string) (string, error)
	// SendPromptWithMetadata sends a prompt and returns response with metadata
	SendPromptWithMetadata(ctx context.Context, prompt string) (*AiResponse, error)
	// SendConversation sends a conversation and returns the response
	SendConversation(ctx context.Context, conversation *Conversation) (string, error)
	// SendConversationWithMetadata sends a conversation and returns response with metadata
	SendConversationWithMetadata(ctx context.Context, conversation *Conversation) (*AiResponse, error)
	// StreamPrompt sends a prompt and returns a channel for streaming chunks
	StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
	// StreamConversation sends a conversation and returns a channel for streaming chunks
	StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
	// SupportsStreaming returns true if the client supports streaming
	SupportsStreaming() bool
	// SupportsConversations returns true if the client supports conversations
	SupportsConversations() bool
	// Name returns the name of the client
	Name() string
	// Model returns the model identifier
	Model() string
}
AIClient defines the interface for all AI clients
func CreateClient ¶
func CreateClient(provider, apiKey, model string, config *ClientConfig) (AIClient, error)
CreateClient creates a new AI client based on the provider string
Example ¶
ExampleCreateClient demonstrates how to create different AI clients
// Create an OpenAI client openaiClient, err := chatdelta.CreateClient("openai", "your-api-key", "gpt-3.5-turbo", nil) if err != nil { log.Fatal(err) } fmt.Printf("Created %s client with model %s\n", openaiClient.Name(), openaiClient.Model()) // Create a Claude client with custom configuration config := chatdelta.NewClientConfig(). SetTimeout(45 * time.Second). SetTemperature(0.7). SetMaxTokens(2048). SetSystemMessage("You are a helpful AI assistant.") claudeClient, err := chatdelta.CreateClient("claude", "your-api-key", "claude-3-haiku-20240307", config) if err != nil { log.Fatal(err) } fmt.Printf("Created %s client with model %s\n", claudeClient.Name(), claudeClient.Model())
type AiResponse ¶ added in v1.1.0
type AiResponse struct {
	Content  string           `json:"content"`
	Metadata ResponseMetadata `json:"metadata"`
}
AiResponse combines content and metadata
type ChatSession ¶ added in v1.1.0
type ChatSession struct {
// contains filtered or unexported fields
}
ChatSession manages multi-turn conversations with an AI client. Ported from chatdelta-rs/src/lib.rs:341-428.
func NewChatSession ¶ added in v1.1.0
func NewChatSession(client AIClient) *ChatSession
NewChatSession creates a new chat session with the given client
func NewChatSessionWithSystemMessage ¶ added in v1.1.0
func NewChatSessionWithSystemMessage(client AIClient, message string) *ChatSession
NewChatSessionWithSystemMessage creates a new chat session with a system message
func (*ChatSession) AddMessage ¶ added in v1.1.0
func (s *ChatSession) AddMessage(message Message)
AddMessage adds a message to the conversation without sending
func (*ChatSession) Clear ¶ added in v1.1.0
func (s *ChatSession) Clear()
Clear clears the conversation history
func (*ChatSession) History ¶ added in v1.1.0
func (s *ChatSession) History() *Conversation
History returns the conversation history
func (*ChatSession) IsEmpty ¶ added in v1.1.0
func (s *ChatSession) IsEmpty() bool
IsEmpty checks if the conversation is empty
func (*ChatSession) Len ¶ added in v1.1.0
func (s *ChatSession) Len() int
Len returns the number of messages in the conversation
func (*ChatSession) ResetWithSystem ¶ added in v1.1.0
func (s *ChatSession) ResetWithSystem(message string)
ResetWithSystem resets the session with a new system message
func (*ChatSession) SendWithMetadata ¶ added in v1.1.0
func (s *ChatSession) SendWithMetadata(ctx context.Context, message string) (*AiResponse, error)
SendWithMetadata sends a message and gets a response with metadata
func (*ChatSession) Stream ¶ added in v1.1.0
func (s *ChatSession) Stream(ctx context.Context, message string) (<-chan StreamChunk, error)
Stream sends a message and returns a channel for streaming chunks
type ClaudeClient ¶
type ClaudeClient struct {
// contains filtered or unexported fields
}
ClaudeClient implements the AIClient interface for Anthropic's Claude API
func NewClaudeClient ¶
func NewClaudeClient(apiKey, model string, config *ClientConfig) (*ClaudeClient, error)
NewClaudeClient creates a new Claude client
func (*ClaudeClient) Model ¶
func (c *ClaudeClient) Model() string
Model returns the model identifier
func (*ClaudeClient) SendConversation ¶
func (c *ClaudeClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
SendConversation sends a conversation to Claude
func (*ClaudeClient) SendPrompt ¶
SendPrompt sends a single prompt to Claude
func (*ClaudeClient) StreamConversation ¶
func (c *ClaudeClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
StreamConversation streams a response for a conversation
func (*ClaudeClient) StreamPrompt ¶
func (c *ClaudeClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
StreamPrompt streams a response for a single prompt
func (*ClaudeClient) SupportsConversations ¶
func (c *ClaudeClient) SupportsConversations() bool
SupportsConversations returns true (Claude supports conversations)
func (*ClaudeClient) SupportsStreaming ¶
func (c *ClaudeClient) SupportsStreaming() bool
SupportsStreaming returns true (Claude supports streaming)
type ClientConfig ¶
type ClientConfig struct {
	Timeout          time.Duration
	Retries          int
	Temperature      *float64
	MaxTokens        *int
	TopP             *float64
	FrequencyPenalty *float64
	PresencePenalty  *float64
	SystemMessage    *string
	BaseURL          *string
	RetryStrategy    RetryStrategy
}
ClientConfig holds configuration options for AI clients
Example ¶
ExampleClientConfig demonstrates configuration options
config := chatdelta.NewClientConfig(). SetTimeout(60 * time.Second). // 60 second timeout SetRetries(5). // 5 retry attempts SetTemperature(0.8). // Creative temperature SetMaxTokens(1024). // Limit output length SetTopP(0.9). // Nucleus sampling SetFrequencyPenalty(0.1). // Reduce repetition SetPresencePenalty(0.1). // Encourage topic diversity SetSystemMessage("You are a creative writing assistant.") client, err := chatdelta.CreateClient("openai", "your-api-key", "gpt-4", config) if err != nil { log.Fatal(err) } fmt.Printf("Created %s client with custom configuration\n", client.Name())
func NewClientConfig ¶
func NewClientConfig() *ClientConfig
NewClientConfig creates a new ClientConfig with default values
func (*ClientConfig) SetBaseURL ¶ added in v1.1.0
func (c *ClientConfig) SetBaseURL(url string) *ClientConfig
SetBaseURL sets the custom base URL for API endpoint
func (*ClientConfig) SetFrequencyPenalty ¶
func (c *ClientConfig) SetFrequencyPenalty(penalty float64) *ClientConfig
SetFrequencyPenalty sets the frequency penalty parameter
func (*ClientConfig) SetMaxTokens ¶
func (c *ClientConfig) SetMaxTokens(maxTokens int) *ClientConfig
SetMaxTokens sets the maximum number of tokens
func (*ClientConfig) SetPresencePenalty ¶
func (c *ClientConfig) SetPresencePenalty(penalty float64) *ClientConfig
SetPresencePenalty sets the presence penalty parameter
func (*ClientConfig) SetRetries ¶
func (c *ClientConfig) SetRetries(retries int) *ClientConfig
SetRetries sets the number of retries
func (*ClientConfig) SetRetryStrategy ¶ added in v1.1.0
func (c *ClientConfig) SetRetryStrategy(strategy RetryStrategy) *ClientConfig
SetRetryStrategy sets the retry strategy
func (*ClientConfig) SetSystemMessage ¶
func (c *ClientConfig) SetSystemMessage(message string) *ClientConfig
SetSystemMessage sets the system message
func (*ClientConfig) SetTemperature ¶
func (c *ClientConfig) SetTemperature(temperature float64) *ClientConfig
SetTemperature sets the temperature parameter
func (*ClientConfig) SetTimeout ¶
func (c *ClientConfig) SetTimeout(timeout time.Duration) *ClientConfig
SetTimeout sets the timeout duration
func (*ClientConfig) SetTopP ¶
func (c *ClientConfig) SetTopP(topP float64) *ClientConfig
SetTopP sets the top-p parameter
type ClientError ¶
type ClientError struct {
	Type    ErrorType `json:"type"`
	Code    string    `json:"code,omitempty"`
	Message string    `json:"message"`
	Cause   error     `json:"-"`
}
ClientError represents an error that occurred during client operations
func NewBadRequestError ¶
func NewBadRequestError(message string) *ClientError
NewBadRequestError creates a new bad request error
func NewConfigError ¶
func NewConfigError(message string) *ClientError
NewConfigError creates a configuration error (helper for ExecuteParallelConversation)
func NewConnectionError ¶
func NewConnectionError(err error) *ClientError
NewConnectionError creates a new connection error
func NewDNSError ¶
func NewDNSError(hostname string, err error) *ClientError
NewDNSError creates a new DNS resolution error
func NewExpiredTokenError ¶
func NewExpiredTokenError() *ClientError
NewExpiredTokenError creates a new expired token error
func NewInvalidAPIKeyError ¶
func NewInvalidAPIKeyError() *ClientError
NewInvalidAPIKeyError creates a new invalid API key error
func NewInvalidModelError ¶
func NewInvalidModelError(model string) *ClientError
NewInvalidModelError creates a new invalid model error
func NewInvalidParameterError ¶
func NewInvalidParameterError(parameter, value string) *ClientError
NewInvalidParameterError creates a new invalid parameter error
func NewJSONParseError ¶
func NewJSONParseError(err error) *ClientError
NewJSONParseError creates a new JSON parsing error
func NewMissingConfigError ¶
func NewMissingConfigError(config string) *ClientError
NewMissingConfigError creates a new missing configuration error
func NewMissingFieldError ¶
func NewMissingFieldError(field string) *ClientError
NewMissingFieldError creates a new missing field error
func NewPermissionDeniedError ¶
func NewPermissionDeniedError(resource string) *ClientError
NewPermissionDeniedError creates a new permission denied error
func NewQuotaExceededError ¶
func NewQuotaExceededError() *ClientError
NewQuotaExceededError creates a new quota exceeded error
func NewRateLimitError ¶
func NewRateLimitError(retryAfter *time.Duration) *ClientError
NewRateLimitError creates a new rate limit error
func NewServerError ¶
func NewServerError(statusCode int, message string) *ClientError
NewServerError creates a new server error
func NewStreamClosedError ¶
func NewStreamClosedError() *ClientError
NewStreamClosedError creates a new stream closed error
func NewStreamReadError ¶
func NewStreamReadError(err error) *ClientError
NewStreamReadError creates a new stream read error
func NewTimeoutError ¶
func NewTimeoutError(timeout time.Duration) *ClientError
NewTimeoutError creates a new timeout error
func (*ClientError) Error ¶
func (e *ClientError) Error() string
Error implements the error interface
func (*ClientError) Is ¶
func (e *ClientError) Is(target error) bool
Is implements error matching for error types
func (*ClientError) Unwrap ¶
func (e *ClientError) Unwrap() error
Unwrap returns the underlying error
type ClientInfo ¶
type ClientInfo struct {
	Name                  string `json:"name"`
	Model                 string `json:"model"`
	SupportsStreaming     bool   `json:"supports_streaming"`
	SupportsConversations bool   `json:"supports_conversations"`
}
ClientInfo holds information about a client
func GetClientInfo ¶
func GetClientInfo(client AIClient) ClientInfo
GetClientInfo returns information about a client
type Conversation ¶
type Conversation struct {
Messages []Message `json:"messages"`
}
Conversation represents a collection of messages
Example ¶
ExampleConversation demonstrates conversation building
conversation := chatdelta.NewConversation() // Add messages to build a conversation conversation.AddSystemMessage("You are a helpful math tutor.") conversation.AddUserMessage("I need help with algebra.") conversation.AddAssistantMessage("I'd be happy to help you with algebra! What specific topic are you working on?") conversation.AddUserMessage("How do I solve linear equations?") fmt.Printf("Conversation has %d messages:\n", len(conversation.Messages)) for i, msg := range conversation.Messages { fmt.Printf("%d. %s: %s\n", i+1, msg.Role, msg.Content) }
func NewConversation ¶
func NewConversation() *Conversation
NewConversation creates a new conversation
func (*Conversation) AddAssistantMessage ¶
func (c *Conversation) AddAssistantMessage(content string)
AddAssistantMessage adds an assistant message to the conversation
func (*Conversation) AddMessage ¶
func (c *Conversation) AddMessage(role, content string)
AddMessage adds a message to the conversation
func (*Conversation) AddSystemMessage ¶
func (c *Conversation) AddSystemMessage(content string)
AddSystemMessage adds a system message to the conversation
func (*Conversation) AddUserMessage ¶
func (c *Conversation) AddUserMessage(content string)
AddUserMessage adds a user message to the conversation
type GeminiClient ¶
type GeminiClient struct {
// contains filtered or unexported fields
}
GeminiClient implements the AIClient interface for Google's Gemini API
func NewGeminiClient ¶
func NewGeminiClient(apiKey, model string, config *ClientConfig) (*GeminiClient, error)
NewGeminiClient creates a new Gemini client
func (*GeminiClient) Model ¶
func (c *GeminiClient) Model() string
Model returns the model identifier
func (*GeminiClient) SendConversation ¶
func (c *GeminiClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
SendConversation sends a conversation to Gemini
func (*GeminiClient) SendPrompt ¶
SendPrompt sends a single prompt to Gemini
func (*GeminiClient) StreamConversation ¶
func (c *GeminiClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
StreamConversation streams a response for a conversation (not implemented for Gemini yet)
func (*GeminiClient) StreamPrompt ¶
func (c *GeminiClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
StreamPrompt streams a response for a single prompt (not implemented for Gemini yet)
func (*GeminiClient) SupportsConversations ¶
func (c *GeminiClient) SupportsConversations() bool
SupportsConversations returns true (Gemini supports conversations)
func (*GeminiClient) SupportsStreaming ¶
func (c *GeminiClient) SupportsStreaming() bool
SupportsStreaming returns false (Gemini streaming not implemented yet)
type OpenAIClient ¶
type OpenAIClient struct {
// contains filtered or unexported fields
}
OpenAIClient implements the AIClient interface for OpenAI's API
func NewOpenAIClient ¶
func NewOpenAIClient(apiKey, model string, config *ClientConfig) (*OpenAIClient, error)
NewOpenAIClient creates a new OpenAI client
func (*OpenAIClient) Model ¶
func (c *OpenAIClient) Model() string
Model returns the model identifier
func (*OpenAIClient) SendConversation ¶
func (c *OpenAIClient) SendConversation(ctx context.Context, conversation *Conversation) (string, error)
SendConversation sends a conversation to OpenAI
func (*OpenAIClient) SendPrompt ¶
SendPrompt sends a single prompt to OpenAI
func (*OpenAIClient) StreamConversation ¶
func (c *OpenAIClient) StreamConversation(ctx context.Context, conversation *Conversation) (<-chan StreamChunk, error)
StreamConversation streams a response for a conversation
func (*OpenAIClient) StreamPrompt ¶
func (c *OpenAIClient) StreamPrompt(ctx context.Context, prompt string) (<-chan StreamChunk, error)
StreamPrompt streams a response for a single prompt
func (*OpenAIClient) SupportsConversations ¶
func (c *OpenAIClient) SupportsConversations() bool
SupportsConversations returns true (OpenAI supports conversations)
func (*OpenAIClient) SupportsStreaming ¶
func (c *OpenAIClient) SupportsStreaming() bool
SupportsStreaming returns true (OpenAI supports streaming)
type ParallelResult ¶
ParallelResult represents the result of a parallel execution
func ExecuteParallel ¶
func ExecuteParallel(ctx context.Context, clients []AIClient, prompt string) []ParallelResult
ExecuteParallel executes multiple AI clients in parallel with the same prompt
Example ¶
ExampleExecuteParallel demonstrates parallel execution across multiple providers
// Create multiple clients var clients []chatdelta.AIClient if openaiClient, err := chatdelta.CreateClient("openai", os.Getenv("OPENAI_API_KEY"), "", nil); err == nil { clients = append(clients, openaiClient) } if claudeClient, err := chatdelta.CreateClient("claude", os.Getenv("CLAUDE_API_KEY"), "", nil); err == nil { clients = append(clients, claudeClient) } if geminiClient, err := chatdelta.CreateClient("gemini", os.Getenv("GEMINI_API_KEY"), "", nil); err == nil { clients = append(clients, geminiClient) } if len(clients) == 0 { fmt.Println("No clients available (check API keys)") return } // Execute the same prompt across all providers ctx := context.Background() prompt := "What is the meaning of life?" results := chatdelta.ExecuteParallel(ctx, clients, prompt) fmt.Printf("Results from %d providers:\n", len(results)) for _, result := range results { fmt.Printf("\n=== %s ===\n", result.ClientName) if result.Error != nil { fmt.Printf("Error: %v\n", result.Error) } else { fmt.Printf("Response: %s\n", result.Result) } }
func ExecuteParallelConversation ¶
func ExecuteParallelConversation(ctx context.Context, clients []AIClient, conversation *Conversation) []ParallelResult
ExecuteParallelConversation executes multiple AI clients in parallel with the same conversation
type ResponseMetadata ¶ added in v1.1.0
type ResponseMetadata struct {
	ModelUsed        string      `json:"model_used,omitempty"`
	PromptTokens     int         `json:"prompt_tokens,omitempty"`
	CompletionTokens int         `json:"completion_tokens,omitempty"`
	TotalTokens      int         `json:"total_tokens,omitempty"`
	FinishReason     string      `json:"finish_reason,omitempty"`
	SafetyRatings    interface{} `json:"safety_ratings,omitempty"`
	RequestID        string      `json:"request_id,omitempty"`
	LatencyMs        int64       `json:"latency_ms,omitempty"`
}
ResponseMetadata contains additional information from the AI provider
type RetryStrategy ¶ added in v1.1.0
type RetryStrategy string
RetryStrategy defines the retry behavior
const (
	RetryStrategyFixed                 RetryStrategy = "fixed"
	RetryStrategyLinear                RetryStrategy = "linear"
	RetryStrategyExponentialBackoff    RetryStrategy = "exponential"
	RetryStrategyExponentialWithJitter RetryStrategy = "exponential_with_jitter"
)
type StreamChunk ¶
type StreamChunk struct {
	Content  string            `json:"content"`
	Finished bool              `json:"finished"`
	Metadata *ResponseMetadata `json:"metadata,omitempty"`
}
StreamChunk represents a chunk of streaming response