Documentation ¶
Index ¶
- func ClearProviderAPICooldown(providerName string)
- func ClearProviderRuntimeHistory(providerName string)
- func ConfigureProviderRuntime(providerName string, pc config.ProviderConfig)
- func EstimateOpenAICompatRequestTokens(body map[string]interface{}) (int, error)
- func EstimatePromptTokens(messages []Message, tools []ToolDefinition, model string, ...) int
- func ExecutionErrorCode(err error) string
- func GetProviderRuntimeSnapshot(cfg *config.Config) map[string]interface{}
- func GetProviderRuntimeView(cfg *config.Config, query ProviderRuntimeQuery) map[string]interface{}
- func NotifyAIStudioRelayConnected(channelID string)
- func NotifyAIStudioRelayDisconnected(channelID string, cause error)
- func ProviderSupportsResponsesCompact(cfg *config.Config, name string) bool
- func RerankProviderRuntime(cfg *config.Config, providerName string) ([]providerRuntimeCandidate, error)
- func SetAIStudioRelayManager(manager *wsrelay.Manager)
- type AistudioProvider
- func (p *AistudioProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *AistudioProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *AistudioProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *AistudioProvider) GetDefaultModel() string
- type AntigravityProvider
- func (p *AntigravityProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *AntigravityProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *AntigravityProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *AntigravityProvider) GetDefaultModel() string
- type ClaudeProvider
- func (p *ClaudeProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *ClaudeProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *ClaudeProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *ClaudeProvider) GetDefaultModel() string
- type CodexProvider
- func (p *CodexProvider) BuildSummaryViaResponsesCompact(ctx context.Context, model string, existingSummary string, messages []Message, ...) (string, error)
- func (p *CodexProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *CodexProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *CodexProvider) CloseExecutionSession(sessionID string)
- func (p *CodexProvider) GetDefaultModel() string
- func (p *CodexProvider) SupportsResponsesCompact() bool
- type ExecutionSessionCloser
- type FunctionCall
- type GeminiCLIProvider
- func (p *GeminiCLIProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *GeminiCLIProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *GeminiCLIProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *GeminiCLIProvider) GetDefaultModel() string
- type GeminiProvider
- func (p *GeminiProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *GeminiProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *GeminiProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *GeminiProvider) GetDefaultModel() string
- type HTTPProvider
- func (p *HTTPProvider) BuildSummaryViaResponsesCompact(ctx context.Context, model string, existingSummary string, messages []Message, ...) (string, error)
- func (p *HTTPProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *HTTPProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *HTTPProvider) GetDefaultModel() string
- func (p *HTTPProvider) SupportsResponsesCompact() bool
- type IFlowProvider
- func (p *IFlowProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *IFlowProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *IFlowProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *IFlowProvider) GetDefaultModel() string
- type KimiProvider
- func (p *KimiProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *KimiProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *KimiProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *KimiProvider) GetDefaultModel() string
- type LLMProvider
- type LLMResponse
- type Message
- type MessageContentPart
- type OAuthAccountInfo
- type OAuthLoginManager
- func (m *OAuthLoginManager) ClearCooldown(credentialFile string) error
- func (m *OAuthLoginManager) CompleteManualFlowWithOptions(ctx context.Context, apiBase string, flow *OAuthPendingFlow, ...) (*OAuthSessionInfo, []string, error)
- func (m *OAuthLoginManager) CredentialFile() string
- func (m *OAuthLoginManager) DeleteAccount(credentialFile string) error
- func (m *OAuthLoginManager) ImportAuthJSONWithOptions(ctx context.Context, apiBase string, fileName string, data []byte, ...) (*OAuthSessionInfo, []string, error)
- func (m *OAuthLoginManager) ListAccounts() ([]OAuthAccountInfo, error)
- func (m *OAuthLoginManager) Login(ctx context.Context, apiBase string, opts OAuthLoginOptions) (*OAuthSessionInfo, []string, error)
- func (m *OAuthLoginManager) RefreshAccount(ctx context.Context, credentialFile string) (*OAuthAccountInfo, error)
- func (m *OAuthLoginManager) StartManualFlowWithOptions(opts OAuthLoginOptions) (*OAuthPendingFlow, error)
- type OAuthLoginOptions
- type OAuthPendingFlow
- type OAuthSessionInfo
- type ProviderExecutionError
- type ProviderExecutionResult
- type ProviderRefreshAccountResult
- type ProviderRefreshResult
- type ProviderRuntimeQuery
- type ProviderRuntimeSummary
- type ProviderRuntimeSummaryItem
- type QwenProvider
- func (p *QwenProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *QwenProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *QwenProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *QwenProvider) GetDefaultModel() string
- type ResponsesCompactor
- type StreamingLLMProvider
- type TokenCounter
- type ToolCall
- type ToolDefinition
- type ToolFunctionDefinition
- type UsageInfo
- type VertexProvider
- func (p *VertexProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *VertexProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*LLMResponse, error)
- func (p *VertexProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, ...) (*UsageInfo, error)
- func (p *VertexProvider) GetDefaultModel() string
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func ClearProviderAPICooldown ¶
func ClearProviderAPICooldown(providerName string)
func ClearProviderRuntimeHistory ¶
func ClearProviderRuntimeHistory(providerName string)
func ConfigureProviderRuntime ¶
func ConfigureProviderRuntime(providerName string, pc config.ProviderConfig)
func EstimateOpenAICompatRequestTokens ¶ added in v1.2.3
EstimateOpenAICompatRequestTokens estimates prompt tokens for an OpenAI-compatible chat request without calling an upstream tokenizer. It intentionally errs a bit high for structured fields so compaction triggers before providers reject a prompt.
func EstimatePromptTokens ¶ added in v1.2.3
func EstimatePromptTokens(messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) int
EstimatePromptTokens estimates tokens directly from provider-native message and tool structures. Callers should still prefer a provider's native count API when one is available.
func ExecutionErrorCode ¶ added in v1.1.0
func GetProviderRuntimeView ¶
func GetProviderRuntimeView(cfg *config.Config, query ProviderRuntimeQuery) map[string]interface{}
func NotifyAIStudioRelayConnected ¶
func NotifyAIStudioRelayConnected(channelID string)
func RerankProviderRuntime ¶
func SetAIStudioRelayManager ¶
Types ¶
type AistudioProvider ¶
type AistudioProvider struct {
// contains filtered or unexported fields
}
func NewAistudioProvider ¶
func (*AistudioProvider) Chat ¶
func (p *AistudioProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*AistudioProvider) ChatStream ¶
func (p *AistudioProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*AistudioProvider) CountTokens ¶
func (p *AistudioProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*AistudioProvider) GetDefaultModel ¶
func (p *AistudioProvider) GetDefaultModel() string
type AntigravityProvider ¶
type AntigravityProvider struct {
// contains filtered or unexported fields
}
func NewAntigravityProvider ¶
func (*AntigravityProvider) Chat ¶
func (p *AntigravityProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*AntigravityProvider) ChatStream ¶
func (p *AntigravityProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*AntigravityProvider) CountTokens ¶
func (p *AntigravityProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*AntigravityProvider) GetDefaultModel ¶
func (p *AntigravityProvider) GetDefaultModel() string
type ClaudeProvider ¶
type ClaudeProvider struct {
// contains filtered or unexported fields
}
func NewClaudeProvider ¶
func (*ClaudeProvider) Chat ¶
func (p *ClaudeProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*ClaudeProvider) ChatStream ¶
func (p *ClaudeProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*ClaudeProvider) CountTokens ¶
func (p *ClaudeProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*ClaudeProvider) GetDefaultModel ¶
func (p *ClaudeProvider) GetDefaultModel() string
type CodexProvider ¶
type CodexProvider struct {
// contains filtered or unexported fields
}
func NewCodexProvider ¶
func (*CodexProvider) BuildSummaryViaResponsesCompact ¶
func (*CodexProvider) Chat ¶
func (p *CodexProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*CodexProvider) ChatStream ¶
func (p *CodexProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*CodexProvider) CloseExecutionSession ¶
func (p *CodexProvider) CloseExecutionSession(sessionID string)
func (*CodexProvider) GetDefaultModel ¶
func (p *CodexProvider) GetDefaultModel() string
func (*CodexProvider) SupportsResponsesCompact ¶
func (p *CodexProvider) SupportsResponsesCompact() bool
type ExecutionSessionCloser ¶
type ExecutionSessionCloser interface {
CloseExecutionSession(sessionID string)
}
ExecutionSessionCloser is an optional capability for providers that keep reusable upstream execution sessions, such as websocket-backed Codex sessions.
type FunctionCall ¶
type GeminiCLIProvider ¶
type GeminiCLIProvider struct {
// contains filtered or unexported fields
}
func NewGeminiCLIProvider ¶
func (*GeminiCLIProvider) Chat ¶
func (p *GeminiCLIProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*GeminiCLIProvider) ChatStream ¶
func (p *GeminiCLIProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*GeminiCLIProvider) CountTokens ¶
func (p *GeminiCLIProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*GeminiCLIProvider) GetDefaultModel ¶
func (p *GeminiCLIProvider) GetDefaultModel() string
type GeminiProvider ¶
type GeminiProvider struct {
// contains filtered or unexported fields
}
func NewGeminiProvider ¶
func (*GeminiProvider) Chat ¶
func (p *GeminiProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*GeminiProvider) ChatStream ¶
func (p *GeminiProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*GeminiProvider) CountTokens ¶
func (p *GeminiProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*GeminiProvider) GetDefaultModel ¶
func (p *GeminiProvider) GetDefaultModel() string
type HTTPProvider ¶
type HTTPProvider struct {
// contains filtered or unexported fields
}
func NewHTTPProvider ¶
func (*HTTPProvider) BuildSummaryViaResponsesCompact ¶
func (*HTTPProvider) Chat ¶
func (p *HTTPProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*HTTPProvider) ChatStream ¶
func (p *HTTPProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*HTTPProvider) GetDefaultModel ¶
func (p *HTTPProvider) GetDefaultModel() string
func (*HTTPProvider) SupportsResponsesCompact ¶
func (p *HTTPProvider) SupportsResponsesCompact() bool
type IFlowProvider ¶
type IFlowProvider struct {
// contains filtered or unexported fields
}
func NewIFlowProvider ¶
func (*IFlowProvider) Chat ¶
func (p *IFlowProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*IFlowProvider) ChatStream ¶
func (p *IFlowProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*IFlowProvider) CountTokens ¶
func (p *IFlowProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*IFlowProvider) GetDefaultModel ¶
func (p *IFlowProvider) GetDefaultModel() string
type KimiProvider ¶
type KimiProvider struct {
// contains filtered or unexported fields
}
func NewKimiProvider ¶
func (*KimiProvider) Chat ¶
func (p *KimiProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*KimiProvider) ChatStream ¶
func (p *KimiProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*KimiProvider) CountTokens ¶
func (p *KimiProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*KimiProvider) GetDefaultModel ¶
func (p *KimiProvider) GetDefaultModel() string
type LLMProvider ¶
type LLMProvider interface {
Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
GetDefaultModel() string
}
func CreateProvider ¶
func CreateProvider(cfg *config.Config) (LLMProvider, error)
func CreateProviderByName ¶
func CreateProviderByName(cfg *config.Config, name string) (LLMProvider, error)
type LLMResponse ¶
type Message ¶
type Message struct {
Role string `json:"role"`
Content string `json:"content"`
ReasoningContent string `json:"reasoning_content,omitempty"`
ContentParts []MessageContentPart `json:"content_parts,omitempty"`
ToolCalls []ToolCall `json:"tool_calls,omitempty"`
ToolCallID string `json:"tool_call_id,omitempty"`
}
type MessageContentPart ¶
type MessageContentPart struct {
Type string `json:"type"`
Text string `json:"text,omitempty"`
ImageURL string `json:"image_url,omitempty"`
Detail string `json:"detail,omitempty"`
MIMEType string `json:"mime_type,omitempty"`
Filename string `json:"filename,omitempty"`
FileData string `json:"file_data,omitempty"`
FileID string `json:"file_id,omitempty"`
FileURL string `json:"file_url,omitempty"`
}
type OAuthAccountInfo ¶
type OAuthAccountInfo struct {
Email string `json:"email"`
AccountID string `json:"account_id"`
CredentialFile string `json:"credential_file"`
Expire string `json:"expire,omitempty"`
LastRefresh string `json:"last_refresh,omitempty"`
ProjectID string `json:"project_id,omitempty"`
AccountLabel string `json:"account_label,omitempty"`
DeviceID string `json:"device_id,omitempty"`
ResourceURL string `json:"resource_url,omitempty"`
NetworkProxy string `json:"network_proxy,omitempty"`
Disabled bool `json:"disabled,omitempty"`
DisableReason string `json:"disable_reason,omitempty"`
CooldownUntil string `json:"cooldown_until,omitempty"`
FailureCount int `json:"failure_count,omitempty"`
LastFailure string `json:"last_failure,omitempty"`
HealthScore int `json:"health_score,omitempty"`
PlanType string `json:"plan_type,omitempty"`
QuotaSource string `json:"quota_source,omitempty"`
BalanceLabel string `json:"balance_label,omitempty"`
BalanceDetail string `json:"balance_detail,omitempty"`
SubActiveStart string `json:"subscription_active_start,omitempty"`
SubActiveUntil string `json:"subscription_active_until,omitempty"`
}
type OAuthLoginManager ¶
type OAuthLoginManager struct {
// contains filtered or unexported fields
}
func NewOAuthLoginManager ¶
func NewOAuthLoginManager(pc config.ProviderConfig, timeout time.Duration) (*OAuthLoginManager, error)
func (*OAuthLoginManager) ClearCooldown ¶
func (m *OAuthLoginManager) ClearCooldown(credentialFile string) error
func (*OAuthLoginManager) CompleteManualFlowWithOptions ¶
func (m *OAuthLoginManager) CompleteManualFlowWithOptions(ctx context.Context, apiBase string, flow *OAuthPendingFlow, callbackURL string, opts OAuthLoginOptions) (*OAuthSessionInfo, []string, error)
func (*OAuthLoginManager) CredentialFile ¶
func (m *OAuthLoginManager) CredentialFile() string
func (*OAuthLoginManager) DeleteAccount ¶
func (m *OAuthLoginManager) DeleteAccount(credentialFile string) error
func (*OAuthLoginManager) ImportAuthJSONWithOptions ¶
func (m *OAuthLoginManager) ImportAuthJSONWithOptions(ctx context.Context, apiBase string, fileName string, data []byte, opts OAuthLoginOptions) (*OAuthSessionInfo, []string, error)
func (*OAuthLoginManager) ListAccounts ¶
func (m *OAuthLoginManager) ListAccounts() ([]OAuthAccountInfo, error)
func (*OAuthLoginManager) Login ¶
func (m *OAuthLoginManager) Login(ctx context.Context, apiBase string, opts OAuthLoginOptions) (*OAuthSessionInfo, []string, error)
func (*OAuthLoginManager) RefreshAccount ¶
func (m *OAuthLoginManager) RefreshAccount(ctx context.Context, credentialFile string) (*OAuthAccountInfo, error)
func (*OAuthLoginManager) StartManualFlowWithOptions ¶
func (m *OAuthLoginManager) StartManualFlowWithOptions(opts OAuthLoginOptions) (*OAuthPendingFlow, error)
type OAuthLoginOptions ¶
type OAuthPendingFlow ¶
type OAuthPendingFlow struct {
Mode string `json:"mode,omitempty"`
State string `json:"state,omitempty"`
PKCEVerifier string `json:"pkce_verifier,omitempty"`
AuthURL string `json:"auth_url,omitempty"`
UserCode string `json:"user_code,omitempty"`
Instructions string `json:"instructions,omitempty"`
DeviceCode string `json:"device_code,omitempty"`
IntervalSec int `json:"interval_sec,omitempty"`
ExpiresAt string `json:"expires_at,omitempty"`
}
type OAuthSessionInfo ¶
type ProviderExecutionError ¶ added in v0.0.3
type ProviderExecutionError struct {
Code string `json:"code,omitempty"`
Message string `json:"message,omitempty"`
Stage string `json:"stage,omitempty"`
Retryable bool `json:"retryable,omitempty"`
Source string `json:"source,omitempty"`
}
func NewProviderExecutionError ¶ added in v1.1.0
func NewProviderExecutionError(code, message, stage string, retryable bool, source string) *ProviderExecutionError
func (*ProviderExecutionError) Error ¶ added in v1.1.0
func (e *ProviderExecutionError) Error() string
type ProviderExecutionResult ¶ added in v0.0.3
type ProviderExecutionResult struct {
Body []byte `json:"-"`
StatusCode int `json:"status_code,omitempty"`
ContentType string `json:"content_type,omitempty"`
AttemptKind string `json:"attempt_kind,omitempty"`
Retryable bool `json:"retryable,omitempty"`
Failure oauthFailureReason `json:"failure_reason,omitempty"`
Error *ProviderExecutionError `json:"error,omitempty"`
}
type ProviderRefreshResult ¶
type ProviderRefreshResult struct {
Provider string `json:"provider,omitempty"`
Checked int `json:"checked,omitempty"`
Refreshed int `json:"refreshed,omitempty"`
Skipped int `json:"skipped,omitempty"`
Failed int `json:"failed,omitempty"`
Accounts []ProviderRefreshAccountResult `json:"accounts,omitempty"`
}
type ProviderRuntimeQuery ¶
type ProviderRuntimeSummary ¶
type ProviderRuntimeSummary struct {
TotalProviders int `json:"total_providers"`
Healthy int `json:"healthy"`
Degraded int `json:"degraded"`
Critical int `json:"critical"`
InCooldown int `json:"in_cooldown"`
LowHealth int `json:"low_health"`
RecentErrors int `json:"recent_errors"`
Providers []ProviderRuntimeSummaryItem `json:"providers,omitempty"`
}
func GetProviderRuntimeSummary ¶
func GetProviderRuntimeSummary(cfg *config.Config, query ProviderRuntimeQuery) ProviderRuntimeSummary
type ProviderRuntimeSummaryItem ¶
type ProviderRuntimeSummaryItem struct {
Name string `json:"name,omitempty"`
Auth string `json:"auth,omitempty"`
Status string `json:"status,omitempty"`
APIState providerAPIRuntimeState `json:"api_state,omitempty"`
OAuthAccounts []OAuthAccountInfo `json:"oauth_accounts,omitempty"`
CandidateOrder []providerRuntimeCandidate `json:"candidate_order,omitempty"`
LastSuccess *providerRuntimeEvent `json:"last_success,omitempty"`
LastSuccessAt string `json:"last_success_at,omitempty"`
LastError *providerRuntimeEvent `json:"last_error,omitempty"`
LastErrorAt string `json:"last_error_at,omitempty"`
LastErrorReason string `json:"last_error_reason,omitempty"`
TopCandidateChangedAt string `json:"top_candidate_changed_at,omitempty"`
StaleForSec int64 `json:"stale_for_sec,omitempty"`
InCooldown bool `json:"in_cooldown"`
LowHealth bool `json:"low_health"`
HasRecentErrors bool `json:"has_recent_errors"`
TopCandidate *providerRuntimeCandidate `json:"top_candidate,omitempty"`
}
type QwenProvider ¶
type QwenProvider struct {
// contains filtered or unexported fields
}
func NewQwenProvider ¶
func (*QwenProvider) Chat ¶
func (p *QwenProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*QwenProvider) ChatStream ¶
func (p *QwenProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*QwenProvider) CountTokens ¶
func (p *QwenProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*QwenProvider) GetDefaultModel ¶
func (p *QwenProvider) GetDefaultModel() string
type ResponsesCompactor ¶
type ResponsesCompactor interface {
SupportsResponsesCompact() bool
BuildSummaryViaResponsesCompact(ctx context.Context, model string, existingSummary string, messages []Message, maxSummaryChars int) (string, error)
}
ResponsesCompactor is an optional capability interface. Providers that support OpenAI /v1/responses/compact can implement this.
type StreamingLLMProvider ¶
type StreamingLLMProvider interface {
ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
}
StreamingLLMProvider is an optional capability interface for token-level streaming.
type TokenCounter ¶
type TokenCounter interface {
CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
}
TokenCounter is an optional capability for providers that expose a native token counting endpoint.
type ToolCall ¶
type ToolCall struct {
ID string `json:"id"`
Type string `json:"type,omitempty"`
Function *FunctionCall `json:"function,omitempty"`
Name string `json:"name,omitempty"`
Arguments map[string]interface{} `json:"arguments,omitempty"`
}
type ToolDefinition ¶
type ToolFunctionDefinition ¶
type VertexProvider ¶
type VertexProvider struct {
// contains filtered or unexported fields
}
func NewVertexProvider ¶
func (*VertexProvider) Chat ¶
func (p *VertexProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error)
func (*VertexProvider) ChatStream ¶
func (p *VertexProvider) ChatStream(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}, onDelta func(string)) (*LLMResponse, error)
func (*VertexProvider) CountTokens ¶
func (p *VertexProvider) CountTokens(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*UsageInfo, error)
func (*VertexProvider) GetDefaultModel ¶
func (p *VertexProvider) GetDefaultModel() string
Source Files ¶
- aistudio_provider.go
- aistudio_relay.go
- anthropic_transport.go
- antigravity_provider.go
- claude_provider.go
- codex_provider.go
- execution.go
- gemini_cli_provider.go
- gemini_provider.go
- http_provider.go
- http_proxy.go
- iflow_provider.go
- kimi_provider.go
- oauth.go
- openai_compat_adapter.go
- openai_compat_provider.go
- provider_registry.go
- provider_request_options.go
- provider_runtime.go
- qwen_provider.go
- responses_adapter.go
- token_estimator.go
- types.go
- vertex_provider.go