config

package
v0.5.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: May 11, 2026 License: MIT Imports: 7 Imported by: 0

Documentation

Index

Constants

View Source
// Connection defaults for the Ollama and OpenCodeGo providers, used when
// the corresponding env vars / config fields are unset.
const (
	OllamaDefaultBaseURL     = "http://localhost:11434/v1" // Ollama's OpenAI-compatible endpoint on the default local port
	OllamaDefaultModel       = "llama3.1:8b"
	OpenCodeGoDefaultBaseURL = "https://opencode.ai/zen/go/v1"
	OpenCodeGoDefaultModel   = "kimi-k2.5"
)
View Source
// Default base URLs for each provider's OpenAI-compatible endpoint.
const (
	DefaultOpenAIBaseURL           = "https://api.openai.com/v1"
	DefaultOpenRouterOpenAIBaseURL = "https://openrouter.ai/api/v1"
	DefaultCanopyWaveOpenAIBaseURL = "https://inference.canopywave.io/v1"
	DefaultGeminiOpenAIBaseURL     = "https://generativelanguage.googleapis.com/v1beta/openai"
	DefaultAnthropicOpenAIBaseURL  = "https://api.anthropic.com/v1"
	DefaultGrokOpenAIBaseURL       = "https://api.x.ai/v1"
	DefaultOpenCodeGoBaseURL       = "https://opencode.ai/zen/go/v1"
)

Default base URLs for each provider.

Variables

View Source
// Provider runtime profiles. Each profile describes how one provider is
// detected (DetectionEnv), which env vars override its model and base URL
// (ModelEnv / BaseURLEnv, checked in the order listed), and which env vars
// may supply its API key (APIKeys, first match wins by convention —
// NOTE(review): precedence is applied by the consuming code, confirm there).
// Most profiles fall back to the generic OPENAI_* vars after their own.
var (
	// AnthropicRuntimeProfile: detected via ANTHROPIC_API_KEY; OPENAI_* vars
	// accepted as fallbacks for model, base URL, and key.
	AnthropicRuntimeProfile = RuntimeProviderProfile{
		Mode: "anthropic", DefaultBaseURL: DefaultAnthropicOpenAIBaseURL, DefaultModel: "claude-3-5-sonnet-latest",
		DetectionEnv: []string{"ANTHROPIC_API_KEY"},
		ModelEnv:     []string{"ANTHROPIC_MODEL", "OPENAI_MODEL"},
		BaseURLEnv:   []string{"ANTHROPIC_BASE_URL", "OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "ANTHROPIC_API_KEY", Source: "anthropic"}, {Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// OpenAIRuntimeProfile: the plain OpenAI endpoint; no cross-provider fallbacks.
	OpenAIRuntimeProfile = RuntimeProviderProfile{
		Mode: "openai", DefaultBaseURL: DefaultOpenAIBaseURL, DefaultModel: "gpt-4o",
		DetectionEnv: []string{"OPENAI_API_KEY"},
		ModelEnv:     []string{"OPENAI_MODEL"},
		BaseURLEnv:   []string{"OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// GrokRuntimeProfile: accepts both GROK_* and the vendor's XAI_* spellings,
	// then the generic OPENAI_* fallbacks.
	GrokRuntimeProfile = RuntimeProviderProfile{
		Mode: "grok", DefaultBaseURL: DefaultGrokOpenAIBaseURL, DefaultModel: "grok-2",
		DetectionEnv: []string{"GROK_API_KEY", "XAI_API_KEY"},
		ModelEnv:     []string{"GROK_MODEL", "XAI_MODEL", "OPENAI_MODEL"},
		BaseURLEnv:   []string{"GROK_BASE_URL", "XAI_BASE_URL", "OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "GROK_API_KEY", Source: "grok"}, {Env: "XAI_API_KEY", Source: "xai"}, {Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// GeminiRuntimeProfile: targets Gemini's OpenAI-compatibility endpoint.
	GeminiRuntimeProfile = RuntimeProviderProfile{
		Mode: "gemini", DefaultBaseURL: DefaultGeminiOpenAIBaseURL, DefaultModel: "gemini-2.0-flash",
		DetectionEnv: []string{"GEMINI_API_KEY"},
		ModelEnv:     []string{"GEMINI_MODEL", "OPENAI_MODEL"},
		BaseURLEnv:   []string{"GEMINI_BASE_URL", "OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "GEMINI_API_KEY", Source: "gemini"}, {Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// OpenRouterRuntimeProfile: default model uses OpenRouter's vendor-prefixed naming.
	OpenRouterRuntimeProfile = RuntimeProviderProfile{
		Mode: "openrouter", DefaultBaseURL: DefaultOpenRouterOpenAIBaseURL, DefaultModel: "openai/gpt-4o-mini",
		DetectionEnv: []string{"OPENROUTER_API_KEY"},
		ModelEnv:     []string{"OPENROUTER_MODEL", "OPENAI_MODEL"},
		BaseURLEnv:   []string{"OPENROUTER_BASE_URL", "OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "OPENROUTER_API_KEY", Source: "openrouter"}, {Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// CanopyWaveRuntimeProfile: NOTE(review): Mode is "openai" rather than
	// "canopywave", unlike every other profile — presumably intentional because
	// CanopyWave is a pure OpenAI-compatible gateway, but confirm with the
	// code that switches on Mode.
	CanopyWaveRuntimeProfile = RuntimeProviderProfile{
		Mode: "openai", DefaultBaseURL: DefaultCanopyWaveOpenAIBaseURL, DefaultModel: "zai/glm-4.6",
		DetectionEnv: []string{"CANOPYWAVE_API_KEY"},
		ModelEnv:     []string{"CANOPYWAVE_MODEL", "OPENAI_MODEL"},
		BaseURLEnv:   []string{"CANOPYWAVE_BASE_URL", "OPENAI_BASE_URL", "OPENAI_API_BASE"},
		APIKeys:      []APIKeyDef{{Env: "CANOPYWAVE_API_KEY", Source: "canopywave"}, {Env: "OPENAI_API_KEY", Source: "openai"}},
	}
	// OpenCodeGoRuntimeProfile: self-contained; no OPENAI_* fallbacks.
	OpenCodeGoRuntimeProfile = RuntimeProviderProfile{
		Mode: "opencodego", DefaultBaseURL: DefaultOpenCodeGoBaseURL, DefaultModel: "kimi-k2.5",
		DetectionEnv: []string{"OPENCODEGO_API_KEY"},
		ModelEnv:     []string{"OPENCODEGO_MODEL"},
		BaseURLEnv:   []string{"OPENCODEGO_BASE_URL"},
		APIKeys:      []APIKeyDef{{Env: "OPENCODEGO_API_KEY", Source: "opencodego"}},
	}
)

Provider runtime profiles.

APIProviderDetectionOrder is the priority order for provider detection.

View Source
// OpenAICompatibleRuntimeProfileOrder is the detection order for runtime
// profiles: specific providers are probed before the generic "openai"
// entry so that a provider-specific key wins over a plain OPENAI_API_KEY.
// Keys must match the keys of OpenAICompatibleRuntimeProfiles.
var OpenAICompatibleRuntimeProfileOrder = []string{
	"openrouter", "grok", "gemini", "anthropic", "canopywave", "openai", "opencodego",
}

OpenAICompatibleRuntimeProfileOrder is the detection order for runtime profiles.

View Source
// OpenAICompatibleRuntimeProfiles maps each profile key to its runtime
// profile definition. Entries are written in detection-priority order
// (mirroring OpenAICompatibleRuntimeProfileOrder) purely for readability;
// Go map iteration order is unspecified regardless.
var OpenAICompatibleRuntimeProfiles = map[string]RuntimeProviderProfile{
	"openrouter": OpenRouterRuntimeProfile,
	"grok":       GrokRuntimeProfile,
	"gemini":     GeminiRuntimeProfile,
	"anthropic":  AnthropicRuntimeProfile,
	"canopywave": CanopyWaveRuntimeProfile,
	"openai":     OpenAIRuntimeProfile,
	"opencodego": OpenCodeGoRuntimeProfile,
}

OpenAICompatibleRuntimeProfiles maps profile key to its runtime profile.

View Source
// ProviderDetectionOrder is the priority order for provider detection.
// It is initialized from APIProviderDetectionOrder (declared elsewhere in
// this package) — NOTE(review): if that value is a slice, both variables
// share backing storage, so mutating one mutates the other; confirm intent.
var ProviderDetectionOrder = APIProviderDetectionOrder

ProviderDetectionOrder is the priority order for provider detection.

ProviderModelEnvKeys maps each provider to its model env var keys.

Functions

func ApplyOpenAICompatibleProvider

func ApplyOpenAICompatibleProvider(prefix, apiKey, model, baseURL string, overwrite bool)

ApplyOpenAICompatibleProvider sets env vars for an OpenAI-compatible provider.

func ApplyProviderConfigToEnv

func ApplyProviderConfigToEnv(config *ProviderConfig, overwrite bool, cat *catalog.ModelCatalog) string

ApplyProviderConfigToEnv applies the full provider config to env vars. Returns the detected provider or empty string.

func ApplyProviderEnv

func ApplyProviderEnv(provider string, config *ProviderConfig, activeModel string, overwrite bool, cat *catalog.ModelCatalog) map[string]string

ApplyProviderEnv computes the env vars for a specific provider and returns them as a map without modifying the process environment.

func ApplyProviderEnvToProcess

func ApplyProviderEnvToProcess(provider string, config *ProviderConfig, activeModel string, overwrite bool, cat *catalog.ModelCatalog)

ApplyProviderEnvToProcess applies the env vars for a specific provider directly to the process environment via os.Setenv.

func AsNonEmptyString

func AsNonEmptyString(v string) string

AsNonEmptyString returns trimmed string or empty.

func ClearProviderRuntimeEnv

func ClearProviderRuntimeEnv()

ClearProviderRuntimeEnv clears all provider-related env vars.

func DefaultProviderFromConfig

func DefaultProviderFromConfig(config *ProviderConfig) string

DefaultProviderFromConfig determines the default provider from config.

func GetProviderAPIKey

func GetProviderAPIKey(config *ProviderConfig, provider string) string

GetProviderAPIKey returns the configured API key for a provider.

func GetProviderActiveModel

func GetProviderActiveModel(config *ProviderConfig, provider string) string

GetProviderActiveModel gets the active model for a provider from config.

func GetProviderConfigDir

func GetProviderConfigDir() string

GetProviderConfigDir returns the config directory path.

func GetProviderConfigPath

func GetProviderConfigPath() string

GetProviderConfigPath returns the full path to provider.json.

func GetProviderModel

func GetProviderModel(config *ProviderConfig, provider string) string

GetProviderModel returns the configured model for a provider.

func IsLocalProviderURL

func IsLocalProviderURL(baseURL string) bool

IsLocalProviderURL checks if a base URL points to localhost.

func IsOpenAICompatibleRuntimeEnabled

func IsOpenAICompatibleRuntimeEnabled() bool

IsOpenAICompatibleRuntimeEnabled checks if any provider API key is set.

func IsProviderConfigured

func IsProviderConfigured(config *ProviderConfig, provider string) bool

IsProviderConfigured checks if a provider has valid configuration.

func NormalizeOllamaOpenAIBaseURL

func NormalizeOllamaOpenAIBaseURL(baseURL string) string

NormalizeOllamaOpenAIBaseURL ensures the URL ends with /v1.

func SaveProviderConfig

func SaveProviderConfig(config *ProviderConfig, path string) error

SaveProviderConfig saves provider config to disk.

func SetEnvValue

func SetEnvValue(key, value string, overwrite bool)

SetEnvValue sets an env var if value is non-empty and overwrite is allowed.

func ValidateAPIKey

func ValidateAPIKey(apiKey, providerName string) string

ValidateAPIKey validates an API key.

func ValidateBaseURL

func ValidateBaseURL(baseURL string) string

ValidateBaseURL validates a base URL.

Types

type APIKeyDef

// APIKeyDef maps an env var to a key source name.
type APIKeyDef struct {
	Env    string `json:"env"`    // environment variable to read the API key from
	Source string `json:"source"` // provider label reported as the key's origin
}

APIKeyDef maps an env var to a key source name.

type APIProvider

// APIProvider is the type for supported LLM providers. It is a type alias
// (not a defined type), so plain strings convert implicitly in both directions.
type APIProvider = string

APIProvider is the type for supported LLM providers.

// Canonical identifiers for every supported provider. These values are the
// keys used throughout provider configuration and detection.
const (
	ProviderAnthropic  APIProvider = "anthropic"
	ProviderOpenAI     APIProvider = "openai"
	ProviderCanopyWave APIProvider = "canopywave"
	ProviderOpenRouter APIProvider = "openrouter"
	ProviderGrok       APIProvider = "grok"
	ProviderGemini     APIProvider = "gemini"
	ProviderOllama     APIProvider = "ollama"
	ProviderOpenCodeGo APIProvider = "opencodego"
)

type OpenAICompatibleApiKeySource

// OpenAICompatibleApiKeySource identifies where the API key came from
// (matches APIKeyDef.Source values, e.g. "anthropic", "openai").
// NOTE(review): Go initialism style would spell this ...APIKeySource, but
// renaming the exported alias would break callers, so it is left as-is.
type OpenAICompatibleApiKeySource = string

OpenAICompatibleApiKeySource identifies where the API key came from.

type OpenAICompatibleRuntimeMode

// OpenAICompatibleRuntimeMode identifies the runtime mode (matches
// RuntimeProviderProfile.Mode values such as "openai" or "anthropic").
type OpenAICompatibleRuntimeMode = string

OpenAICompatibleRuntimeMode identifies the runtime mode.

type ProviderConfig

// ProviderConfig mirrors ~/.hawk/provider.json. All fields are optional;
// empty values mean "not configured" and are omitted when serialized.
type ProviderConfig struct {
	// Schema version and currently selected provider.
	Version        string `json:"_version,omitempty"`
	ActiveProvider string `json:"active_provider,omitempty"`

	// Per-provider API keys.
	AnthropicAPIKey  string `json:"anthropic_api_key,omitempty"`
	GrokAPIKey       string `json:"grok_api_key,omitempty"`
	XAIAPIKey        string `json:"xai_api_key,omitempty"`
	OpenAIAPIKey     string `json:"openai_api_key,omitempty"`
	CanopyWaveAPIKey string `json:"canopywave_api_key,omitempty"`
	OpenRouterAPIKey string `json:"openrouter_api_key,omitempty"`
	GeminiAPIKey     string `json:"gemini_api_key,omitempty"`
	OllamaBaseURL    string `json:"ollama_base_url,omitempty"` // Ollama is local-only: base URL instead of an API key
	OpenCodeGoAPIKey string `json:"opencodego_api_key,omitempty"`

	// Per-provider base URL overrides.
	AnthropicBaseURL  string `json:"anthropic_base_url,omitempty"`
	CanopyWaveBaseURL string `json:"canopywave_base_url,omitempty"`
	GrokBaseURL       string `json:"grok_base_url,omitempty"`
	XAIBaseURL        string `json:"xai_base_url,omitempty"`
	OpenAIBaseURL     string `json:"openai_base_url,omitempty"`
	OpenRouterBaseURL string `json:"openrouter_base_url,omitempty"`
	GeminiBaseURL     string `json:"gemini_base_url,omitempty"`
	OpenCodeGoBaseURL string `json:"opencodego_base_url,omitempty"`

	// Per-provider model overrides.
	AnthropicModel  string `json:"anthropic_model,omitempty"`
	OpenAIModel     string `json:"openai_model,omitempty"`
	CanopyWaveModel string `json:"canopywave_model,omitempty"`
	GrokModel       string `json:"grok_model,omitempty"`
	XAIModel        string `json:"xai_model,omitempty"`
	OpenRouterModel string `json:"openrouter_model,omitempty"`
	GeminiModel     string `json:"gemini_model,omitempty"`
	OllamaModel     string `json:"ollama_model,omitempty"`
	OpenCodeGoModel string `json:"opencodego_model,omitempty"`

	// Cross-provider model selections and Anthropic API versioning.
	ActiveModel      string `json:"active_model,omitempty"`
	ExplorationModel string `json:"exploration_model,omitempty"`
	AnthropicVersion string `json:"anthropic_version,omitempty"`
}

ProviderConfig mirrors ~/.hawk/provider.json.

func LoadProviderConfig

func LoadProviderConfig(path string) *ProviderConfig

LoadProviderConfig loads provider config from disk. Returns nil if the file doesn't exist or cannot be loaded; since this function returns no error value, use LoadProviderConfigWithError to distinguish a missing file from corrupt JSON or permission issues.

func LoadProviderConfigWithError

func LoadProviderConfigWithError(path string) (*ProviderConfig, error)

LoadProviderConfigWithError loads provider config from disk with detailed error reporting. Returns (nil, nil) if file doesn't exist. Returns (nil, error) for corrupt JSON or permission issues.

type ProviderTransport

// ProviderTransport is the transport type for provider requests. It is a
// defined type (not an alias), so values must be explicitly converted.
type ProviderTransport string

ProviderTransport is the transport type for provider requests.

// TransportChatCompletions selects the OpenAI chat-completions wire format;
// it is the only transport declared in this package.
const TransportChatCompletions ProviderTransport = "chat_completions"

type ReasoningEffort

// ReasoningEffort expresses how much reasoning a model should apply to a
// request; see ReasoningLow/ReasoningMedium/ReasoningHigh.
type ReasoningEffort string

ReasoningEffort levels.

// ReasoningEffort levels, ordered from least to most effort.
const (
	ReasoningLow    ReasoningEffort = "low"
	ReasoningMedium ReasoningEffort = "medium"
	ReasoningHigh   ReasoningEffort = "high"
)

type ResolvedOpenAICompatibleRuntime

// ResolvedOpenAICompatibleRuntime holds the resolved runtime config.
type ResolvedOpenAICompatibleRuntime struct {
	Mode         string                  `json:"mode"`           // runtime mode (see OpenAICompatibleRuntimeMode)
	Request      ResolvedProviderRequest `json:"request"`        // resolved model/base-URL/transport details
	APIKey       string                  `json:"api_key"`        // the key that will be sent — treat as a secret when logging
	APIKeySource string                  `json:"api_key_source"` // where the key came from (see OpenAICompatibleApiKeySource)
}

ResolvedOpenAICompatibleRuntime holds the resolved runtime config.

func ResolveOpenAICompatibleRuntime

func ResolveOpenAICompatibleRuntime(model, baseURL, fallbackModel string) ResolvedOpenAICompatibleRuntime

ResolveOpenAICompatibleRuntime resolves the full OpenAI-compatible runtime config.

type ResolvedProviderRequest

// ResolvedProviderRequest holds the resolved provider request details.
type ResolvedProviderRequest struct {
	Transport      ProviderTransport `json:"transport"`       // wire format to use (currently only chat_completions)
	RequestedModel string            `json:"requested_model"` // model as originally requested by caller/env
	ResolvedModel  string            `json:"resolved_model"`  // model after applying defaults/fallbacks
	BaseURL        string            `json:"base_url"`
	// Reasoning is nil when no reasoning effort was requested.
	Reasoning *struct {
		Effort ReasoningEffort `json:"effort"`
	} `json:"reasoning,omitempty"`
}

ResolvedProviderRequest holds the resolved provider request details.

func ResolveProviderRequest

func ResolveProviderRequest(model, baseURL, fallbackModel string) ResolvedProviderRequest

ResolveProviderRequest resolves model/baseUrl/transport from options and env.

type RuntimeProviderProfile

// RuntimeProviderProfile defines how a provider is detected and configured
// at runtime. The *Env slices are candidate env-var names; by convention the
// first set variable wins — NOTE(review): precedence is enforced by the
// consuming resolution code, confirm there.
type RuntimeProviderProfile struct {
	Mode           string      `json:"mode"`             // runtime mode label (see OpenAICompatibleRuntimeMode)
	DefaultBaseURL string      `json:"default_base_url"` // used when no BaseURLEnv var is set
	DefaultModel   string      `json:"default_model"`    // used when no ModelEnv var is set
	DetectionEnv   []string    `json:"detection_env"`    // any of these being set marks the provider as available
	ModelEnv       []string    `json:"model_env"`        // model override candidates
	BaseURLEnv     []string    `json:"base_url_env"`     // base URL override candidates
	APIKeys        []APIKeyDef `json:"api_keys"`         // API key sources to try
}

RuntimeProviderProfile defines how a provider is detected and configured at runtime.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL