Documentation
¶
Index ¶
- Constants
- Variables
- func AvailableEmbeddingTool(toolName string) bool
- func AvailableMCPTool(toolName string, client *MCPClient) bool
- func AvailableSearchTool(toolName string) bool
- func BuildAnthropicMessages(messages []UniversalMessage) []anthropic.MessageParam
- func BuildGeminiMessages(messages []UniversalMessage) []*gemini.Content
- func BuildOpenAIMessages(messages []UniversalMessage) []openai.ChatCompletionMessage
- func BuildOpenChatMessages(messages []UniversalMessage) []*model.ChatCompletionMessage
- func CallAgent(op *AgentOptions) error
- func CheckIfImageFromBytes(data []byte) (bool, string, error)
- func CheckIfImageFromPath(filePath string) (bool, string, error)
- func ClearTokenCache()
- func Contains(list []string, item string) bool
- func ConvertMessages(data []byte, sourceProvider, targetProvider string) ([]byte, error)
- func Debugf(format string, args ...interface{})
- func Debugln(args ...interface{})
- func DetectAnthropicKeyMessage(msg *anthropic.MessageParam) bool
- func DetectGeminiKeyMessage(msg *gemini.Content) bool
- func DetectMessageProvider(data []byte) string
- func DetectModelProvider(endPoint string, modelName string) string
- func DetectOpenAIKeyMessage(msg *openai.ChatCompletionMessage) bool
- func Diff(content1, content2, file1, file2 string, contextLines int) string
- func DisableCodeExecution()
- func EnableCodeExecution()
- func EndWithNewline(s string) bool
- func Errorf(format string, args ...interface{})
- func Errorln(args ...interface{})
- func EstimateAnthropicMessageTokens(msg anthropic.MessageParam) int
- func EstimateAnthropicMessagesTokens(messages []anthropic.MessageParam) int
- func EstimateAnthropicToolTokens(tools []anthropic.ToolUnionParam) int
- func EstimateGeminiMessageTokens(msg *genai.Content) int
- func EstimateGeminiMessagesTokens(messages []*genai.Content) int
- func EstimateGeminiToolTokens(tools []*genai.Tool) int
- func EstimateJSONTokens(data interface{}) int
- func EstimateOpenAIMessageTokens(msg openai.ChatCompletionMessage) int
- func EstimateOpenAIMessagesTokens(messages []openai.ChatCompletionMessage) int
- func EstimateOpenAIToolTokens(tools []openai.Tool) int
- func EstimateOpenChatMessageTokens(msg *openchat.ChatCompletionMessage) int
- func EstimateOpenChatMessagesTokens(messages []*openchat.ChatCompletionMessage) int
- func EstimateOpenChatToolTokens(tools []*openchat.Tool) int
- func EstimateTokens(text string) int
- func ExtractTextFromURL(url string, config *ExtractorConfig) ([]string, error)
- func ExtractThinkTags(content string) (thinking, cleaned string)
- func FetchProcess(urls []string) []string
- func FilterToolArguments(argsMap map[string]interface{}, ignoreKeys []string) map[string]interface{}
- func FindConvosByIndex(idx string) (string, error)
- func FormatMinutesSeconds(d time.Duration) string
- func GenerateTempFileName() string
- func GetAllEmbeddingTools() []string
- func GetAllSearchTools() []string
- func GetAnthropicMessageKey(msg anthropic.MessageParam) string
- func GetConvoDir() string
- func GetCurrentTokenCount(messages []openai.ChatCompletionMessage) int
- func GetCurrentTokenCountGemini(messages []*genai.Content) int
- func GetCurrentTokenCountOpenChat(messages []*model.ChatCompletionMessage) int
- func GetDefaultSearchEngineName() string
- func GetFileContent(filePath string) (string, error)
- func GetFilePath(dir string, filename string) string
- func GetGeminiMessageKey(msg *genai.Content) string
- func GetLogger() *log.Logger
- func GetMIMEType(filePath string) string
- func GetMIMETypeByContent(data []byte) string
- func GetNoneSearchEngineName() string
- func GetOpenAIMessageKey(msg openai.ChatCompletionMessage) string
- func GetOpenChatMessageKey(msg *model.ChatCompletionMessage) string
- func GetSanitizeTitle(title string) string
- func GetStringValue(data map[string]interface{}, key string) string
- func GetTerminalWidth() int
- func GetUserConfigDir() string
- func HasContent(s *string) bool
- func Infof(format string, args ...interface{})
- func InitLogger()
- func IsAudioMIMEType(mimeType string) bool
- func IsCodeExecutionEnabled() bool
- func IsExcelMIMEType(mimeType string) bool
- func IsImageMIMEType(mimeType string) bool
- func IsModelGemini3(modelName string) bool
- func IsPDFMIMEType(mimeType string) bool
- func IsStdinPipe(source string) bool
- func IsSwitchAgentError(err error) bool
- func IsTextMIMEType(mimeType string) bool
- func IsUnknownMIMEType(mimeType string) bool
- func IsValidEmbeddingTool(toolName string) bool
- func IsVideoMIMEType(mimeType string) bool
- func MakeUserSubDir(subparts ...string) string
- func NeedUserConfirm(info string, prompt string) (bool, error)
- func NewLogger() *log.Logger
- func Ptr[T any](t T) *T
- func RenderAnthropicConversationLog(data []byte) string
- func RenderGeminiConversationLog(data []byte) string
- func RenderOpenAIConversationLog(data []byte) string
- func RunWorkflow(config *WorkflowConfig, prompt string) error
- func Successf(format string, args ...interface{})
- func TerminalSupportsTrueColor() bool
- func TruncateString(s string, maxLen int) string
- func Warnf(format string, args ...interface{})
- func Warnln(args ...interface{})
- type Agent
- func (ag *Agent) CompleteReasoning()
- func (ag *Agent) Error(text string)
- func (ag *Agent) Gemini2CopyToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2CreateDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2DeleteDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2DeleteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2EditFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2ListDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2ListMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2MCPToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2MoveToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2ReadFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2ReadMultipleFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2SaveMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2SearchFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2SearchTextInFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2ShellToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2SwitchAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2WebFetchToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) Gemini2WriteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
- func (ag *Agent) GenerateAnthropicStream() error
- func (ag *Agent) GenerateGemini2Stream() error
- func (ag *Agent) GenerateOpenAIStream() error
- func (ag *Agent) GenerateOpenChatStream() error
- func (ag *Agent) SortAnthropicMessagesByOrder() error
- func (ag *Agent) SortOpenAIMessagesByOrder() error
- func (ag *Agent) SortOpenChatMessagesByOrder() error
- func (ag *Agent) StartIndicator(text string)
- func (ag *Agent) StartReasoning()
- func (ag *Agent) StopIndicator()
- func (ag *Agent) Warn(text string)
- func (ag *Agent) WriteDiffConfirm(text string)
- func (ag *Agent) WriteEnd()
- func (ag *Agent) WriteFunctionCall(text string)
- func (ag *Agent) WriteMarkdown()
- func (ag *Agent) WriteReasoning(text string)
- func (ag *Agent) WriteText(text string)
- func (ag *Agent) WriteUsage()
- type AgentOptions
- type Anthropic
- type AnthropicConversation
- func (c *AnthropicConversation) Clear() error
- func (c *AnthropicConversation) GetMessages() interface{}
- func (c *AnthropicConversation) Load() error
- func (c *AnthropicConversation) Push(messages ...interface{})
- func (c *AnthropicConversation) Save() error
- func (c *AnthropicConversation) SetMessages(messages interface{})
- type AtRefProcessor
- func (p *AtRefProcessor) AddExcludePattern(pattern string)
- func (p *AtRefProcessor) ParseAtReferences(text string) []AtReference
- func (p *AtRefProcessor) ProcessReferences(text string, references []AtReference) (string, error)
- func (p *AtRefProcessor) ProcessText(text string) (string, error)
- func (p *AtRefProcessor) SetMaxDirItems(count int)
- func (p *AtRefProcessor) SetMaxFileSize(size int64)
- type AtReference
- type BaseConversation
- func (c *BaseConversation) Clear() error
- func (c *BaseConversation) GetMessages() interface{}
- func (c *BaseConversation) GetPath() string
- func (c *BaseConversation) Load() error
- func (c *BaseConversation) Open(title string) error
- func (c *BaseConversation) Push(messages ...interface{})
- func (c *BaseConversation) Save() error
- func (c *BaseConversation) SetMessages(messages interface{})
- func (c *BaseConversation) SetPath(title string)
- type ContextManager
- func (cm *ContextManager) PrepareAnthropicMessages(messages []anthropic.MessageParam, systemPrompt string, ...) ([]anthropic.MessageParam, bool)
- func (cm *ContextManager) PrepareGeminiMessages(messages []*genai.Content, systemPrompt string, tools []*genai.Tool) ([]*genai.Content, bool)
- func (cm *ContextManager) PrepareOpenAIMessages(messages []openai.ChatCompletionMessage, tools []openai.Tool) ([]openai.ChatCompletionMessage, bool)
- func (cm *ContextManager) PrepareOpenChatMessages(messages []*model.ChatCompletionMessage, tools []*model.Tool) ([]*model.ChatCompletionMessage, bool)
- type ConversationManager
- type ConvoMeta
- type ExtractorConfig
- type FileData
- type FileRenderer
- type Gemini2Agent
- type Gemini2Conversation
- func (g *Gemini2Conversation) Clear() error
- func (g *Gemini2Conversation) GetMessages() interface{}
- func (g *Gemini2Conversation) Load() error
- func (g *Gemini2Conversation) Push(messages ...interface{})
- func (g *Gemini2Conversation) Save() error
- func (g *Gemini2Conversation) SetMessages(messages interface{})
- type Indicator
- type MCPClient
- func (mc *MCPClient) AddHttpServer(name string, url string, headers map[string]string) error
- func (mc *MCPClient) AddSseServer(name string, url string, headers map[string]string) error
- func (mc *MCPClient) AddStdServer(name string, cmd string, env map[string]string, cwd string, args ...string) error
- func (mc *MCPClient) CallTool(toolName string, args map[string]any) (*MCPToolResponse, error)
- func (mc *MCPClient) Close()
- func (mc *MCPClient) FindTool(toolName string) *MCPSession
- func (mc *MCPClient) GetAllServers() []*MCPServer
- func (mc *MCPClient) GetPrompts(session *MCPSession) (*[]MCPPrompt, error)
- func (mc *MCPClient) GetResources(session *MCPSession) (*[]MCPResource, error)
- func (mc *MCPClient) GetTools(session *MCPSession) (*[]MCPTool, error)
- func (mc *MCPClient) Init(servers map[string]*data.MCPServer, option MCPLoadOption) error
- type MCPLoadOption
- type MCPPrompt
- type MCPResource
- type MCPServer
- type MCPSession
- type MCPTool
- type MCPToolResponse
- type MCPToolResponseType
- type Markdown
- type ModelInfo
- type ModelLimits
- type OpenAI
- type OpenAIConversation
- func (c *OpenAIConversation) Clear() error
- func (c *OpenAIConversation) GetMessages() interface{}
- func (c *OpenAIConversation) Load() error
- func (c *OpenAIConversation) Push(messages ...interface{})
- func (c *OpenAIConversation) Save() error
- func (c *OpenAIConversation) SetMessages(messages interface{})
- type OpenChat
- type OpenChatConversation
- func (c *OpenChatConversation) Clear() error
- func (c *OpenChatConversation) GetMessages() interface{}
- func (c *OpenChatConversation) Load() error
- func (c *OpenChatConversation) Push(messages ...interface{})
- func (c *OpenChatConversation) Save() error
- func (c *OpenChatConversation) SetMessages(messages interface{})
- type OpenFunctionDefinition
- type OpenProcessor
- func (op *OpenProcessor) AnthropicCopyToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicCreateDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicDeleteDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicDeleteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicEditFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicListMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicMCPToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicMoveToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicReadFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicReadMultipleFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSaveMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSearchFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSearchTextInFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicShellToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicSwitchAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWebFetchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWebSearchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) AnthropicWriteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
- func (op *OpenProcessor) OpenAICopyToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAICreateDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIDeleteDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIDeleteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIEditFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIListMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIMCPToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIMoveToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIReadFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIReadMultipleFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISaveMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISearchFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISearchTextInFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIShellToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAISwitchAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWebFetchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWebSearchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenAIWriteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatCopyToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatCreateDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatDeleteDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatDeleteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatEditFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatListMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatMCPToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatMoveToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatReadFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatReadMultipleFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSaveMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSearchFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSearchTextInFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatShellToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatSwitchAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWebFetchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWebSearchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- func (op *OpenProcessor) OpenChatWriteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
- type OpenTool
- type Render
- type SearchEngine
- func (s *SearchEngine) BingSearch(query string) (map[string]any, error)
- func (s *SearchEngine) GoogleSearch(query string) (map[string]any, error)
- func (s *SearchEngine) NoneSearch(query string) (map[string]any, error)
- func (s *SearchEngine) RetrieveQueries(queries []string) string
- func (s *SearchEngine) RetrieveReferences(references []map[string]any) string
- func (s *SearchEngine) SerpAPISearch(query string, engine string) (map[string]any, error)
- func (s *SearchEngine) TavilySearch(query string) (map[string]any, error)
- type StatusStack
- func (s *StatusStack) ChangeTo(proc chan<- StreamNotify, notify StreamNotify, proceed <-chan bool)
- func (s *StatusStack) Clear()
- func (s *StatusStack) Debug()
- func (s *StatusStack) IsEmpty() bool
- func (s *StatusStack) IsTop(status StreamStatus) bool
- func (s *StatusStack) Peek() StreamStatus
- func (s *StatusStack) Pop() StreamStatus
- func (s *StatusStack) Push(status StreamStatus)
- func (s *StatusStack) Size() int
- type StdRenderer
- type StreamData
- type StreamDataType
- type StreamNotify
- type StreamStatus
- type SwitchAgentError
- type TavilyError
- type TavilyErrorDetail
- type TavilyResponse
- type TavilyResult
- type ThinkingLevel
- func (t ThinkingLevel) Display() string
- func (t ThinkingLevel) IsEnabled() bool
- func (t ThinkingLevel) String() string
- func (t ThinkingLevel) ToAnthropicParams() anthropic.ThinkingConfigParamUnion
- func (t ThinkingLevel) ToGeminiConfig(modelName string) *genai.ThinkingConfig
- func (t ThinkingLevel) ToOpenAIReasoningEffort() string
- func (t ThinkingLevel) ToOpenChatParams() (*model.Thinking, *model.ReasoningEffort)
- type TokenCache
- func (tc *TokenCache) Clear()
- func (tc *TokenCache) Get(key string) (int, bool)
- func (tc *TokenCache) GetOrComputeAnthropicTokens(msg anthropic.MessageParam) int
- func (tc *TokenCache) GetOrComputeGeminiTokens(msg *genai.Content) int
- func (tc *TokenCache) GetOrComputeOpenAITokens(msg openai.ChatCompletionMessage) int
- func (tc *TokenCache) GetOrComputeOpenChatTokens(msg *model.ChatCompletionMessage) int
- func (tc *TokenCache) Set(key string, count int)
- func (tc *TokenCache) Size() int
- func (tc *TokenCache) Stats() (hits, misses int64, size int)
- type TokenUsage
- type ToolType
- type ToolsUse
- type TruncationStrategy
- type UniversalMessage
- func ParseAnthropicMessages(messages []anthropic.MessageParam) []UniversalMessage
- func ParseGeminiMessages(messages []*gemini.Content) []UniversalMessage
- func ParseOpenAIMessages(messages []openai.ChatCompletionMessage) []UniversalMessage
- func ParseOpenChatMessages(messages []*model.ChatCompletionMessage) []UniversalMessage
- type UniversalRole
- type WorkflowAgent
- type WorkflowAgentType
- type WorkflowConfig
Constants ¶
const ( // ToolRespConfirmShell is the template for the response to the user before executing a command. ToolRespConfirmShell = "```\n%s\n```\n%s" // ToolRespShellOutput is the template for the response to the user after executing a command. ToolRespShellOutput = `shell executed: %s Status: %s %s` ToolUserConfirmPrompt = "Do you want to proceed?" // ToolRespConfirmEditFile is the template for the response to the user before modifying a file, including the diff. ToolRespDiscardEditFile = "Based on your request, the OPERATION is CANCELLED: " + "Cancel edit file: %s\n" + "The user has explicitly declined to apply these file edits. The file will remain unchanged. Do not proceed with any file modifications or ask for further confirmation without explicit new user instruction." )
const ( // Model types ModelProviderGemini string = "gemini" // for google gemini models ModelProviderOpenAI string = "openai" ModelProviderOpenAICompatible string = "openai-compatible" ModelProviderAnthropic string = "anthropic" // for anthropic models (official sdk) ModelProviderUnknown string = "unknown" )
const ( TavilyUrl = "https://api.tavily.com/search" GoogleSearchEngine = "google" BingSearchEngine = "bing" TavilySearchEngine = "tavily" NoneSearchEngine = "none" )
const ( CharsPerTokenEnglish = 4.0 // Average for English text CharsPerTokenChinese = 2.5 // Tuned: 3 bytes/char / 2.5 = 1.2 tokens/char (balanced) CharsPerTokenJapanese = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char CharsPerTokenKorean = 2.0 // 3 bytes / 2.0 = 1.5 tokens/char CharsPerTokenCode = 3.5 // Tuned: Code is dense. 3.5 chars/token. CharsPerTokenJSON = 3.7 // JSON: Typically 3.5-4 characters per token. Tuned: 3.7 chars/token. CharsPerTokenDefault = 4.0 // Default fallback MessageOverheadTokens = 3 // Standard overhead per message (<|start|>role and <|end|>) ToolCallOverhead = 24 // Reduced from 100 to 24 (closer to reality for JSON overhead) // Media Token Costs (Heuristics) // 1MB = 1000 tokens TokenCostImageDefault = 1000 // Safe upper bound average for high-res images (OpenAI high detail is ~1105, low is 85) TokenCostImageGemini = 1000 // Fixed cost for Gemini images <= 384px (often tiled, but 258 is the base unit) // Video/Audio Heuristics (Tokens per MB - heavily estimated as we don't have duration) // Assumptions: // - Video: 2Mbps (.25MB/s). 1MB = 4s. Gemini Video: 263 tokens/s. 4s * 263 = 1052 tokens. // - Audio: 128kbps (16KB/s). 1MB = 64s. Gemini Audio: 32 tokens/s. 64s * 32 = 2048 tokens. TokenCostVideoPerMBGemini = 1000 TokenCostVideoPerMBOpenChat = 1000 // For base64 encoded video TokenCostAudioPerMBGemini = 2000 )
Token estimation constants These are refined based on modern tokenizer behavior (cl100k_base, qwen, etc.):
- English: ~4 chars/token (ASCII)
- Chinese: ~0.6-2.0 tokens/char (Qwen is efficient, OpenAI is 2.0). due to the different tokenization methods used by different models, the conversion ratios can vary We use 2.5 bytes/token => ~1.2 tokens/char as a balanced estimate.
- Japanese/Korean: ~1.5 tokens/char. 3 bytes/char / 2.0 => 1.5 tokens/char.
- Tool Calls: JSON structure overhead is small (~20 tokens), not 100.
const ( DefaultShellTimeout = 60 * time.Second MaxFileSize = 20 * 1024 * 1024 // 20MB )
Tool robustness constants
const ( WokflowConfirmPrompt = "\033[96mDo you want to proceed with this agent? (y/N):\033[0m " // use for wait for user confirm WokflowProceedPrompt = "\033[96mDoes that work for you? Proceed with next step? (y/N):\033[0m " // use for wait for proceed prompt WokflowModifyPrompt = "\033[96mPlease specify any changes you would like to make:\033[0m " // use for wait for user modify prompt )
const DefaultMaxCacheSize = 10000
DefaultMaxCacheSize is the default maximum number of entries in the cache
Variables ¶
var ( // RoleColors for message roles (initialized in init) RoleColors map[string]string // ContentTypeColors for special content (initialized in init) ContentTypeColors map[string]string )
var DefaultLimitsLegacy = ModelLimits{
ContextWindow: 32000,
MaxOutputTokens: 4096,
}
DefaultLimitsLegacy is the fallback for unknown models
var DefaultLimitsModern = ModelLimits{
ContextWindow: 128000,
MaxOutputTokens: 8192,
}
var DefaultModelLimits = map[string]ModelLimits{}/* 126 elements not displayed */
DefaultModelLimits is the registry of known model limits. Context window values must be from official documentation or verified by tests
var ExecutorPath string
ExecutorPath is the path to the executable to run for filtering. Defaults to os.Executable(). Can be overridden for testing.
Functions ¶
func AvailableEmbeddingTool ¶ added in v1.9.2
func AvailableMCPTool ¶ added in v1.11.4
func AvailableSearchTool ¶ added in v1.9.12
func BuildAnthropicMessages ¶ added in v1.13.10
func BuildAnthropicMessages(messages []UniversalMessage) []anthropic.MessageParam
BuildAnthropicMessages converts universal messages to Anthropic format. Handles: System role is inlined into the first user message. Preserves: OfText, OfThinking blocks
func BuildGeminiMessages ¶ added in v1.13.10
func BuildGeminiMessages(messages []UniversalMessage) []*gemini.Content
BuildGeminiMessages converts universal messages to Gemini format. Handles: System role is inlined into the first user message. Preserves: Parts with Text, Thought Maps: "assistant" → "model"
func BuildOpenAIMessages ¶ added in v1.13.10
func BuildOpenAIMessages(messages []UniversalMessage) []openai.ChatCompletionMessage
BuildOpenAIMessages converts universal messages to OpenAI format. Preserves: system role, Content, ReasoningContent
func BuildOpenChatMessages ¶ added in v1.13.10
func BuildOpenChatMessages(messages []UniversalMessage) []*model.ChatCompletionMessage
BuildOpenChatMessages converts universal messages to OpenChat (Volcengine) format. Preserves: system role, Content, ReasoningContent
func CallAgent ¶ added in v1.9.4
func CallAgent(op *AgentOptions) error
func CheckIfImageFromPath ¶
CheckIfImageFromPath attempts to decode a file as an image.
func ClearTokenCache ¶ added in v1.12.14
func ClearTokenCache()
ClearTokenCache clears the global token cache (useful for testing)
func ConvertMessages ¶ added in v1.13.10
ConvertMessages parses source provider data and builds target provider messages. Returns the converted data encoded as JSON.
Supported source/target providers: - ModelProviderOpenAI - ModelProviderOpenAICompatible (OpenChat) - ModelProviderAnthropic - ModelProviderGemini
func DetectAnthropicKeyMessage ¶ added in v1.13.10
func DetectAnthropicKeyMessage(msg *anthropic.MessageParam) bool
Detects if a message is definitely an Anthropic message
func DetectGeminiKeyMessage ¶ added in v1.13.10
Detects if a message is definitely a Gemini message
func DetectMessageProvider ¶ added in v1.5.1
Detects the conversation provider based on message format: OpenAICompatible — OpenAI messages that are pure text content; OpenAI — OpenAI messages that are unique to OpenAI; Anthropic — Anthropic messages that are unique to Anthropic; Gemini — Gemini messages that are unique to Gemini.
func DetectModelProvider ¶ added in v1.6.0
DetectModelProvider detects the model provider based on endpoint and model name. It first checks the endpoint domain, then falls back to model name patterns. This dual detection handles Chinese models hosted on US platforms (AWS, CoreWeave, etc.)
func DetectOpenAIKeyMessage ¶ added in v1.13.10
func DetectOpenAIKeyMessage(msg *openai.ChatCompletionMessage) bool
Detects if a message is definitely an OpenAI message
func DisableCodeExecution ¶ added in v1.7.1
func DisableCodeExecution()
func EnableCodeExecution ¶ added in v1.7.1
func EnableCodeExecution()
func EndWithNewline ¶ added in v1.9.7
func EstimateAnthropicMessageTokens ¶ added in v1.13.5
func EstimateAnthropicMessageTokens(msg anthropic.MessageParam) int
EstimateAnthropicMessageTokens estimates tokens for an Anthropic message.
func EstimateAnthropicMessagesTokens ¶ added in v1.13.5
func EstimateAnthropicMessagesTokens(messages []anthropic.MessageParam) int
EstimateAnthropicMessagesTokens estimates total tokens for a slice of Anthropic messages.
func EstimateAnthropicToolTokens ¶ added in v1.13.5
func EstimateAnthropicToolTokens(tools []anthropic.ToolUnionParam) int
EstimateAnthropicToolTokens estimates tokens for a slice of Anthropic tools.
func EstimateGeminiMessageTokens ¶ added in v1.12.14
EstimateGeminiMessageTokens estimates tokens for a Gemini content message.
func EstimateGeminiMessagesTokens ¶ added in v1.12.14
EstimateGeminiMessagesTokens estimates total tokens for a slice of Gemini messages.
func EstimateGeminiToolTokens ¶ added in v1.12.14
EstimateGeminiToolTokens estimates tokens for a slice of Gemini tools
func EstimateJSONTokens ¶ added in v1.12.14
func EstimateJSONTokens(data interface{}) int
EstimateJSONTokens estimates tokens for arbitrary JSON data. Useful for estimating tool results or complex structured content.
func EstimateOpenAIMessageTokens ¶ added in v1.12.14
func EstimateOpenAIMessageTokens(msg openai.ChatCompletionMessage) int
EstimateOpenAIMessageTokens estimates tokens for an OpenAI chat message. This accounts for role tokens, content, and tool calls.
func EstimateOpenAIMessagesTokens ¶ added in v1.12.14
func EstimateOpenAIMessagesTokens(messages []openai.ChatCompletionMessage) int
EstimateOpenAIMessagesTokens estimates total tokens for a slice of OpenAI messages.
func EstimateOpenAIToolTokens ¶ added in v1.12.14
EstimateOpenAIToolTokens estimates tokens for a slice of OpenAI tools
func EstimateOpenChatMessageTokens ¶ added in v1.12.14
func EstimateOpenChatMessageTokens(msg *openchat.ChatCompletionMessage) int
EstimateOpenChatMessageTokens estimates tokens for an OpenChat (Volcengine) message.
func EstimateOpenChatMessagesTokens ¶ added in v1.12.14
func EstimateOpenChatMessagesTokens(messages []*openchat.ChatCompletionMessage) int
EstimateOpenChatMessagesTokens estimates total tokens for a slice of OpenChat messages.
func EstimateOpenChatToolTokens ¶ added in v1.12.14
EstimateOpenChatToolTokens estimates tokens for a slice of OpenChat tools
func EstimateTokens ¶ added in v1.12.14
EstimateTokens provides fast character-based estimation for text. This is approximately 90% accurate compared to tiktoken.
func ExtractTextFromURL ¶ added in v1.6.14
func ExtractTextFromURL(url string, config *ExtractorConfig) ([]string, error)
ExtractTextFromURL fetches a URL and extracts the main text content Automatically detects content type and routes to appropriate handler: - text/plain, text/markdown: returns content directly - application/pdf: extracts text using PDF reader - text/html: parses and extracts text with boilerplate removal
func ExtractThinkTags ¶ added in v1.12.11
ExtractThinkTags extracts thinking content from <think>...</think> tags. Some providers (like MiniMax, some Qwen endpoints) embed reasoning content in <think> tags within the regular content field instead of using a separate reasoning_content field.
Returns:
- thinking: the extracted thinking content (empty if no tags found)
- cleaned: the content with <think> tags removed
func FetchProcess ¶ added in v1.6.14
func FilterToolArguments ¶ added in v1.12.19
func FindConvosByIndex ¶ added in v1.10.6
func FormatMinutesSeconds ¶ added in v1.10.1
func GenerateTempFileName ¶ added in v1.10.9
func GenerateTempFileName() string
func GetAllEmbeddingTools ¶ added in v1.9.2
func GetAllEmbeddingTools() []string
func GetAllSearchTools ¶ added in v1.9.12
func GetAllSearchTools() []string
func GetAnthropicMessageKey ¶ added in v1.13.5
func GetAnthropicMessageKey(msg anthropic.MessageParam) string
GetAnthropicMessageKey generates a cache key for an Anthropic message.
func GetConvoDir ¶ added in v1.6.10
func GetConvoDir() string
func GetCurrentTokenCount ¶ added in v1.12.14
func GetCurrentTokenCount(messages []openai.ChatCompletionMessage) int
GetCurrentTokenCount returns the current token count for OpenAI messages
func GetCurrentTokenCountGemini ¶ added in v1.12.14
GetCurrentTokenCountGemini returns the current token count for Gemini messages
func GetCurrentTokenCountOpenChat ¶ added in v1.12.14
func GetCurrentTokenCountOpenChat(messages []*model.ChatCompletionMessage) int
GetCurrentTokenCountOpenChat returns the current token count for OpenChat messages
func GetDefaultSearchEngineName ¶ added in v1.6.0
func GetDefaultSearchEngineName() string
func GetFileContent ¶ added in v1.10.0
func GetFilePath ¶ added in v1.5.1
func GetGeminiMessageKey ¶ added in v1.12.14
GetGeminiMessageKey generates a cache key for a Gemini message.
func GetMIMEType ¶ added in v1.4.0
func GetMIMETypeByContent ¶ added in v1.4.0
func GetNoneSearchEngineName ¶ added in v1.6.2
func GetNoneSearchEngineName() string
func GetOpenAIMessageKey ¶ added in v1.12.14
func GetOpenAIMessageKey(msg openai.ChatCompletionMessage) string
GetOpenAIMessageKey generates a cache key for an OpenAI message by JSON marshaling. This captures ALL fields (Content, ReasoningContent, ToolCalls, MultiContent, etc.) ensuring different messages never produce the same key.
func GetOpenChatMessageKey ¶ added in v1.12.14
func GetOpenChatMessageKey(msg *model.ChatCompletionMessage) string
GetOpenChatMessageKey generates a cache key for an OpenChat (Volcengine) message.
func GetSanitizeTitle ¶ added in v1.5.1
func GetStringValue ¶ added in v1.6.2
Helper function to safely extract string values
func GetTerminalWidth ¶ added in v1.12.1
func GetTerminalWidth() int
GetTerminalWidth returns the width of the terminal using a robust fallback chain: 1. Direct TTY query via golang.org/x/term (most reliable) 2. Tmux pane width (if inside tmux) 3. COLUMNS environment variable 4. tput cols command 5. Default fallback of 80
func GetUserConfigDir ¶ added in v1.5.1
func GetUserConfigDir() string
func HasContent ¶ added in v1.8.1
func InitLogger ¶ added in v1.2.0
func InitLogger()
func IsAudioMIMEType ¶ added in v1.7.1
func IsCodeExecutionEnabled ¶ added in v1.7.1
func IsCodeExecutionEnabled() bool
func IsExcelMIMEType ¶ added in v1.4.0
func IsImageMIMEType ¶ added in v1.4.0
func IsModelGemini3 ¶ added in v1.13.5
IsModelGemini3 checks if the model name is a Gemini 3 model
func IsPDFMIMEType ¶ added in v1.4.0
func IsStdinPipe ¶ added in v1.4.0
func IsSwitchAgentError ¶ added in v1.13.10
func IsTextMIMEType ¶ added in v1.4.0
func IsUnknownMIMEType ¶ added in v1.4.0
func IsValidEmbeddingTool ¶ added in v1.13.1
IsValidEmbeddingTool checks if a tool name is a valid embedding tool
func IsVideoMIMEType ¶ added in v1.13.1
func MakeUserSubDir ¶ added in v1.5.1
func NeedUserConfirm ¶ added in v1.11.10
NeedUserConfirm prompts the user for confirmation using charmbracelet/huh.
func RenderAnthropicConversationLog ¶ added in v1.13.6
RenderAnthropicConversationLog returns a string summary of Anthropic conversation
func RenderGeminiConversationLog ¶ added in v1.13.6
RenderGeminiConversationLog returns a string summary of Gemini conversation
func RenderOpenAIConversationLog ¶ added in v1.13.6
RenderOpenAIConversationLog returns a string summary of OpenAI conversation
func RunWorkflow ¶ added in v1.10.0
func RunWorkflow(config *WorkflowConfig, prompt string) error
RunWorkflow executes a workflow using the provided WorkflowConfig and initial prompt. It processes a series of agents in order, handling agent roles, interactive mode, and prompt modifications for the first agent as needed. Returns an error if the workflow encounters issues such as missing agent roles or agent execution errors.
func TerminalSupportsTrueColor ¶ added in v1.12.19
func TerminalSupportsTrueColor() bool
TerminalSupportsTrueColor detects if the terminal supports true color (24-bit) Returns true if COLORTERM is set to "truecolor" or "24bit"
func TruncateString ¶ added in v1.5.1
Helper function to truncate strings with ellipsis
Types ¶
type Agent ¶ added in v1.9.4
type Agent struct {
Model *ModelInfo
SystemPrompt string
UserPrompt string
Files []*FileData // Attachment files
NotifyChan chan<- StreamNotify // Sub Channel to send notifications
DataChan chan<- StreamData // Sub Channel to receive streamed text data
ProceedChan <-chan bool // Sub Channel to receive proceed signal
SearchEngine *SearchEngine // Search engine name
ToolsUse ToolsUse // Use tools
EnabledTools []string // List of enabled embedding tools
UseCodeTool bool // Use code tool
ThinkingLevel ThinkingLevel // Thinking level: off, low, medium, high
MCPClient *MCPClient // MCP client for MCP tools
MaxRecursions int // Maximum number of recursions for model calls
Markdown *Markdown // Markdown renderer
TokenUsage *TokenUsage // Token usage metainfo
Std *StdRenderer // Standard renderer
OutputFile *FileRenderer // File renderer
Status StatusStack // Stack to manage streaming status
Convo ConversationManager // Conversation manager
Indicator *Indicator // Indicator Spinner
LastWrittenData string // Last written data
}
func (*Agent) CompleteReasoning ¶ added in v1.9.7
func (ag *Agent) CompleteReasoning()
func (*Agent) Gemini2CopyToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2CopyToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2CreateDirectoryToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2CreateDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2DeleteDirectoryToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2DeleteDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2DeleteFileToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2DeleteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2EditFileToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2EditFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2ListDirectoryToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2ListDirectoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2ListMemoryToolCall ¶ added in v1.12.22
func (ag *Agent) Gemini2ListMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2MCPToolCall ¶ added in v1.11.4
func (ag *Agent) Gemini2MCPToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2MoveToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2MoveToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2ReadFileToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2ReadFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2ReadMultipleFilesToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2ReadMultipleFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2SaveMemoryToolCall ¶ added in v1.12.22
func (ag *Agent) Gemini2SaveMemoryToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2SearchFilesToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2SearchFilesToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2SearchTextInFileToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2SearchTextInFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2ShellToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2ShellToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2SwitchAgentToolCall ¶ added in v1.13.10
func (ag *Agent) Gemini2SwitchAgentToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2WebFetchToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2WebFetchToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) Gemini2WriteFileToolCall ¶ added in v1.10.4
func (ag *Agent) Gemini2WriteFileToolCall(call *genai.FunctionCall) (*genai.FunctionResponse, error)
func (*Agent) GenerateAnthropicStream ¶ added in v1.13.5
GenerateAnthropicStream generates a streaming response using Anthropic API
func (*Agent) GenerateGemini2Stream ¶ added in v1.9.4
func (*Agent) GenerateOpenAIStream ¶ added in v1.10.4
GenerateOpenAIStream generates a streaming response using OpenAI API
func (*Agent) GenerateOpenChatStream ¶ added in v1.9.4
In the current OpenChat API, we can't use cached tokens. The context API and response API are not available in the current Go library.
func (*Agent) SortAnthropicMessagesByOrder ¶ added in v1.13.5
func (*Agent) SortOpenAIMessagesByOrder ¶ added in v1.12.20
* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt
func (*Agent) SortOpenChatMessagesByOrder ¶ added in v1.12.20
* Sort the messages by order * 1. System Prompt -- always at the top * 2. History Prompts * - User Prompt * - Assistant Prompt
func (*Agent) StartIndicator ¶ added in v1.9.7
func (*Agent) StartReasoning ¶ added in v1.9.7
func (ag *Agent) StartReasoning()
StartReasoning notifies the user and logs to file that the agent has started thinking. It writes a status message to both Std and OutputFile if they are available.
func (*Agent) StopIndicator ¶ added in v1.9.7
func (ag *Agent) StopIndicator()
func (*Agent) WriteDiffConfirm ¶ added in v1.11.10
func (*Agent) WriteFunctionCall ¶ added in v1.9.7
func (*Agent) WriteMarkdown ¶ added in v1.9.7
func (ag *Agent) WriteMarkdown()
func (*Agent) WriteReasoning ¶ added in v1.9.7
WriteReasoning writes the provided reasoning text to both the standard output and an output file, applying specific formatting to each if they are available.
func (*Agent) WriteText ¶ added in v1.9.7
WriteText writes the given text to the Agent's Std, Markdown, and OutputFile writers if they are set.
func (*Agent) WriteUsage ¶ added in v1.9.7
func (ag *Agent) WriteUsage()
type AgentOptions ¶ added in v1.9.7
type AgentOptions struct {
Prompt string
SysPrompt string
Files []*FileData
ModelInfo *data.Model
SearchEngine *data.SearchEngine
MaxRecursions int
ThinkingLevel string
EnabledTools []string // List of enabled embedding tools
UseMCP bool
YoloMode bool // Whether to automatically approve tools
AppendMarkdown bool
AppendUsage bool
OutputFile string
QuietMode bool
ConvoName string
MCPConfig map[string]*data.MCPServer
}
type Anthropic ¶ added in v1.13.5
type Anthropic struct {
// contains filtered or unexported fields
}
type AnthropicConversation ¶ added in v1.13.5
type AnthropicConversation struct {
BaseConversation
Messages []anthropic.MessageParam
}
AnthropicConversation represents a conversation using Anthropic format
func (*AnthropicConversation) Clear ¶ added in v1.13.5
func (c *AnthropicConversation) Clear() error
Clear removes all messages from the conversation
func (*AnthropicConversation) GetMessages ¶ added in v1.13.5
func (c *AnthropicConversation) GetMessages() interface{}
func (*AnthropicConversation) Load ¶ added in v1.13.5
func (c *AnthropicConversation) Load() error
Load retrieves the conversation from disk
func (*AnthropicConversation) Push ¶ added in v1.13.5
func (c *AnthropicConversation) Push(messages ...interface{})
Push adds multiple messages to the conversation
func (*AnthropicConversation) Save ¶ added in v1.13.5
func (c *AnthropicConversation) Save() error
Save persists the conversation to disk
func (*AnthropicConversation) SetMessages ¶ added in v1.13.5
func (c *AnthropicConversation) SetMessages(messages interface{})
type AtRefProcessor ¶ added in v1.12.9
type AtRefProcessor struct {
// contains filtered or unexported fields
}
AtRefProcessor handles @ reference processing
func NewAtRefProcessor ¶ added in v1.12.9
func NewAtRefProcessor() *AtRefProcessor
NewAtRefProcessor creates a new @ reference processor
func (*AtRefProcessor) AddExcludePattern ¶ added in v1.12.9
func (p *AtRefProcessor) AddExcludePattern(pattern string)
AddExcludePattern adds a pattern to exclude from directory listings
func (*AtRefProcessor) ParseAtReferences ¶ added in v1.12.9
func (p *AtRefProcessor) ParseAtReferences(text string) []AtReference
ParseAtReferences finds all @ references in the given text
func (*AtRefProcessor) ProcessReferences ¶ added in v1.12.9
func (p *AtRefProcessor) ProcessReferences(text string, references []AtReference) (string, error)
ProcessReferences processes all @ references and returns augmented text
func (*AtRefProcessor) ProcessText ¶ added in v1.12.9
func (p *AtRefProcessor) ProcessText(text string) (string, error)
ProcessText processes text containing @ references and returns augmented text
func (*AtRefProcessor) SetMaxDirItems ¶ added in v1.12.9
func (p *AtRefProcessor) SetMaxDirItems(count int)
SetMaxDirItems sets the maximum number of directory items to list
func (*AtRefProcessor) SetMaxFileSize ¶ added in v1.12.9
func (p *AtRefProcessor) SetMaxFileSize(size int64)
SetMaxFileSize sets the maximum file size to include
type AtReference ¶ added in v1.12.9
type AtReference struct {
Original string // Original @ reference text (e.g., "@main.go")
Path string // Resolved file/directory path
}
AtReference represents a single @ reference found in text
type BaseConversation ¶ added in v1.6.0
BaseConversation holds common fields and methods for all conversation types
func (*BaseConversation) Clear ¶ added in v1.6.0
func (c *BaseConversation) Clear() error
func (*BaseConversation) GetMessages ¶ added in v1.10.6
func (c *BaseConversation) GetMessages() interface{}
func (*BaseConversation) GetPath ¶ added in v1.6.2
func (c *BaseConversation) GetPath() string
func (*BaseConversation) Load ¶ added in v1.10.6
func (c *BaseConversation) Load() error
func (*BaseConversation) Open ¶ added in v1.10.6
func (c *BaseConversation) Open(title string) error
Open initializes the conversation with the provided title, resolving an index to the actual conversation name if necessary. It resets the messages, sanitizes the conversation name for the path, and sets the internal path accordingly. Returns an error if the title cannot be resolved.
func (*BaseConversation) Push ¶ added in v1.10.6
func (c *BaseConversation) Push(messages ...interface{})
func (*BaseConversation) Save ¶ added in v1.10.6
func (c *BaseConversation) Save() error
func (*BaseConversation) SetMessages ¶ added in v1.10.6
func (c *BaseConversation) SetMessages(messages interface{})
func (*BaseConversation) SetPath ¶ added in v1.6.0
func (c *BaseConversation) SetPath(title string)
SetPath sets the file path for saving the conversation
type ContextManager ¶ added in v1.12.14
type ContextManager struct {
MaxInputTokens int // Maximum input tokens allowed
MaxOutputTokens int // Maximum output tokens allowed (new field for Anthropic)
Strategy TruncationStrategy // Strategy for handling overflow
BufferPercent float64 // Safety buffer (0.0-1.0)
}
ContextManager handles context window limits for LLM conversations
func NewContextManager ¶ added in v1.12.14
func NewContextManager(limits ModelLimits, strategy TruncationStrategy) *ContextManager
NewContextManager creates a context manager with the given model limits
func NewContextManagerForModel ¶ added in v1.12.14
func NewContextManagerForModel(modelName string, strategy TruncationStrategy) *ContextManager
NewContextManagerForModel creates a context manager by looking up the model name
func (*ContextManager) PrepareAnthropicMessages ¶ added in v1.13.5
func (cm *ContextManager) PrepareAnthropicMessages(messages []anthropic.MessageParam, systemPrompt string, tools []anthropic.ToolUnionParam) ([]anthropic.MessageParam, bool)
PrepareAnthropicMessages processes messages to fit within context window limits.
func (*ContextManager) PrepareGeminiMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareGeminiMessages(messages []*genai.Content, systemPrompt string, tools []*genai.Tool) ([]*genai.Content, bool)
PrepareGeminiMessages processes messages to fit within context window limits.
func (*ContextManager) PrepareOpenAIMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareOpenAIMessages(messages []openai.ChatCompletionMessage, tools []openai.Tool) ([]openai.ChatCompletionMessage, bool)
PrepareOpenAIMessages processes messages to fit within context window limits. Returns the processed messages and a boolean indicating if truncation occurred.
func (*ContextManager) PrepareOpenChatMessages ¶ added in v1.12.14
func (cm *ContextManager) PrepareOpenChatMessages(messages []*model.ChatCompletionMessage, tools []*model.Tool) ([]*model.ChatCompletionMessage, bool)
PrepareOpenChatMessages processes messages to fit within context window limits for OpenChat format.
type ConversationManager ¶ added in v1.6.0
type ConversationManager interface {
SetPath(title string)
GetPath() string
Load() error
Save() error
Open(title string) error
Clear() error
Push(messages ...interface{})
GetMessages() interface{}
SetMessages(messages interface{})
}
ConversationManager is an interface for handling conversation history
func ConstructConversationManager ¶ added in v1.10.6
func ConstructConversationManager(convoName string, provider string) (ConversationManager, error)
type ConvoMeta ¶ added in v1.6.10
func ListSortedConvos ¶ added in v1.6.10
ListSortedConvos returns a slice of ConvoMeta sorted by modTime descending
type ExtractorConfig ¶ added in v1.6.14
type ExtractorConfig struct {
UserAgent string
HeaderAccept string
Timeout time.Duration
MinTextLength int
BoilerplateIDs []string
BoilerplateClasses []string
}
Configuration options for the text extractor
type FileData ¶ added in v1.4.0
type FileData struct {
// contains filtered or unexported fields
}
func NewFileData ¶ added in v1.4.0
type FileRenderer ¶ added in v1.9.7
type FileRenderer struct {
// contains filtered or unexported fields
}
FileRenderer is a renderer that writes output to a file
func NewFileRenderer ¶ added in v1.9.7
func NewFileRenderer(filename string) (*FileRenderer, error)
NewFileRenderer creates a new instance of FileRenderer
func (*FileRenderer) Close ¶ added in v1.9.7
func (fr *FileRenderer) Close() error
Close closes the file renderer and its underlying file
func (*FileRenderer) GetFilename ¶ added in v1.9.7
func (fr *FileRenderer) GetFilename() string
GetFilename returns the name of the file being written to
func (*FileRenderer) Write ¶ added in v1.9.7
func (fr *FileRenderer) Write(args ...interface{})
func (*FileRenderer) Writef ¶ added in v1.9.7
func (fr *FileRenderer) Writef(format string, args ...interface{})
Writef writes formatted output to the file
func (*FileRenderer) Writeln ¶ added in v1.9.7
func (fr *FileRenderer) Writeln(args ...interface{})
type Gemini2Agent ¶ added in v1.9.10
type Gemini2Agent struct {
// contains filtered or unexported fields
}
type Gemini2Conversation ¶ added in v1.7.1
type Gemini2Conversation struct {
BaseConversation
Messages []*genai.Content
}
- Google Gemini 2.0 Conversation
Gemini2Conversation manages conversations for Google's Gemini model
func (*Gemini2Conversation) Clear ¶ added in v1.10.6
func (g *Gemini2Conversation) Clear() error
Clear removes all messages from the conversation
func (*Gemini2Conversation) GetMessages ¶ added in v1.10.6
func (g *Gemini2Conversation) GetMessages() interface{}
func (*Gemini2Conversation) Load ¶ added in v1.7.1
func (g *Gemini2Conversation) Load() error
Load retrieves the Gemini conversation from disk
func (*Gemini2Conversation) Push ¶ added in v1.10.6
func (g *Gemini2Conversation) Push(messages ...interface{})
Push adds multiple content items to the conversation history
func (*Gemini2Conversation) Save ¶ added in v1.7.1
func (g *Gemini2Conversation) Save() error
Save persists the Gemini conversation to disk
func (*Gemini2Conversation) SetMessages ¶ added in v1.10.6
func (g *Gemini2Conversation) SetMessages(messages interface{})
type Indicator ¶ added in v1.9.7
type Indicator struct {
// contains filtered or unexported fields
}
func NewIndicator ¶ added in v1.9.7
type MCPClient ¶ added in v1.11.4
type MCPClient struct {
// contains filtered or unexported fields
}
func GetMCPClient ¶ added in v1.11.4
func GetMCPClient() *MCPClient
func (*MCPClient) AddHttpServer ¶ added in v1.11.4
func (*MCPClient) AddSseServer ¶ added in v1.11.4
func (*MCPClient) AddStdServer ¶ added in v1.11.4
func (*MCPClient) FindTool ¶ added in v1.11.4
func (mc *MCPClient) FindTool(toolName string) *MCPSession
func (*MCPClient) GetAllServers ¶ added in v1.11.4
Returns a map grouping tools by MCP server session name, with each session containing a slice of its available tools.
func (*MCPClient) GetPrompts ¶ added in v1.11.8
func (mc *MCPClient) GetPrompts(session *MCPSession) (*[]MCPPrompt, error)
func (*MCPClient) GetResources ¶ added in v1.11.8
func (mc *MCPClient) GetResources(session *MCPSession) (*[]MCPResource, error)
func (*MCPClient) GetTools ¶ added in v1.11.4
func (mc *MCPClient) GetTools(session *MCPSession) (*[]MCPTool, error)
type MCPLoadOption ¶ added in v1.11.8
type MCPResource ¶ added in v1.11.8
type MCPServer ¶ added in v1.11.4
type MCPServer struct {
Name string
Allowed bool
Tools *[]MCPTool
Resources *[]MCPResource
Prompts *[]MCPPrompt
}
type MCPSession ¶ added in v1.11.4
type MCPSession struct {
// contains filtered or unexported fields
}
type MCPToolResponse ¶ added in v1.11.6
type MCPToolResponse struct {
Types []MCPToolResponseType
Contents []string
}
type MCPToolResponseType ¶ added in v1.11.6
type MCPToolResponseType string
const ( MCPResponseText MCPToolResponseType = "text" MCPResponseImage MCPToolResponseType = "image" MCPResponseAudio MCPToolResponseType = "audio" )
type Markdown ¶ added in v1.9.7
type Markdown struct {
// contains filtered or unexported fields
}
func NewMarkdown ¶ added in v1.9.7
func NewMarkdown() *Markdown
NewMarkdown creates a new instance of Markdown
type ModelLimits ¶ added in v1.12.14
type ModelLimits struct {
ContextWindow int // Total context window in tokens
MaxOutputTokens int // Maximum output tokens allowed
}
ModelLimits contains context window configuration for a model
func GetModelLimits ¶ added in v1.12.14
func GetModelLimits(modelName string) ModelLimits
GetModelLimits retrieves the limits for a given model name. It performs exact match first, then pattern matching, then returns defaults.
func (ModelLimits) MaxInputTokens ¶ added in v1.12.14
func (ml ModelLimits) MaxInputTokens(bufferPercent float64) int
MaxInputTokens calculates the maximum input tokens with a safety buffer. The buffer ensures there's always room for the model's response.
type OpenAI ¶ added in v1.10.4
type OpenAI struct {
// contains filtered or unexported fields
}
OpenAI manages the state of an ongoing conversation with an AI assistant
type OpenAIConversation ¶ added in v1.10.4
type OpenAIConversation struct {
BaseConversation
Messages []openai.ChatCompletionMessage
}
OpenAIConversation represents a conversation using OpenAI format
func (*OpenAIConversation) Clear ¶ added in v1.10.4
func (c *OpenAIConversation) Clear() error
Clear removes all messages from the conversation
func (*OpenAIConversation) GetMessages ¶ added in v1.10.6
func (c *OpenAIConversation) GetMessages() interface{}
func (*OpenAIConversation) Load ¶ added in v1.10.4
func (c *OpenAIConversation) Load() error
Load retrieves the conversation from disk
func (*OpenAIConversation) Push ¶ added in v1.10.6
func (c *OpenAIConversation) Push(messages ...interface{})
Push adds multiple messages to the conversation
func (*OpenAIConversation) Save ¶ added in v1.10.4
func (c *OpenAIConversation) Save() error
Save persists the conversation to disk
func (*OpenAIConversation) SetMessages ¶ added in v1.10.6
func (c *OpenAIConversation) SetMessages(messages interface{})
type OpenChat ¶ added in v1.5.1
type OpenChat struct {
// contains filtered or unexported fields
}
OpenChat manages the state of an ongoing conversation with an AI assistant
type OpenChatConversation ¶ added in v1.6.0
type OpenChatConversation struct {
BaseConversation
Messages []*model.ChatCompletionMessage
}
OpenChatConversation manages conversations for Volcengine model
func (*OpenChatConversation) Clear ¶ added in v1.10.6
func (c *OpenChatConversation) Clear() error
Clear removes all messages from the conversation
func (*OpenChatConversation) GetMessages ¶ added in v1.10.6
func (c *OpenChatConversation) GetMessages() interface{}
func (*OpenChatConversation) Load ¶ added in v1.6.0
func (c *OpenChatConversation) Load() error
Load retrieves the conversation from disk
func (*OpenChatConversation) Push ¶ added in v1.10.6
func (c *OpenChatConversation) Push(messages ...interface{})
Push adds multiple messages to the conversation
func (*OpenChatConversation) Save ¶ added in v1.6.0
func (c *OpenChatConversation) Save() error
Save persists the conversation to disk
func (*OpenChatConversation) SetMessages ¶ added in v1.10.6
func (c *OpenChatConversation) SetMessages(messages interface{})
type OpenFunctionDefinition ¶ added in v1.10.4
type OpenFunctionDefinition struct {
Name string
Description string
Parameters map[string]interface{}
}
OpenFunctionDefinition is a generic function definition that is not tied to any specific model.
type OpenProcessor ¶ added in v1.10.4
type OpenProcessor struct {
// contains filtered or unexported fields
}
OpenProcessor is the main processor for OpenAI-like models For tools implementation - It manages the context, notifications, data streaming, and tool usage - It handles queries and references, and maintains the status stack
func (*OpenProcessor) AnthropicCopyToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicCopyToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicCreateDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicCreateDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicDeleteDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicDeleteDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicDeleteFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicDeleteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicEditFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicEditFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListDirectoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicListDirectoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicListMemoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicListMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicMCPToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicMCPToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicMoveToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicMoveToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicReadFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicReadFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicReadMultipleFilesToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicReadMultipleFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSaveMemoryToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSaveMemoryToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSearchFilesToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSearchFilesToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicSearchTextInFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicSearchTextInFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicShellToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicShellToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
Anthropic tool implementations (wrapper functions)
func (*OpenProcessor) AnthropicSwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) AnthropicSwitchAgentToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWebFetchToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWebFetchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWebSearchToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWebSearchToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) AnthropicWriteFileToolCall ¶ added in v1.13.5
func (op *OpenProcessor) AnthropicWriteFileToolCall(toolCall anthropic.ToolUseBlockParam, argsMap *map[string]interface{}) (anthropic.MessageParam, error)
func (*OpenProcessor) OpenAICopyToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAICopyToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAICreateDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAICreateDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIDeleteDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIDeleteDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIDeleteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIDeleteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIEditFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIEditFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIListDirectoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIListMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenAIListMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIMCPToolCall ¶ added in v1.11.4
func (op *OpenProcessor) OpenAIMCPToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIMoveToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIMoveToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIReadFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIReadFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIReadMultipleFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIReadMultipleFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISaveMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenAISaveMemoryToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISearchFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAISearchFilesToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAISearchTextInFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAISearchTextInFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIShellToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIShellToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
OpenAI tool implementations (wrapper functions)
func (*OpenProcessor) OpenAISwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) OpenAISwitchAgentToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWebFetchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWebFetchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWebSearchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWebSearchToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenAIWriteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenAIWriteFileToolCall(toolCall openai.ToolCall, argsMap *map[string]interface{}) (openai.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatCopyToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatCopyToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatCreateDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatCreateDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatDeleteDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatDeleteDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatDeleteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatDeleteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatEditFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatEditFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListDirectoryToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatListDirectoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatListMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenChatListMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatMCPToolCall ¶ added in v1.11.4
func (op *OpenProcessor) OpenChatMCPToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatMoveToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatMoveToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatReadFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatReadFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
OpenChat tool implementations (wrapper functions)
func (*OpenProcessor) OpenChatReadMultipleFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatReadMultipleFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSaveMemoryToolCall ¶ added in v1.12.22
func (op *OpenProcessor) OpenChatSaveMemoryToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSearchFilesToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatSearchFilesToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSearchTextInFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatSearchTextInFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatShellToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatShellToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatSwitchAgentToolCall ¶ added in v1.13.10
func (op *OpenProcessor) OpenChatSwitchAgentToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWebFetchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWebFetchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWebSearchToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWebSearchToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
func (*OpenProcessor) OpenChatWriteFileToolCall ¶ added in v1.10.4
func (op *OpenProcessor) OpenChatWriteFileToolCall(toolCall *model.ToolCall, argsMap *map[string]interface{}) (*model.ChatCompletionMessage, error)
type OpenTool ¶ added in v1.10.4
type OpenTool struct {
Type ToolType
Function *OpenFunctionDefinition
}
OpenTool is a generic tool definition that is not tied to any specific model.
func GetOpenEmbeddingToolsFiltered ¶ added in v1.13.1
GetOpenEmbeddingToolsFiltered returns embedding tools filtered by the allowed list. If allowedTools is nil or empty, returns all embedding tools. Unknown tool names are gracefully ignored.
func MCPToolsToOpenTool ¶ added in v1.11.4
MCPToolsToOpenTool converts an MCPTools struct to an OpenTool with proper JSON schema
func (*OpenTool) ToAnthropicTool ¶ added in v1.13.5
func (ot *OpenTool) ToAnthropicTool() anthropic.ToolUnionParam
ToAnthropicTool converts an OpenTool to an anthropic.ToolUnionParam
func (*OpenTool) ToGeminiFunctions ¶ added in v1.10.4
func (ot *OpenTool) ToGeminiFunctions() *genai.FunctionDeclaration
ToGeminiFunctions converts an OpenTool to a genai.FunctionDeclaration
func (*OpenTool) ToOpenAITool ¶ added in v1.10.4
ToOpenAITool converts an OpenTool to an openai.Tool
func (*OpenTool) ToOpenChatTool ¶ added in v1.10.4
ToOpenChatTool converts an OpenTool to a model.Tool
type Render ¶ added in v1.9.7
type Render interface {
Writeln(args ...interface{})
Writef(format string, args ...interface{})
Write(args ...interface{})
}
type SearchEngine ¶ added in v1.9.4
type SearchEngine struct {
UseSearch bool
Name string
ApiKey string
CxKey string
MaxReferences int
// DeepDive indicates how many links to fetch content from
// If 0, it defaults to a small number (e.g. 3) for efficiency.
DeepDive int
}
func (*SearchEngine) BingSearch ¶ added in v1.9.4
func (s *SearchEngine) BingSearch(query string) (map[string]any, error)
BingSearch performs a simulated Bing search for the given query.
func (*SearchEngine) GoogleSearch ¶ added in v1.9.4
func (s *SearchEngine) GoogleSearch(query string) (map[string]any, error)
GoogleSearch performs a Google search for the given query, using explicit type conversions for protocol buffer compatibility.
func (*SearchEngine) NoneSearch ¶ added in v1.9.4
func (s *SearchEngine) NoneSearch(query string) (map[string]any, error)
func (*SearchEngine) RetrieveQueries ¶ added in v1.9.4
func (s *SearchEngine) RetrieveQueries(queries []string) string
func (*SearchEngine) RetrieveReferences ¶ added in v1.9.4
func (s *SearchEngine) RetrieveReferences(references []map[string]any) string
func (*SearchEngine) SerpAPISearch ¶ added in v1.9.4
func (*SearchEngine) TavilySearch ¶ added in v1.9.4
func (s *SearchEngine) TavilySearch(query string) (map[string]any, error)
type StatusStack ¶ added in v1.9.2
type StatusStack struct {
// contains filtered or unexported fields
}
StatusStack is a stack data structure for managing stream statuses.
func (*StatusStack) ChangeTo ¶ added in v1.9.2
func (s *StatusStack) ChangeTo(proc chan<- StreamNotify, notify StreamNotify, proceed <-chan bool)
func (*StatusStack) Clear ¶ added in v1.9.2
func (s *StatusStack) Clear()
func (*StatusStack) Debug ¶ added in v1.9.2
func (s *StatusStack) Debug()
func (*StatusStack) IsEmpty ¶ added in v1.9.2
func (s *StatusStack) IsEmpty() bool
func (*StatusStack) IsTop ¶ added in v1.9.2
func (s *StatusStack) IsTop(status StreamStatus) bool
func (*StatusStack) Peek ¶ added in v1.9.2
func (s *StatusStack) Peek() StreamStatus
Peek returns the status from the top of the stack without removing it. If the stack is empty, it returns the zero value (StatusUnknown).
func (*StatusStack) Pop ¶ added in v1.9.2
func (s *StatusStack) Pop() StreamStatus
Pop removes and returns the status from the top of the stack. If the stack is empty, it returns the zero value (StatusUnknown).
func (*StatusStack) Push ¶ added in v1.9.2
func (s *StatusStack) Push(status StreamStatus)
Push adds a state to the top of the stack.
func (*StatusStack) Size ¶ added in v1.9.2
func (s *StatusStack) Size() int
type StdRenderer ¶ added in v1.9.5
type StdRenderer struct {
}
func NewStdRenderer ¶ added in v1.9.5
func NewStdRenderer() *StdRenderer
func (*StdRenderer) Write ¶ added in v1.9.7
func (r *StdRenderer) Write(args ...interface{})
func (*StdRenderer) Writef ¶ added in v1.9.7
func (r *StdRenderer) Writef(format string, args ...interface{})
func (*StdRenderer) Writeln ¶ added in v1.9.7
func (r *StdRenderer) Writeln(args ...interface{})
type StreamData ¶ added in v1.9.2
type StreamData struct {
Text string
Type StreamDataType
}
type StreamDataType ¶ added in v1.9.2
type StreamDataType int
const ( DataTypeUnknown StreamDataType = iota DataTypeNormal // 1 DataTypeReasoning // 2 DataTypeFinished // 3 )
type StreamNotify ¶
type StreamNotify struct {
Status StreamStatus
Data string // For text content or error messages
Extra interface{} // For additional metadata (e.g., switch instruction)
}
type StreamStatus ¶
type StreamStatus int
const ( StatusUnknown StreamStatus = iota StatusProcessing // 1 StatusStarted // 2 StatusFinished // 3 StatusWarn // 4 StatusError // 5 StatusReasoning // 6 StatusReasoningOver // 7 StatusFunctionCalling // 8 StatusFunctionCallingOver // 9 StatusDiffConfirm // 10 StatusDiffConfirmOver // 11 StatusSwitchAgent // 12 )
type SwitchAgentError ¶ added in v1.13.10
func (*SwitchAgentError) Error ¶ added in v1.13.10
func (e *SwitchAgentError) Error() string
type TavilyError ¶ added in v1.2.0
type TavilyError struct {
Detail TavilyErrorDetail `json:"detail"`
}
type TavilyErrorDetail ¶ added in v1.2.0
type TavilyErrorDetail struct {
Error string `json:"error"`
}
type TavilyResponse ¶ added in v1.2.0
type TavilyResponse struct {
Query string `json:"query"`
Answer string `json:"answer"`
Images []string `json:"images"`
Results []TavilyResult `json:"results"`
ResponseTime float32 `json:"response_time"` // e.g., "1.67"
}
TavilyResponse represents the overall response returned by the Tavily API.
type TavilyResult ¶ added in v1.2.0
type TavilyResult struct {
Title string `json:"title"`
URL string `json:"url"`
Content string `json:"content"`
Score float64 `json:"score"`
RawContent *string `json:"raw_content"`
}
TavilyResult represents a single result entry in the Tavily API response.
type ThinkingLevel ¶ added in v1.13.7
type ThinkingLevel string
ThinkingLevel represents the unified thinking/reasoning level across providers. Maps to provider-specific configurations: - OpenAI: reasoning_effort ("low"/"medium"/"high") - OpenChat: model.Thinking + ReasoningEffort - Gemini 2.5: ThinkingBudget (token count, -1 for dynamic) - Gemini 3: ThinkingLevel ("LOW"/"MEDIUM"/"HIGH") - Anthropic: thinking.budget_tokens
const ( ThinkingLevelOff ThinkingLevel = "off" ThinkingLevelLow ThinkingLevel = "low" ThinkingLevelMedium ThinkingLevel = "medium" ThinkingLevelHigh ThinkingLevel = "high" )
func AllThinkingLevels ¶ added in v1.13.7
func AllThinkingLevels() []ThinkingLevel
AllThinkingLevels returns all valid thinking levels in order
func ParseThinkingLevel ¶ added in v1.13.7
func ParseThinkingLevel(s string) ThinkingLevel
ParseThinkingLevel normalizes user input to a valid ThinkingLevel. Supports backward compatibility with boolean values.
func (ThinkingLevel) Display ¶ added in v1.13.7
func (t ThinkingLevel) Display() string
Display returns a colorized display string for CLI output
func (ThinkingLevel) IsEnabled ¶ added in v1.13.7
func (t ThinkingLevel) IsEnabled() bool
IsEnabled returns true if thinking is enabled (not off)
func (ThinkingLevel) String ¶ added in v1.13.7
func (t ThinkingLevel) String() string
String returns the string representation
func (ThinkingLevel) ToAnthropicParams ¶ added in v1.13.7
func (t ThinkingLevel) ToAnthropicParams() anthropic.ThinkingConfigParamUnion
ToAnthropicParams returns the thinking budget tokens for Anthropic. Returns 0 for ThinkingLevelOff.
func (ThinkingLevel) ToGeminiConfig ¶ added in v1.13.7
func (t ThinkingLevel) ToGeminiConfig(modelName string) *genai.ThinkingConfig
ToGeminiConfig returns the Gemini ThinkingConfig based on model version. Gemini 3 uses ThinkingLevel, Gemini 2.5 uses ThinkingBudget.
func (ThinkingLevel) ToOpenAIReasoningEffort ¶ added in v1.13.7
func (t ThinkingLevel) ToOpenAIReasoningEffort() string
ToOpenAIReasoningEffort returns the OpenAI reasoning_effort parameter value. Returns empty string for ThinkingLevelOff (no param should be set).
func (ThinkingLevel) ToOpenChatParams ¶ added in v1.13.7
func (t ThinkingLevel) ToOpenChatParams() (*model.Thinking, *model.ReasoningEffort)
ToOpenChatParams returns the OpenChat model.Thinking and ReasoningEffort params.
type TokenCache ¶ added in v1.12.14
type TokenCache struct {
// contains filtered or unexported fields
}
TokenCache provides a thread-safe cache for storing token counts of LLM messages. It uses JSON-marshaled message content as keys to ensure correct uniqueness.
func GetGlobalTokenCache ¶ added in v1.12.14
func GetGlobalTokenCache() *TokenCache
GetGlobalTokenCache returns the global token cache instance
func NewTokenCache ¶ added in v1.12.14
func NewTokenCache(maxSize int) *TokenCache
NewTokenCache creates a new TokenCache with the specified maximum size
func (*TokenCache) Clear ¶ added in v1.12.14
func (tc *TokenCache) Clear()
Clear removes all entries from the cache
func (*TokenCache) Get ¶ added in v1.12.14
func (tc *TokenCache) Get(key string) (int, bool)
Get retrieves a cached token count for the given key. Returns the count and true if found, or 0 and false if not found.
func (*TokenCache) GetOrComputeAnthropicTokens ¶ added in v1.13.5
func (tc *TokenCache) GetOrComputeAnthropicTokens(msg anthropic.MessageParam) int
GetOrComputeAnthropicTokens retrieves cached tokens or computes and caches them.
func (*TokenCache) GetOrComputeGeminiTokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeGeminiTokens(msg *genai.Content) int
GetOrComputeGeminiTokens retrieves cached tokens or computes and caches them for Gemini.
func (*TokenCache) GetOrComputeOpenAITokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeOpenAITokens(msg openai.ChatCompletionMessage) int
GetOrComputeOpenAITokens retrieves cached tokens or computes and caches them.
func (*TokenCache) GetOrComputeOpenChatTokens ¶ added in v1.12.14
func (tc *TokenCache) GetOrComputeOpenChatTokens(msg *model.ChatCompletionMessage) int
GetOrComputeOpenChatTokens retrieves cached tokens or computes and caches them.
func (*TokenCache) Set ¶ added in v1.12.14
func (tc *TokenCache) Set(key string, count int)
Set stores a token count for the given key. If the cache is full, it evicts approximately half of the entries.
func (*TokenCache) Size ¶ added in v1.12.14
func (tc *TokenCache) Size() int
Size returns the current number of entries in the cache
func (*TokenCache) Stats ¶ added in v1.12.14
func (tc *TokenCache) Stats() (hits, misses int64, size int)
Stats returns cache statistics (hits, misses, size)
type TokenUsage ¶ added in v1.9.5
type TokenUsage struct {
InputTokens int
OutputTokens int
CachedTokens int
ThoughtTokens int
TotalTokens int
// For providers like Anthropic, cached tokens are not included in the prompt tokens
// OpenAI, OpenChat and Gemini all include cached tokens in the prompt tokens
CachedTokensInPrompt bool
}
func NewTokenUsage ¶ added in v1.9.7
func NewTokenUsage() *TokenUsage
func (*TokenUsage) RecordTokenUsage ¶ added in v1.9.5
func (tu *TokenUsage) RecordTokenUsage(input, output, cached, thought, total int)
func (*TokenUsage) Render ¶ added in v1.9.7
func (tu *TokenUsage) Render(render Render)
type ToolType ¶ added in v1.10.4
type ToolType string
const (
ToolTypeFunction ToolType = "function"
)
type ToolsUse ¶ added in v1.9.6
type ToolsUse struct {
AutoApprove bool // Whether tools can be used without user confirmation
}
type TruncationStrategy ¶ added in v1.12.14
type TruncationStrategy string
TruncationStrategy defines how to handle context overflow
const ( // StrategyTruncateOldest removes oldest messages first, preserving system prompt StrategyTruncateOldest TruncationStrategy = "truncate_oldest" // StrategySummarize replaces old context with a summary (future implementation) StrategySummarize TruncationStrategy = "summarize" // StrategyNone disables truncation - will fail if context exceeds limit StrategyNone TruncationStrategy = "none" // DefaultBufferPercent is the default safety buffer (80% of available space) DefaultBufferPercent = 0.80 )
type UniversalMessage ¶ added in v1.13.10
type UniversalMessage struct {
Role UniversalRole // "system", "user", "assistant"
Content string // Main text content
Reasoning string // Thinking/reasoning content (if any)
}
UniversalMessage is a provider-agnostic representation of a chat message. It extracts only the essential semantic content for cross-provider conversion. Key design decisions: 1. Only text content and reasoning are preserved. 2. Tool calls, tool responses, images, and other multimodal content are discarded. 3. Role normalization: "model" (Gemini) → "assistant"
func ParseAnthropicMessages ¶ added in v1.13.10
func ParseAnthropicMessages(messages []anthropic.MessageParam) []UniversalMessage
ParseAnthropicMessages converts Anthropic messages to universal format. Extracts: OfText blocks, OfThinking/OfRedactedThinking blocks Ignores: OfToolUse, OfToolResult, OfImage, OfDocument
func ParseGeminiMessages ¶ added in v1.13.10
func ParseGeminiMessages(messages []*gemini.Content) []UniversalMessage
ParseGeminiMessages converts Gemini messages to universal format. Extracts: Parts.Text, Parts.Thought Ignores: FunctionCall, FunctionResponse, InlineData Maps: "model" → "assistant"
func ParseOpenAIMessages ¶ added in v1.13.10
func ParseOpenAIMessages(messages []openai.ChatCompletionMessage) []UniversalMessage
ParseOpenAIMessages converts OpenAI messages to universal format. Extracts: Content, MultiContent[].Text, ReasoningContent Ignores: ToolCalls, FunctionCall, ImageURL
func ParseOpenChatMessages ¶ added in v1.13.10
func ParseOpenChatMessages(messages []*model.ChatCompletionMessage) []UniversalMessage
ParseOpenChatMessages converts OpenChat (Volcengine) messages to universal format.
type UniversalRole ¶ added in v1.13.10
type UniversalRole string
const ( UniversalRoleSystem UniversalRole = "system" UniversalRoleUser UniversalRole = "user" UniversalRoleAssistant UniversalRole = "assistant" )
func ConvertToUniversalRole ¶ added in v1.13.10
func ConvertToUniversalRole(role string) UniversalRole
func (UniversalRole) ConvertToAnthropic ¶ added in v1.13.10
func (r UniversalRole) ConvertToAnthropic() anthropic.MessageParamRole
func (UniversalRole) ConvertToGemini ¶ added in v1.13.10
func (r UniversalRole) ConvertToGemini() string
func (UniversalRole) ConvertToOpenAI ¶ added in v1.13.10
func (r UniversalRole) ConvertToOpenAI() string
func (UniversalRole) ConvertToOpenChat ¶ added in v1.13.10
func (r UniversalRole) ConvertToOpenChat() string
func (UniversalRole) String ¶ added in v1.13.10
func (r UniversalRole) String() string
type WorkflowAgent ¶ added in v1.10.0
type WorkflowAgent struct {
Name string
Role WorkflowAgentType
Model *data.Model
Search *data.SearchEngine
Template string
SystemPrompt string
EnabledTools []string
Think string
MCP bool
Usage bool
Markdown bool
InputDir string
OutputDir string
MaxRecursions int
OutputFile string
PassThrough bool // pass through current agent, only for debugging
ConvoName string // conversation name, for iterate prompt
}
WorkflowAgent defines the structure for a single agent in the workflow.
type WorkflowAgentType ¶ added in v1.10.0
type WorkflowAgentType string
const ( WorkflowAgentTypeMaster WorkflowAgentType = "master" WorkflowAgentTypeWorker WorkflowAgentType = "worker" )
type WorkflowConfig ¶ added in v1.10.0
type WorkflowConfig struct {
Agents []WorkflowAgent
InterActiveMode bool // Allow user confirm at each agent
}
WorkflowConfig defines the structure for the entire workflow.
Source Files
¶
- agent.go
- anthropic.go
- atools.go
- atref.go
- cache.go
- coder.go
- color.go
- context.go
- conversation.go
- converter.go
- diff.go
- errors.go
- fetch.go
- files.go
- gconversation.go
- gemini2.go
- gtools.go
- indicator.go
- logger.go
- markdown.go
- mcp.go
- models.go
- openai.go
- openchat.go
- opentools.go
- package.go
- provider.go
- readline.go
- render.go
- search.go
- serializer.go
- status.go
- term.go
- think.go
- tokenizer.go
- tools.go
- usage.go
- utils.go
- workflow.go