Documentation
¶
Index ¶
- Constants
- func AppendToHistory(filename string, message types.Message) error
- func Chat(ctx context.Context, req types.Request) (*types.Response, error)
- func CreateMessage(msgType types.MsgType, role types.Role, model, content string) types.Message
- func CreateToolCallMessage(role types.Role, model, toolName, toolUseID, content string) types.Message
- func CreateToolResultMessage(role types.Role, model, toolName, toolUseID, content string) types.Message
- func ExecuteBuiltinTool(ctx context.Context, call types.ToolCall) (types.ToolResult, error)
- func FilterHistoryByType(messages []types.Message, msgType types.MsgType) []types.Message
- func GetLastUserMessage(messages []types.Message) *types.Message
- func GetSystemPrompts(messages []types.Message) []string
- func LoadHistory(filename string) ([]types.Message, error)
- func SaveHistory(filename string, messages []types.Message) error
- func WithCache(enabled bool) types.ChatOption
- func WithDefaultToolCwd(cwd string) types.ChatOption
- func WithEventCallback(callback types.EventCallback) types.ChatOption
- func WithHistory(messages []types.Message) types.ChatOption
- func WithMCPServers(servers ...string) types.ChatOption
- func WithMaxRounds(rounds int) types.ChatOption
- func WithStdStream(stdin io.Reader, stdout io.Writer) types.ChatOption
- func WithSystemPrompt(prompt string) types.ChatOption
- func WithToolCallback(callback types.ToolCallback) types.ChatOption
- func WithToolFiles(files ...string) types.ChatOption
- func WithToolJSONs(jsons ...string) types.ChatOption
- func WithTools(tools ...string) types.ChatOption
- type AnthropicResponseResult
- type CliHandler
- type CliOptions
- type Client
- type ClientUnion
- type Config
- type GeminiResponseResult
- type JSONLogEntry
- type MessageHistoryUnion
- type Messages
- func (messages Messages) ToAnthropic() (msgs []anthropic.MessageParam, systemPrompts []string, err error)
- func (messages Messages) ToGemini() (msgs []*genai.Content, systemPrompts []string, err error)
- func (messages Messages) ToOpenAI(keepSystemPrompts bool) (msgs []openai.ChatCompletionMessageParamUnion, systemPrompts []string, ...)
- type MessagesUnion
- type ResponseResult
- type ToolInfo
- type ToolInfoMapping
Examples ¶
Constants ¶
const MAX_PRINT_LIMIT = 2048
Variables ¶
This section is empty.
Functions ¶
func AppendToHistory ¶
AppendToHistory appends a single message to a history file
func CreateMessage ¶
CreateMessage creates a new message with timestamp
func CreateToolCallMessage ¶
func CreateToolCallMessage(role types.Role, model, toolName, toolUseID, content string) types.Message
CreateToolCallMessage creates a tool call message
func CreateToolResultMessage ¶
func CreateToolResultMessage(role types.Role, model, toolName, toolUseID, content string) types.Message
CreateToolResultMessage creates a tool result message
func ExecuteBuiltinTool ¶
ExecuteBuiltinTool executes a builtin tool with the given call
func FilterHistoryByType ¶
FilterHistoryByType filters messages by type
func GetLastUserMessage ¶
GetLastUserMessage returns the last user message from history
func GetSystemPrompts ¶
GetSystemPrompts extracts all system prompts from message history
func LoadHistory ¶
LoadHistory loads historical messages from a file
func SaveHistory ¶
SaveHistory saves messages to a file (overwrites existing file)
func WithCache ¶
func WithCache(enabled bool) types.ChatOption
WithCache controls whether caching is enabled (default: true)
func WithDefaultToolCwd ¶
func WithDefaultToolCwd(cwd string) types.ChatOption
WithDefaultToolCwd sets the default working directory for tool execution
func WithEventCallback ¶
func WithEventCallback(callback types.EventCallback) types.ChatOption
WithEventCallback sets a callback for receiving events during chat processing
func WithHistory ¶
func WithHistory(messages []types.Message) types.ChatOption
WithHistory provides historical messages for conversation context
func WithMCPServers ¶
func WithMCPServers(servers ...string) types.ChatOption
WithMCPServers specifies MCP servers to connect to
func WithMaxRounds ¶
func WithMaxRounds(rounds int) types.ChatOption
WithMaxRounds sets the maximum number of conversation rounds
func WithStdStream ¶
WithStdStream sets stdin and stdout for bidirectional tool callback communication
func WithSystemPrompt ¶
func WithSystemPrompt(prompt string) types.ChatOption
WithSystemPrompt sets the system prompt for the conversation
func WithToolCallback ¶
func WithToolCallback(callback types.ToolCallback) types.ChatOption
WithToolCallback sets a custom tool execution callback
func WithToolFiles ¶
func WithToolFiles(files ...string) types.ChatOption
WithToolFiles specifies custom tool definition files to load
func WithToolJSONs ¶
func WithToolJSONs(jsons ...string) types.ChatOption
WithToolJSONs specifies custom tool definitions as JSON strings
func WithTools ¶
func WithTools(tools ...string) types.ChatOption
WithTools specifies the builtin tools to make available
Types ¶
type AnthropicResponseResult ¶
type AnthropicResponseResult struct {
Messages []types.Message
ToolCalls []types.ToolCall
TokenUsage types.TokenUsage
ToolUseNum int
Stopped bool
RespMessages []anthropic.ContentBlockParamUnion
ToolResults []anthropic.ContentBlockParamUnion
}
type CliHandler ¶
type CliHandler struct {
// contains filtered or unexported fields
}
CliHandler wraps the core client with CLI-specific functionality
Example ¶
ExampleCliHandler demonstrates CLI usage
package main
import (
"context"
"fmt"
"os"
"github.com/xhd2015/kode-ai/chat"
)
func main() {
client, err := chat.NewClient(chat.Config{
Model: "claude-3-7-sonnet",
Token: os.Getenv("ANTHROPIC_API_KEY"),
})
if err != nil {
fmt.Printf("Error creating client: %v\n", err)
return
}
// CLI wrapper for command-line usage
cliHandler := chat.NewCliHandler(client, chat.CliOptions{
RecordFile: "session.json",
LogChat: true,
Verbose: false,
})
err = cliHandler.HandleCli(context.Background(), "Hello, how are you?",
chat.WithTools("file_read"),
chat.WithMaxRounds(2),
)
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Println("CLI chat completed")
}
func NewCliHandler ¶
func NewCliHandler(client *Client, opts CliOptions) *CliHandler
NewCliHandler creates a new CLI handler
func (*CliHandler) HandleCli ¶
func (h *CliHandler) HandleCli(ctx context.Context, message string, coreOpts ...types.ChatOption) error
HandleCli handles a chat request with CLI-specific behavior
type CliOptions ¶
type CliOptions struct {
RecordFile string // File recording for session persistence
IgnoreDuplicateMsg bool // Interactive duplicate message handling
LogRequest bool // Debug request logging
LogChat bool // Chat progress logging
Verbose bool // Verbose output
JSONOutput bool // Output response as JSON
StreamPair *types.StreamPair
}
CliOptions represents CLI-specific options that don't belong in the core library
type Client ¶
type Client struct {
// contains filtered or unexported fields
}
Client represents the chat client
Example ¶
ExampleClient demonstrates basic usage of the chat library
package main
import (
"context"
"fmt"
"os"
"github.com/xhd2015/kode-ai/chat"
)
func main() {
// Create a client
client, err := chat.NewClient(chat.Config{
Model: "claude-3-7-sonnet",
Token: os.Getenv("ANTHROPIC_API_KEY"),
})
if err != nil {
fmt.Printf("Error creating client: %v\n", err)
return
}
// Simple chat
response, err := client.Chat(context.Background(), "What is Go programming language?")
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Response: %s\n", response.LastAssistantMsg[:50]+"...")
fmt.Printf("Token usage: %d\n", response.TokenUsage.Total)
}
Example (MultiRound) ¶
ExampleClient_multiRound demonstrates multi-round conversation
package main
import (
"context"
"fmt"
"os"
"github.com/xhd2015/kode-ai/chat"
"github.com/xhd2015/kode-ai/types"
)
func main() {
client, err := chat.NewClient(chat.Config{
Model: "gpt-4o",
Token: os.Getenv("OPENAI_API_KEY"),
})
if err != nil {
fmt.Printf("Error creating client: %v\n", err)
return
}
var history []types.Message
// First message
_, err = client.Chat(context.Background(), "My name is Alice",
chat.WithHistory(history))
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
// Note: Response doesn't have Messages field, so we can't append to history in this simple way
// Follow-up message
response2, err := client.Chat(context.Background(), "What is my name?",
chat.WithHistory(history))
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Second response: %s\n", response2.LastAssistantMsg[:50]+"...")
}
Example (WithCustomToolCallback) ¶
ExampleClient_withCustomToolCallback demonstrates custom tool handling
package main
import (
"context"
"fmt"
"os"
"github.com/xhd2015/kode-ai/chat"
"github.com/xhd2015/kode-ai/types"
)
func main() {
client, err := chat.NewClient(chat.Config{
Model: "claude-3-7-sonnet",
Token: os.Getenv("ANTHROPIC_API_KEY"),
})
if err != nil {
fmt.Printf("Error creating client: %v\n", err)
return
}
// Custom tool handler
toolHandler := func(ctx context.Context, stream types.StreamContext, call types.ToolCall) (types.ToolResult, bool, error) {
switch call.Name {
case "custom_database_query":
sql := call.Arguments["sql"].(string)
// Simulate database query
result := map[string]interface{}{
"rows": []string{"user1", "user2"},
"count": 2,
"query": sql,
"message": "Query executed successfully",
}
return types.ToolResult{Content: result}, true, nil // handled=true
default:
// Don't handle this tool, fallback to built-in tools
return types.ToolResult{}, false, nil // handled=false, no error
}
}
response, err := client.Chat(context.Background(), "Query the database for users",
chat.WithToolCallback(toolHandler))
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Response received: %s\n", response.LastAssistantMsg)
}
Example (WithTools) ¶
ExampleClient_withTools demonstrates chat with tools
package main
import (
"context"
"fmt"
"os"
"github.com/xhd2015/kode-ai/chat"
"github.com/xhd2015/kode-ai/types"
)
func main() {
client, err := chat.NewClient(chat.Config{
Model: "gpt-4o",
Token: os.Getenv("OPENAI_API_KEY"),
})
if err != nil {
fmt.Printf("Error creating client: %v\n", err)
return
}
// Chat with tools and custom callback
response, err := client.Chat(context.Background(), "List files in current directory",
chat.WithTools("file_list"),
chat.WithEventCallback(func(event types.Message) {
switch event.Type {
case types.MsgType_Msg:
fmt.Print(event.Content)
case types.MsgType_ToolCall:
fmt.Printf("\n🔧 Calling tool: %s\n", event.ToolName)
case types.MsgType_ToolResult:
fmt.Printf("✅ Tool completed\n")
}
}),
)
if err != nil {
fmt.Printf("Error: %v\n", err)
return
}
fmt.Printf("Response: %s\n", response.LastAssistantMsg)
}
type ClientUnion ¶
ClientUnion holds provider-specific message unions for internal use
type Config ¶
type Config struct {
Model string // Required: Model name (e.g., "claude-3-7-sonnet")
Token string // Required: API token
BaseURL string // Optional: Custom API base URL
Provider providers.Provider // Optional: Auto-detected from model if not specified
LogLevel types.LogLevel // Optional: None, Request, Response, Debug
Logger types.Logger
}
Config represents the client configuration with provider-specific fields
type GeminiResponseResult ¶
type JSONLogEntry ¶
type JSONLogEntry struct {
Type string `json:"type"`
Content string `json:"content,omitempty"`
Metadata interface{} `json:"metadata,omitempty"`
Timestamp string `json:"timestamp,omitempty"`
}
JSONLogEntry represents a structured log entry for JSON output
type MessageHistoryUnion ¶
type MessageHistoryUnion struct {
FullHistory Messages
SystemPrompts []string
OpenAI []openai.ChatCompletionMessageParamUnion
Anthropic []anthropic.MessageParam
Gemini []*genai.Content
}
type Messages ¶
Messages is a local wrapper for conversion methods
func (Messages) ToAnthropic ¶
func (messages Messages) ToAnthropic() (msgs []anthropic.MessageParam, systemPrompts []string, err error)
ToAnthropic converts unified messages to Anthropic format
type MessagesUnion ¶
type MessagesUnion struct {
OpenAI []openai.ChatCompletionMessageParamUnion
Anthropic []anthropic.MessageParam
Gemini []*genai.Content
}
type ResponseResult ¶
type ResponseResult struct {
Messages []types.Message
ToolCalls []types.ToolCall
TokenUsage types.TokenUsage
ToolUseNum int
Stopped bool
RespMessages []openai.ChatCompletionMessageParamUnion // For OpenAI
ToolResults []openai.ChatCompletionMessageParamUnion // For OpenAI
}
ResponseResult represents the result of processing a model response; RespMessages and ToolResults hold the OpenAI-specific message forms
type ToolInfo ¶
type ToolInfo struct {
Name string
Builtin bool
ToolDefinition *tools.UnifiedTool
MCPServer string
MCPClient *client.Client
}
ToolInfo represents information about a tool
type ToolInfoMapping ¶
ToolInfoMapping maps tool names to their information